Dataset schema:

| Column | Type | Min | Max |
| --- | --- | --- | --- |
| id | string (length) | 11 | 95 |
| author | string (length) | 3 | 36 |
| task_category | string (16 classes) | | |
| tags | sequence (length) | 1 | 4.05k |
| created_time | timestamp[s] | 2022-03-02 23:29:04 | 2025-03-18 02:34:30 |
| last_modified | timestamp[s] | 2021-05-13 19:09:22 | 2025-03-18 03:19:02 |
| downloads | int64 | 0 | 15.6M |
| likes | int64 | 0 | 4.86k |
| README | string (length) | 246 | 1.01M |
| matched_task | sequence (length) | 1 | 8 |
| matched_bigbio_names | sequence (length) | 1 | 8 |
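For orientation, here is a minimal sketch of loading a dataset with this schema through the Hugging Face `datasets` library and filtering on `task_category`; the repository id `org/model-cards` is a hypothetical placeholder rather than the actual dataset name.

```python
from datasets import load_dataset

# Hypothetical repository id -- substitute the real dataset name.
ds = load_dataset("org/model-cards", split="train")

# Keep only rows from one of the 16 task_category classes.
sim = ds.filter(lambda row: row["task_category"] == "sentence-similarity")

# Inspect a few lightweight columns without printing the large README field.
for row in sim.select(range(min(3, len(sim)))):
    print(row["id"], row["author"], row["downloads"], row["likes"])
```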
Example row:

- id: Mihaiii/Venusaur
- author: Mihaiii
- task_category: sentence-similarity
- tags: [ "sentence-transformers", "onnx", "safetensors", "bert", "feature-extraction", "sentence-similarity", "gte", "mteb", "dataset:Mihaiii/qa-assistant", "base_model:Mihaiii/Bulbasaur", "base_model:quantized:Mihaiii/Bulbasaur", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
- created_time: 2024-04-29T21:30:53
- last_modified: 2024-04-30T02:06:15
- downloads: 185
- likes: 3
README (front-matter excerpt):

```yaml
---
base_model: Mihaiii/Bulbasaur
datasets:
- Mihaiii/qa-assistant
library_name: sentence-transformers
license: mit
pipeline_tag: sentence-similarity
tags:
- sentence-transformers
- feature-extraction
- sentence-similarity
- gte
- mteb
model-index:
- name: Venusaur
  results:
  - task:
      type: Classification
    dataset:
      name: MTEB AmazonCounterfactualClassification (en)
      type: mteb/amazon_counterfactual
      config: en
      split: test
      revision: e8379541af4e31359cca9fbcf4b00f2671dba205
    metrics:
    - type: accuracy
      value: 73.17910447761194
    - type: ap
      value: 35.29994612283548
    - type: f1
      value: 66.87845205993153
  - task:
      type: Classification
    dataset:
      name: MTEB AmazonPolarityClassification
      type: mteb/amazon_polarity
      config: default
      split: test
      revision: e2d317d38cd51312af73b3d32a06d1a08b442046
    metrics:
    - type: accuracy
      value: 79.993525
    - type: ap
      value: 74.7042261687233
    - type: f1
      value: 79.9004149386498
  - task:
      type: Classification
    dataset:
      name: MTEB AmazonReviewsClassification (en)
      type: mteb/amazon_reviews_multi
      config: en
      split: test
      revision: 1399c76144fd37290681b995c656ef9b2e06e26d
    metrics:
    - type: accuracy
      value: 39.656000000000006
    - type: f1
      value: 39.287139345446256
  - task:
      type: Retrieval
    dataset:
      name: MTEB ArguAna
      type: mteb/arguana
      config: default
      split: test
      revision: c22ab2a51041ffd869aaddef7af8d8215647e41a
    metrics:
    - type: map_at_1
      value: 16.643
    - type: map_at_10
      value: 28.276
    - type: map_at_100
      value: 29.543999999999997
    - type: map_at_1000
      value: 29.595
    - type: map_at_20
      value: 29.043000000000003
    - type: map_at_3
      value: 24.739
    - type: map_at_5
      value: 26.592
    - type: mrr_at_1
      value: 17.639
    - type: mrr_at_10
      value: 28.631
    - type: mrr_at_100
      value: 29.891000000000002
    - type: mrr_at_1000
      value: 29.942999999999998
    - type: mrr_at_20
      value: 29.391000000000002
    - type: mrr_at_3
      value: 25.107000000000003
    - type: mrr_at_5
      value: 26.942
    - type: ndcg_at_1
      value: 16.643
    - type: ndcg_at_10
      value: 34.8
    - type: ndcg_at_100
      value: 41.179
    - type: ndcg_at_1000
      value: 42.564
    - type: ndcg_at_20
      value: 37.601
    - type: ndcg_at_3
      value: 27.356
    - type: ndcg_at_5
      value: 30.725
    - type: precision_at_1
      value: 16.643
    - type: precision_at_10
      value: 5.576
    - type: precision_at_100
      value: 0.861
    - type: precision_at_1000
      value: 0.097
    - type: precision_at_20
      value: 3.343
    - type: precision_at_3
      value: 11.641
    - type: precision_at_5
      value: 8.634
    - type: recall_at_1
      value: 16.643
    - type: recall_at_10
      value: 55.761
    - type: recall_at_100
      value: 86.06
    - type: recall_at_1000
      value: 97.013
    - type: recall_at_20
      value: 66.85600000000001
    - type: recall_at_3
      value: 34.922
    - type: recall_at_5
      value: 43.172
  - task:
      type: Clustering
    dataset:
      name: MTEB ArxivClusteringP2P
      type: mteb/arxiv-clustering-p2p
      config: default
      split: test
      revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d
    metrics:
    - type: v_measure
      value: 31.76467048453136
    - type: v_measures
      value:
      - 0.2646936786804572
      - 0.27790871012280266
      - 0.29027802989910717
      - 0.27400555976615254
      - 0.2823478131745678
      - 0.25739544436992295
      - 0.3014171939280134
      - 0.2862214695233955
      - 0.2856734533249879
      - 0.2870107976688266
      - 0.3709000837926645
      - 0.3702167780750079
      - 0.36556393540769305
      - 0.37650336515785243
      - 0.3699811227722488
      - 0.36806220730606526
      - 0.3696328229784335
      - 0.3852970338255622
      - 0.37157613433218695
      - 0.368267862192135
      - 0.3715516752706066
      - 0.26093751350716654
      - 0.24003989063421033
      - 0.31112640151573373
      - 0.2509815194812587
      - 0.19256512170374224
      - 0.2638556294764011
      - 0.08503820346290819
      - 0.1374194639615466
      - 1.0
      - 0.21057893489306592
```
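Because the README column stores the raw model card, the metrics shown above can be pulled out programmatically by parsing the YAML front matter. A minimal sketch, assuming PyYAML is available and that each card begins with a `---`-delimited front-matter block; `row` refers to a record loaded as in the earlier sketch.

```python
import yaml

def front_matter(readme: str) -> dict:
    """Return the YAML front matter between the leading '---' markers."""
    _, block, _ = readme.split("---", 2)
    return yaml.safe_load(block)

meta = front_matter(row["README"])
for result in meta["model-index"][0]["results"]:
    task = result["task"]["type"]
    dataset = result["dataset"]["name"]
    metrics = {m["type"]: m["value"] for m in result["metrics"]}
    print(task, dataset, sorted(metrics))
```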
0.2646936786804572 - 0.27790871012280266 - 0.29027802989910717 - 0.27400555976615254 - 0.2823478131745678 - 0.25739544436992295 - 0.3014171939280134 - 0.2862214695233955 - 0.2856734533249879 - 0.2870107976688266 - 0.3709000837926645 - 0.3702167780750079 - 0.36556393540769305 - 0.37650336515785243 - 0.3699811227722488 - 0.36806220730606526 - 0.3696328229784335 - 0.3852970338255622 - 0.37157613433218695 - 0.368267862192135 - 0.3715516752706066 - 0.26093751350716654 - 0.24003989063421033 - 0.31112640151573373 - 0.2509815194812587 - 0.19256512170374224 - 0.2638556294764011 - 0.08503820346290819 - 0.1374194639615466 - 1.0 - 0.21057893489306592 - 0.2646936786804572 - 0.27790871012280266 - 0.29027802989910717 - 0.27400555976615254 - 0.2823478131745678 - 0.25739544436992295 - 0.3014171939280134 - 0.2862214695233955 - 0.2856734533249879 - 0.2870107976688266 - 0.3709000837926645 - 0.3702167780750079 - 0.36556393540769305 - 0.37650336515785243 - 0.3699811227722488 - 0.36806220730606526 - 0.3696328229784335 - 0.3852970338255622 - 0.37157613433218695 - 0.368267862192135 - 0.3715516752706066 - 0.26093751350716654 - 0.24003989063421033 - 0.31112640151573373 - 0.2509815194812587 - 0.19256512170374224 - 0.2638556294764011 - 0.08503820346290819 - 0.1374194639615466 - 1.0 - 0.21057893489306592 - 0.2646936786804572 - 0.27790871012280266 - 0.29027802989910717 - 0.27400555976615254 - 0.2823478131745678 - 0.25739544436992295 - 0.3014171939280134 - 0.2862214695233955 - 0.2856734533249879 - 0.2870107976688266 - 0.3709000837926645 - 0.3702167780750079 - 0.36556393540769305 - 0.37650336515785243 - 0.3699811227722488 - 0.36806220730606526 - 0.3696328229784335 - 0.3852970338255622 - 0.37157613433218695 - 0.368267862192135 - 0.3715516752706066 - 0.26093751350716654 - 0.24003989063421033 - 0.31112640151573373 - 0.2509815194812587 - 0.19256512170374224 - 0.2638556294764011 - 0.08503820346290819 - 0.1374194639615466 - 1.0 - 0.21057893489306592 - 0.2646936786804572 - 0.27790871012280266 - 0.29027802989910717 - 0.27400555976615254 - 0.2823478131745678 - 0.25739544436992295 - 0.3014171939280134 - 0.2862214695233955 - 0.2856734533249879 - 0.2870107976688266 - 0.3709000837926645 - 0.3702167780750079 - 0.36556393540769305 - 0.37650336515785243 - 0.3699811227722488 - 0.36806220730606526 - 0.3696328229784335 - 0.3852970338255622 - 0.37157613433218695 - 0.368267862192135 - 0.3715516752706066 - 0.26093751350716654 - 0.24003989063421033 - 0.31112640151573373 - 0.2509815194812587 - 0.19256512170374224 - 0.2638556294764011 - 0.08503820346290819 - 0.1374194639615466 - 1.0 - 0.21057893489306592 - 0.2646936786804572 - 0.27790871012280266 - 0.29027802989910717 - 0.27400555976615254 - 0.2823478131745678 - 0.25739544436992295 - 0.3014171939280134 - 0.2862214695233955 - 0.2856734533249879 - 0.2870107976688266 - 0.3709000837926645 - 0.3702167780750079 - 0.36556393540769305 - 0.37650336515785243 - 0.3699811227722488 - 0.36806220730606526 - 0.3696328229784335 - 0.3852970338255622 - 0.37157613433218695 - 0.368267862192135 - 0.3715516752706066 - 0.26093751350716654 - 0.24003989063421033 - 0.31112640151573373 - 0.2509815194812587 - 0.19256512170374224 - 0.2638556294764011 - 0.08503820346290819 - 0.1374194639615466 - 1.0 - 0.21057893489306592 - 0.2646936786804572 - 0.27790871012280266 - 0.29027802989910717 - 0.27400555976615254 - 0.2823478131745678 - 0.25739544436992295 - 0.3014171939280134 - 0.2862214695233955 - 0.2856734533249879 - 0.2870107976688266 - 0.3709000837926645 - 0.3702167780750079 - 0.36556393540769305 - 0.37650336515785243 - 
0.3699811227722488 - 0.36806220730606526 - 0.3696328229784335 - 0.3852970338255622 - 0.37157613433218695 - 0.368267862192135 - 0.3715516752706066 - 0.26093751350716654 - 0.24003989063421033 - 0.31112640151573373 - 0.2509815194812587 - 0.19256512170374224 - 0.2638556294764011 - 0.08503820346290819 - 0.1374194639615466 - 1.0 - 0.21057893489306592 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 21.06388933035354 - type: v_measures value: - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 
0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 
0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 
0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 
- 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - 0.15139426348464108 - 0.1723972791290331 - 0.17283164578167945 - 0.16480634318126675 - 0.16569873939027066 - 0.1728549819933171 - 0.17524195492901368 - 0.18366858039747846 - 0.16933886504858436 - 0.16720515987637327 - 0.23635288879364383 - 0.23516065130475095 - 0.23711945768749756 - 0.24435956439029374 - 0.24042600701040173 - 0.23215638321332788 - 0.23458643115209107 - 0.24946576681768332 - 0.2350071814521417 - 0.23906840961229672 - 0.2381730684068399 - 0.14161450056618247 - 0.16111253325078148 - 0.1961351147776721 - 0.1410367521003569 - 0.14337306941509392 - 0.164137728457383 - 0.046549912102592315 - 0.0965914522844279 - 1.0 - 0.12194100640248183 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 53.770982215325056 - type: mrr value: 68.00400123114805 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 77.20301104745533 - type: cos_sim_spearman value: 77.59453912854975 - type: euclidean_pearson value: 74.21678798189272 - type: euclidean_spearman value: 74.9956847311664 - type: manhattan_pearson value: 74.55059214013183 - type: manhattan_spearman value: 75.51557609531613 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 77.9512987012987 - type: f1 value: 77.89256430400536 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p 
config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 29.83922611010262 - type: v_measures value: - 0.29324346631343595 - 0.2922357214987931 - 0.2950587109611168 - 0.2960401478358995 - 0.2873870207712407 - 0.29649976178620835 - 0.3055622039732096 - 0.3127947496618221 - 0.2974633994658177 - 0.307637428742718
- task: type: Clustering dataset:
name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 18.34253917925029 - type: v_measures value: - 0.19663926944608978 - 0.17549804536847785 - 0.1747660797341959 - 0.1733985544939657 - 0.17204103363489412 - 0.18165752579382782 - 0.18835786592472062 - 0.18837179576029925 - 0.19741374109182327 - 0.18611000667673502
- task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: map_at_1 value: 19.709 - type: map_at_10 value: 26.522000000000002 - type: map_at_100 value: 27.613 - type: map_at_1000 value: 27.750999999999998 - type: map_at_20 value: 27.033 - type: map_at_3 value: 24.127000000000002 - type: map_at_5 value: 25.319000000000003 - type: mrr_at_1 value: 24.607 - type: mrr_at_10 value: 31.776 - type: mrr_at_100 value: 32.629999999999995 - type: mrr_at_1000 value: 32.699 - type: mrr_at_20 value: 32.23 - type: mrr_at_3 value: 29.423 - type: mrr_at_5 value: 30.703000000000003 - type: ndcg_at_1 value: 24.607 - type: ndcg_at_10 value: 31.311 - type: ndcg_at_100 value: 36.412 - type: ndcg_at_1000 value: 39.428999999999995 - type: ndcg_at_20 value: 32.793 - type: ndcg_at_3 value: 27.388 - type: ndcg_at_5 value: 28.899 - type: precision_at_1 value: 24.607 - type: precision_at_10 value: 5.951 - type: precision_at_100 value: 1.083 - type: precision_at_1000 value: 0.165 - type: precision_at_20 value: 3.5479999999999996 - type: precision_at_3 value: 12.971 - type: precision_at_5 value: 9.356 - type: recall_at_1 value: 19.709 - type: recall_at_10 value: 40.274 - type: recall_at_100 value: 62.926 - type: recall_at_1000 value: 83.54599999999999 - type: recall_at_20 value: 45.585 - type: recall_at_3 value: 28.587 - type: recall_at_5 value: 32.967999999999996
- task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: map_at_1 value: 11.749 - type: map_at_10 value: 15.958 - type: map_at_100 value: 16.694 - type: map_at_1000 value: 16.805 - type: map_at_20 value: 16.325 - type: map_at_3 value: 14.469000000000001 - type: map_at_5 value: 15.286 - type: mrr_at_1 value: 14.521999999999998 - type: mrr_at_10 value: 19.076999999999998 - type: mrr_at_100 value: 19.785 - type: mrr_at_1000 value: 19.863 - type: mrr_at_20 value: 19.451999999999998 - type: mrr_at_3 value: 17.419999999999998 - type: mrr_at_5 value: 18.379 - type: ndcg_at_1 value: 14.521999999999998 - type: ndcg_at_10 value: 18.944 - type: ndcg_at_100 value: 22.685 - type: ndcg_at_1000 value: 25.562 - type: ndcg_at_20 value: 20.169999999999998 - type: ndcg_at_3 value: 16.18 - type: ndcg_at_5 value: 17.476 - type: precision_at_1 value: 14.521999999999998 - type: precision_at_10 value: 3.5409999999999995 - type: precision_at_100 value: 0.679 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_20 value: 2.185 - type: precision_at_3 value: 7.495 - type: precision_at_5 value: 5.541 - type: recall_at_1 value: 11.749 - type: recall_at_10 value: 24.759999999999998 - type: 
recall_at_100 value: 41.54 - type: recall_at_1000 value: 61.836 - type: recall_at_20 value: 29.252 - type: recall_at_3 value: 17.278 - type: recall_at_5 value: 20.57 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: map_at_1 value: 19.827 - type: map_at_10 value: 27.417 - type: map_at_100 value: 28.383000000000003 - type: map_at_1000 value: 28.483000000000004 - type: map_at_20 value: 27.901999999999997 - type: map_at_3 value: 25.3 - type: map_at_5 value: 26.432 - type: mrr_at_1 value: 22.947 - type: mrr_at_10 value: 30.279 - type: mrr_at_100 value: 31.1 - type: mrr_at_1000 value: 31.171 - type: mrr_at_20 value: 30.714000000000002 - type: mrr_at_3 value: 28.37 - type: mrr_at_5 value: 29.37 - type: ndcg_at_1 value: 22.947 - type: ndcg_at_10 value: 31.793 - type: ndcg_at_100 value: 36.571999999999996 - type: ndcg_at_1000 value: 39.106 - type: ndcg_at_20 value: 33.376 - type: ndcg_at_3 value: 27.872000000000003 - type: ndcg_at_5 value: 29.601 - type: precision_at_1 value: 22.947 - type: precision_at_10 value: 5.3420000000000005 - type: precision_at_100 value: 0.856 - type: precision_at_1000 value: 0.116 - type: precision_at_20 value: 3.107 - type: precision_at_3 value: 12.684999999999999 - type: precision_at_5 value: 8.790000000000001 - type: recall_at_1 value: 19.827 - type: recall_at_10 value: 42.191 - type: recall_at_100 value: 64.307 - type: recall_at_1000 value: 83.161 - type: recall_at_20 value: 48.046 - type: recall_at_3 value: 31.352999999999998 - type: recall_at_5 value: 35.783 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: map_at_1 value: 11.802 - type: map_at_10 value: 15.799 - type: map_at_100 value: 16.53 - type: map_at_1000 value: 16.638 - type: map_at_20 value: 16.161 - type: map_at_3 value: 14.495 - type: map_at_5 value: 15.128 - type: mrr_at_1 value: 12.655 - type: mrr_at_10 value: 17.03 - type: mrr_at_100 value: 17.785999999999998 - type: mrr_at_1000 value: 17.88 - type: mrr_at_20 value: 17.416 - type: mrr_at_3 value: 15.65 - type: mrr_at_5 value: 16.305 - type: ndcg_at_1 value: 12.655 - type: ndcg_at_10 value: 18.411 - type: ndcg_at_100 value: 22.547 - type: ndcg_at_1000 value: 25.685999999999996 - type: ndcg_at_20 value: 19.732 - type: ndcg_at_3 value: 15.713 - type: ndcg_at_5 value: 16.821 - type: precision_at_1 value: 12.655 - type: precision_at_10 value: 2.904 - type: precision_at_100 value: 0.525 - type: precision_at_1000 value: 0.083 - type: precision_at_20 value: 1.7399999999999998 - type: precision_at_3 value: 6.6290000000000004 - type: precision_at_5 value: 4.655 - type: recall_at_1 value: 11.802 - type: recall_at_10 value: 25.373 - type: recall_at_100 value: 45.462 - type: recall_at_1000 value: 69.98299999999999 - type: recall_at_20 value: 30.455 - type: recall_at_3 value: 17.941 - type: recall_at_5 value: 20.61 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: map_at_1 value: 6.6739999999999995 - type: map_at_10 value: 10.181 - type: map_at_100 value: 11.138 - type: map_at_1000 value: 11.258 - type: map_at_20 value: 10.673 - type: map_at_3 value: 8.997 - type: map_at_5 value: 9.587 - type: mrr_at_1 value: 8.209 - type: 
mrr_at_10 value: 12.356 - type: mrr_at_100 value: 13.370000000000001 - type: mrr_at_1000 value: 13.466000000000001 - type: mrr_at_20 value: 12.889000000000001 - type: mrr_at_3 value: 10.821 - type: mrr_at_5 value: 11.604000000000001 - type: ndcg_at_1 value: 8.209 - type: ndcg_at_10 value: 12.849 - type: ndcg_at_100 value: 17.916 - type: ndcg_at_1000 value: 21.192 - type: ndcg_at_20 value: 14.643 - type: ndcg_at_3 value: 10.299 - type: ndcg_at_5 value: 11.350999999999999 - type: precision_at_1 value: 8.209 - type: precision_at_10 value: 2.5 - type: precision_at_100 value: 0.577 - type: precision_at_1000 value: 0.099 - type: precision_at_20 value: 1.667 - type: precision_at_3 value: 5.017 - type: precision_at_5 value: 3.7560000000000002 - type: recall_at_1 value: 6.6739999999999995 - type: recall_at_10 value: 19.016 - type: recall_at_100 value: 41.806 - type: recall_at_1000 value: 65.605 - type: recall_at_20 value: 25.764 - type: recall_at_3 value: 12.030000000000001 - type: recall_at_5 value: 14.568 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: map_at_1 value: 12.133 - type: map_at_10 value: 17.32 - type: map_at_100 value: 18.294 - type: map_at_1000 value: 18.404 - type: map_at_20 value: 17.804000000000002 - type: map_at_3 value: 15.626000000000001 - type: map_at_5 value: 16.572 - type: mrr_at_1 value: 15.399 - type: mrr_at_10 value: 21.054000000000002 - type: mrr_at_100 value: 21.951999999999998 - type: mrr_at_1000 value: 22.03 - type: mrr_at_20 value: 21.522 - type: mrr_at_3 value: 19.297 - type: mrr_at_5 value: 20.294 - type: ndcg_at_1 value: 15.399 - type: ndcg_at_10 value: 21.02 - type: ndcg_at_100 value: 25.978 - type: ndcg_at_1000 value: 28.803 - type: ndcg_at_20 value: 22.642 - type: ndcg_at_3 value: 17.864 - type: ndcg_at_5 value: 19.335 - type: precision_at_1 value: 15.399 - type: precision_at_10 value: 3.9079999999999995 - type: precision_at_100 value: 0.781 - type: precision_at_1000 value: 0.12 - type: precision_at_20 value: 2.493 - type: precision_at_3 value: 8.502 - type: precision_at_5 value: 6.16 - type: recall_at_1 value: 12.133 - type: recall_at_10 value: 28.753 - type: recall_at_100 value: 50.806 - type: recall_at_1000 value: 70.75399999999999 - type: recall_at_20 value: 34.485 - type: recall_at_3 value: 19.664 - type: recall_at_5 value: 23.566000000000003 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: map_at_1 value: 9.555 - type: map_at_10 value: 13.553 - type: map_at_100 value: 14.438 - type: map_at_1000 value: 14.562 - type: map_at_20 value: 13.977999999999998 - type: map_at_3 value: 12.118 - type: map_at_5 value: 12.811 - type: mrr_at_1 value: 11.872 - type: mrr_at_10 value: 16.613 - type: mrr_at_100 value: 17.512 - type: mrr_at_1000 value: 17.607 - type: mrr_at_20 value: 17.108 - type: mrr_at_3 value: 15.068000000000001 - type: mrr_at_5 value: 15.839 - type: ndcg_at_1 value: 11.872 - type: ndcg_at_10 value: 16.556 - type: ndcg_at_100 value: 21.34 - type: ndcg_at_1000 value: 24.903 - type: ndcg_at_20 value: 18.102 - type: ndcg_at_3 value: 13.844000000000001 - type: ndcg_at_5 value: 14.893999999999998 - type: precision_at_1 value: 11.872 - type: precision_at_10 value: 3.082 - type: precision_at_100 value: 0.658 - type: precision_at_1000 value: 
0.11299999999999999 - type: precision_at_20 value: 1.992 - type: precision_at_3 value: 6.544999999999999 - type: precision_at_5 value: 4.68 - type: recall_at_1 value: 9.555 - type: recall_at_10 value: 22.931 - type: recall_at_100 value: 44.535000000000004 - type: recall_at_1000 value: 70.77799999999999 - type: recall_at_20 value: 28.403 - type: recall_at_3 value: 15.201 - type: recall_at_5 value: 18.145 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: mteb/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 11.476083333333333 - type: map_at_10 value: 16.002499999999998 - type: map_at_100 value: 16.875083333333333 - type: map_at_1000 value: 16.991916666666665 - type: map_at_20 value: 16.445416666666667 - type: map_at_3 value: 14.473666666666668 - type: map_at_5 value: 15.269583333333333 - type: mrr_at_1 value: 13.799083333333334 - type: mrr_at_10 value: 18.69941666666667 - type: mrr_at_100 value: 19.54075 - type: mrr_at_1000 value: 19.62791666666667 - type: mrr_at_20 value: 19.15166666666667 - type: mrr_at_3 value: 17.079666666666665 - type: mrr_at_5 value: 17.93583333333333 - type: ndcg_at_1 value: 13.799083333333334 - type: ndcg_at_10 value: 19.157583333333335 - type: ndcg_at_100 value: 23.675666666666668 - type: ndcg_at_1000 value: 26.761499999999998 - type: ndcg_at_20 value: 20.688416666666665 - type: ndcg_at_3 value: 16.23775 - type: ndcg_at_5 value: 17.494500000000002 - type: precision_at_1 value: 13.799083333333334 - type: precision_at_10 value: 3.449666666666667 - type: precision_at_100 value: 0.6782499999999999 - type: precision_at_1000 value: 0.11108333333333333 - type: precision_at_20 value: 2.1610833333333335 - type: precision_at_3 value: 7.496333333333332 - type: precision_at_5 value: 5.4156666666666675 - type: recall_at_1 value: 11.476083333333333 - type: recall_at_10 value: 26.132916666666667 - type: recall_at_100 value: 46.88099999999999 - type: recall_at_1000 value: 69.47425 - type: recall_at_20 value: 31.838583333333336 - type: recall_at_3 value: 17.943749999999998 - type: recall_at_5 value: 21.176833333333335 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: map_at_1 value: 10.166 - type: map_at_10 value: 13.980999999999998 - type: map_at_100 value: 14.728 - type: map_at_1000 value: 14.812 - type: map_at_20 value: 14.338000000000001 - type: map_at_3 value: 12.5 - type: map_at_5 value: 13.408000000000001 - type: mrr_at_1 value: 11.503 - type: mrr_at_10 value: 15.799 - type: mrr_at_100 value: 16.539 - type: mrr_at_1000 value: 16.614 - type: mrr_at_20 value: 16.155 - type: mrr_at_3 value: 14.213000000000001 - type: mrr_at_5 value: 15.201999999999998 - type: ndcg_at_1 value: 11.503 - type: ndcg_at_10 value: 16.647000000000002 - type: ndcg_at_100 value: 20.84 - type: ndcg_at_1000 value: 23.385 - type: ndcg_at_20 value: 17.93 - type: ndcg_at_3 value: 13.761999999999999 - type: ndcg_at_5 value: 15.311 - type: precision_at_1 value: 11.503 - type: precision_at_10 value: 2.7449999999999997 - type: precision_at_100 value: 0.541 - type: precision_at_1000 value: 0.082 - type: precision_at_20 value: 1.6789999999999998 - type: precision_at_3 value: 6.033 - type: precision_at_5 value: 4.5089999999999995 - type: recall_at_1 value: 10.166 - type: recall_at_10 value: 23.284 - type: recall_at_100 value: 43.224000000000004 - type: recall_at_1000 value: 
62.856 - type: recall_at_20 value: 28.166000000000004 - type: recall_at_3 value: 15.396 - type: recall_at_5 value: 19.248 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: map_at_1 value: 6.516 - type: map_at_10 value: 9.185 - type: map_at_100 value: 9.795 - type: map_at_1000 value: 9.902 - type: map_at_20 value: 9.508999999999999 - type: map_at_3 value: 8.245 - type: map_at_5 value: 8.724 - type: mrr_at_1 value: 8.121 - type: mrr_at_10 value: 11.228 - type: mrr_at_100 value: 11.885 - type: mrr_at_1000 value: 11.978 - type: mrr_at_20 value: 11.583 - type: mrr_at_3 value: 10.145999999999999 - type: mrr_at_5 value: 10.688 - type: ndcg_at_1 value: 8.121 - type: ndcg_at_10 value: 11.245 - type: ndcg_at_100 value: 14.524999999999999 - type: ndcg_at_1000 value: 17.62 - type: ndcg_at_20 value: 12.385 - type: ndcg_at_3 value: 9.429 - type: ndcg_at_5 value: 10.181999999999999 - type: precision_at_1 value: 8.121 - type: precision_at_10 value: 2.137 - type: precision_at_100 value: 0.451 - type: precision_at_1000 value: 0.08499999999999999 - type: precision_at_20 value: 1.387 - type: precision_at_3 value: 4.4510000000000005 - type: precision_at_5 value: 3.2620000000000005 - type: recall_at_1 value: 6.516 - type: recall_at_10 value: 15.456 - type: recall_at_100 value: 30.709999999999997 - type: recall_at_1000 value: 53.854 - type: recall_at_20 value: 19.756 - type: recall_at_3 value: 10.41 - type: recall_at_5 value: 12.317 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: map_at_1 value: 10.955 - type: map_at_10 value: 14.689 - type: map_at_100 value: 15.482000000000001 - type: map_at_1000 value: 15.614 - type: map_at_20 value: 15.085 - type: map_at_3 value: 13.318 - type: map_at_5 value: 13.950999999999999 - type: mrr_at_1 value: 13.34 - type: mrr_at_10 value: 17.514 - type: mrr_at_100 value: 18.3 - type: mrr_at_1000 value: 18.406 - type: mrr_at_20 value: 17.924 - type: mrr_at_3 value: 15.920000000000002 - type: mrr_at_5 value: 16.625 - type: ndcg_at_1 value: 13.34 - type: ndcg_at_10 value: 17.574 - type: ndcg_at_100 value: 21.909 - type: ndcg_at_1000 value: 25.402 - type: ndcg_at_20 value: 19.017 - type: ndcg_at_3 value: 14.75 - type: ndcg_at_5 value: 15.787999999999998 - type: precision_at_1 value: 13.34 - type: precision_at_10 value: 3.041 - type: precision_at_100 value: 0.599 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 1.908 - type: precision_at_3 value: 6.529999999999999 - type: precision_at_5 value: 4.646 - type: recall_at_1 value: 10.955 - type: recall_at_10 value: 23.831 - type: recall_at_100 value: 43.747 - type: recall_at_1000 value: 69.327 - type: recall_at_20 value: 29.17 - type: recall_at_3 value: 16.165 - type: recall_at_5 value: 18.701 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: map_at_1 value: 11.936 - type: map_at_10 value: 16.878 - type: map_at_100 value: 17.921 - type: map_at_1000 value: 18.093 - type: map_at_20 value: 17.468 - type: map_at_3 value: 15.21 - type: map_at_5 value: 16.056 - type: mrr_at_1 value: 15.02 - type: mrr_at_10 value: 20.023 - type: mrr_at_100 value: 20.965 - type: mrr_at_1000 value: 
21.060000000000002 - type: mrr_at_20 value: 20.576 - type: mrr_at_3 value: 18.215 - type: mrr_at_5 value: 19.134 - type: ndcg_at_1 value: 15.02 - type: ndcg_at_10 value: 20.459 - type: ndcg_at_100 value: 25.163999999999998 - type: ndcg_at_1000 value: 28.811999999999998 - type: ndcg_at_20 value: 22.387 - type: ndcg_at_3 value: 17.265 - type: ndcg_at_5 value: 18.605 - type: precision_at_1 value: 15.02 - type: precision_at_10 value: 3.9530000000000003 - type: precision_at_100 value: 0.8659999999999999 - type: precision_at_1000 value: 0.173 - type: precision_at_20 value: 2.619 - type: precision_at_3 value: 8.169 - type: precision_at_5 value: 6.047000000000001 - type: recall_at_1 value: 11.936 - type: recall_at_10 value: 27.694999999999997 - type: recall_at_100 value: 49.159000000000006 - type: recall_at_1000 value: 74.134 - type: recall_at_20 value: 35.258 - type: recall_at_3 value: 18.54 - type: recall_at_5 value: 21.959 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 6.691 - type: map_at_10 value: 10.546999999999999 - type: map_at_100 value: 11.485 - type: map_at_1000 value: 11.581 - type: map_at_20 value: 11.068999999999999 - type: map_at_3 value: 9.279 - type: map_at_5 value: 9.961 - type: mrr_at_1 value: 7.394 - type: mrr_at_10 value: 11.644 - type: mrr_at_100 value: 12.665000000000001 - type: mrr_at_1000 value: 12.761 - type: mrr_at_20 value: 12.251 - type: mrr_at_3 value: 10.413 - type: mrr_at_5 value: 11.087 - type: ndcg_at_1 value: 7.394 - type: ndcg_at_10 value: 13.081999999999999 - type: ndcg_at_100 value: 18.22 - type: ndcg_at_1000 value: 21.238 - type: ndcg_at_20 value: 15.084 - type: ndcg_at_3 value: 10.487 - type: ndcg_at_5 value: 11.671 - type: precision_at_1 value: 7.394 - type: precision_at_10 value: 2.292 - type: precision_at_100 value: 0.523 - type: precision_at_1000 value: 0.083 - type: precision_at_20 value: 1.608 - type: precision_at_3 value: 4.929 - type: precision_at_5 value: 3.5860000000000003 - type: recall_at_1 value: 6.691 - type: recall_at_10 value: 20.031 - type: recall_at_100 value: 44.35 - type: recall_at_1000 value: 67.857 - type: recall_at_20 value: 27.723 - type: recall_at_3 value: 12.76 - type: recall_at_5 value: 15.687000000000001 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: map_at_1 value: 3.218 - type: map_at_10 value: 5.554 - type: map_at_100 value: 6.216 - type: map_at_1000 value: 6.338000000000001 - type: map_at_20 value: 5.907 - type: map_at_3 value: 4.707 - type: map_at_5 value: 5.094 - type: mrr_at_1 value: 6.84 - type: mrr_at_10 value: 11.296000000000001 - type: mrr_at_100 value: 12.224 - type: mrr_at_1000 value: 12.31 - type: mrr_at_20 value: 11.791 - type: mrr_at_3 value: 9.609 - type: mrr_at_5 value: 10.404 - type: ndcg_at_1 value: 6.84 - type: ndcg_at_10 value: 8.346 - type: ndcg_at_100 value: 12.06 - type: ndcg_at_1000 value: 15.132000000000001 - type: ndcg_at_20 value: 9.652 - type: ndcg_at_3 value: 6.489000000000001 - type: ndcg_at_5 value: 7.045999999999999 - type: precision_at_1 value: 6.84 - type: precision_at_10 value: 2.658 - type: precision_at_100 value: 0.655 - type: precision_at_1000 value: 0.121 - type: precision_at_20 value: 1.863 - type: precision_at_3 value: 4.691 - type: precision_at_5 value: 3.6479999999999997 - type: 
recall_at_1 value: 3.218 - type: recall_at_10 value: 10.725 - type: recall_at_100 value: 24.131 - type: recall_at_1000 value: 42.106 - type: recall_at_20 value: 14.539 - type: recall_at_3 value: 6.3020000000000005 - type: recall_at_5 value: 7.763000000000001 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: map_at_1 value: 4.506 - type: map_at_10 value: 8.535 - type: map_at_100 value: 11.072 - type: map_at_1000 value: 11.764 - type: map_at_20 value: 9.492 - type: map_at_3 value: 6.697 - type: map_at_5 value: 7.452 - type: mrr_at_1 value: 36.75 - type: mrr_at_10 value: 46.35 - type: mrr_at_100 value: 47.034 - type: mrr_at_1000 value: 47.08 - type: mrr_at_20 value: 46.784 - type: mrr_at_3 value: 44.0 - type: mrr_at_5 value: 45.262 - type: ndcg_at_1 value: 29.25 - type: ndcg_at_10 value: 21.318 - type: ndcg_at_100 value: 23.449 - type: ndcg_at_1000 value: 29.267 - type: ndcg_at_20 value: 20.735 - type: ndcg_at_3 value: 24.45 - type: ndcg_at_5 value: 22.637999999999998 - type: precision_at_1 value: 36.75 - type: precision_at_10 value: 16.775000000000002 - type: precision_at_100 value: 5.212 - type: precision_at_1000 value: 1.167 - type: precision_at_20 value: 12.225 - type: precision_at_3 value: 26.917 - type: precision_at_5 value: 22.0 - type: recall_at_1 value: 4.506 - type: recall_at_10 value: 12.341000000000001 - type: recall_at_100 value: 26.723000000000003 - type: recall_at_1000 value: 46.293 - type: recall_at_20 value: 15.903 - type: recall_at_3 value: 7.994999999999999 - type: recall_at_5 value: 9.407 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 44.375 - type: f1 value: 39.487258967288 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: map_at_1 value: 16.572 - type: map_at_10 value: 22.349 - type: map_at_100 value: 23.145 - type: map_at_1000 value: 23.22 - type: map_at_20 value: 22.771 - type: map_at_3 value: 20.326 - type: map_at_5 value: 21.404 - type: mrr_at_1 value: 17.657 - type: mrr_at_10 value: 23.679 - type: mrr_at_100 value: 24.504 - type: mrr_at_1000 value: 24.576999999999998 - type: mrr_at_20 value: 24.122 - type: mrr_at_3 value: 21.557000000000002 - type: mrr_at_5 value: 22.695 - type: ndcg_at_1 value: 17.657 - type: ndcg_at_10 value: 26.081 - type: ndcg_at_100 value: 30.366 - type: ndcg_at_1000 value: 32.607 - type: ndcg_at_20 value: 27.608 - type: ndcg_at_3 value: 21.85 - type: ndcg_at_5 value: 23.796999999999997 - type: precision_at_1 value: 17.657 - type: precision_at_10 value: 3.968 - type: precision_at_100 value: 0.626 - type: precision_at_1000 value: 0.083 - type: precision_at_20 value: 2.3120000000000003 - type: precision_at_3 value: 8.951 - type: precision_at_5 value: 6.4 - type: recall_at_1 value: 16.572 - type: recall_at_10 value: 36.634 - type: recall_at_100 value: 57.135000000000005 - type: recall_at_1000 value: 74.832 - type: recall_at_20 value: 42.491 - type: recall_at_3 value: 25.087 - type: recall_at_5 value: 29.744999999999997 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: map_at_1 value: 4.891 - type: map_at_10 value: 8.346 - type: map_at_100 value: 9.286 - 
type: map_at_1000 value: 9.465 - type: map_at_20 value: 8.826 - type: map_at_3 value: 7.13 - type: map_at_5 value: 7.643999999999999 - type: mrr_at_1 value: 10.030999999999999 - type: mrr_at_10 value: 14.899000000000001 - type: mrr_at_100 value: 15.82 - type: mrr_at_1000 value: 15.931000000000001 - type: mrr_at_20 value: 15.408 - type: mrr_at_3 value: 13.169 - type: mrr_at_5 value: 13.971 - type: ndcg_at_1 value: 10.030999999999999 - type: ndcg_at_10 value: 11.713 - type: ndcg_at_100 value: 16.436999999999998 - type: ndcg_at_1000 value: 20.971999999999998 - type: ndcg_at_20 value: 13.341 - type: ndcg_at_3 value: 9.879999999999999 - type: ndcg_at_5 value: 10.249 - type: precision_at_1 value: 10.030999999999999 - type: precision_at_10 value: 3.519 - type: precision_at_100 value: 0.8330000000000001 - type: precision_at_1000 value: 0.16 - type: precision_at_20 value: 2.377 - type: precision_at_3 value: 6.687 - type: precision_at_5 value: 5.0 - type: recall_at_1 value: 4.891 - type: recall_at_10 value: 15.221000000000002 - type: recall_at_100 value: 33.432 - type: recall_at_1000 value: 62.475 - type: recall_at_20 value: 20.467 - type: recall_at_3 value: 9.393 - type: recall_at_5 value: 11.214 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: map_at_1 value: 22.856 - type: map_at_10 value: 30.656 - type: map_at_100 value: 31.447000000000003 - type: map_at_1000 value: 31.545 - type: map_at_20 value: 31.066 - type: map_at_3 value: 28.692 - type: map_at_5 value: 29.817 - type: mrr_at_1 value: 45.712 - type: mrr_at_10 value: 52.481 - type: mrr_at_100 value: 53.049 - type: mrr_at_1000 value: 53.09 - type: mrr_at_20 value: 52.803999999999995 - type: mrr_at_3 value: 50.709 - type: mrr_at_5 value: 51.795 - type: ndcg_at_1 value: 45.712 - type: ndcg_at_10 value: 38.381 - type: ndcg_at_100 value: 41.965 - type: ndcg_at_1000 value: 44.234 - type: ndcg_at_20 value: 39.657 - type: ndcg_at_3 value: 34.776 - type: ndcg_at_5 value: 36.622 - type: precision_at_1 value: 45.712 - type: precision_at_10 value: 8.062999999999999 - type: precision_at_100 value: 1.094 - type: precision_at_1000 value: 0.13999999999999999 - type: precision_at_20 value: 4.443 - type: precision_at_3 value: 21.476 - type: precision_at_5 value: 14.35 - type: recall_at_1 value: 22.856 - type: recall_at_10 value: 40.317 - type: recall_at_100 value: 54.705999999999996 - type: recall_at_1000 value: 69.892 - type: recall_at_20 value: 44.429 - type: recall_at_3 value: 32.214999999999996 - type: recall_at_5 value: 35.874 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 73.02000000000001 - type: ap value: 67.25944041954726 - type: f1 value: 72.8697134997555 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: map_at_1 value: 8.751000000000001 - type: map_at_10 value: 13.916999999999998 - type: map_at_100 value: 14.684 - type: map_at_1000 value: 14.766000000000002 - type: map_at_20 value: 14.338999999999999 - type: map_at_3 value: 12.197 - type: map_at_5 value: 13.163 - type: mrr_at_1 value: 8.911 - type: mrr_at_10 value: 14.198 - type: mrr_at_100 value: 14.960999999999999 - type: mrr_at_1000 value: 15.040000000000001 - type: mrr_at_20 value: 14.616999999999999 - type: mrr_at_3 
value: 12.452 - type: mrr_at_5 value: 13.427 - type: ndcg_at_1 value: 8.911 - type: ndcg_at_10 value: 16.963 - type: ndcg_at_100 value: 21.062 - type: ndcg_at_1000 value: 23.543 - type: ndcg_at_20 value: 18.482000000000003 - type: ndcg_at_3 value: 13.391 - type: ndcg_at_5 value: 15.139 - type: precision_at_1 value: 8.911 - type: precision_at_10 value: 2.741 - type: precision_at_100 value: 0.485 - type: precision_at_1000 value: 0.06999999999999999 - type: precision_at_20 value: 1.683 - type: precision_at_3 value: 5.688 - type: precision_at_5 value: 4.3069999999999995 - type: recall_at_1 value: 8.751000000000001 - type: recall_at_10 value: 26.368000000000002 - type: recall_at_100 value: 46.22 - type: recall_at_1000 value: 66.22 - type: recall_at_20 value: 32.291 - type: recall_at_3 value: 16.595 - type: recall_at_5 value: 20.802 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 89.87232102143183 - type: f1 value: 89.25570902684863 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 71.02599179206568 - type: f1 value: 52.14883678941826 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.74714189643576 - type: f1 value: 65.4738868705899 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.36381977135171 - type: f1 value: 71.5956356866047 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 27.418721421866266 - type: v_measures value: - 0.25699019421325164 - 0.2551070596948231 - 0.2691672146325009 - 0.263190709241409 - 0.25833683058459567 - 0.2969925236078273 - 0.2799007926692717 - 0.29259126151386433 - 0.2840268235473181 - 0.2855687324817643 - 0.25699019421325164 - 0.2551070596948231 - 0.2691672146325009 - 0.263190709241409 - 0.25833683058459567 - 0.2969925236078273 - 0.2799007926692717 - 0.29259126151386433 - 0.2840268235473181 - 0.2855687324817643 - 0.25699019421325164 - 0.2551070596948231 - 0.2691672146325009 - 0.263190709241409 - 0.25833683058459567 - 0.2969925236078273 - 0.2799007926692717 - 0.29259126151386433 - 0.2840268235473181 - 0.2855687324817643 - 0.25699019421325164 - 0.2551070596948231 - 0.2691672146325009 - 0.263190709241409 - 0.25833683058459567 - 0.2969925236078273 - 0.2799007926692717 - 0.29259126151386433 - 0.2840268235473181 - 0.2855687324817643 - 0.25699019421325164 - 0.2551070596948231 - 0.2691672146325009 - 0.263190709241409 - 0.25833683058459567 - 0.2969925236078273 - 0.2799007926692717 - 0.29259126151386433 - 0.2840268235473181 - 0.2855687324817643 - 0.25699019421325164 - 0.2551070596948231 - 0.2691672146325009 - 0.263190709241409 - 0.25833683058459567 - 0.2969925236078273 - 0.2799007926692717 - 0.29259126151386433 - 0.2840268235473181 - 0.2855687324817643 - 0.25699019421325164 - 0.2551070596948231 - 0.2691672146325009 - 0.263190709241409 - 0.25833683058459567 
- task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 22.40590099674712 - type: v_measures value: - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 -
0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 
0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - 0.20312599898502812 - 0.21028636757346386 - 0.2078091337066853 - 0.21248714226010795 - 0.2051414930300016 - 0.2430753205246834 - 0.23790607540735365 - 0.24673502894784635 - 0.23967523571775606 - 0.23434830352178554 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 29.924796610724826 - type: mrr value: 30.962158101843464 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: map_at_1 value: 1.3379999999999999 - type: map_at_10 value: 3.62 - type: map_at_100 value: 4.891 - type: map_at_1000 value: 5.87 - type: map_at_20 value: 4.164000000000001 - type: map_at_3 value: 2.608 - type: map_at_5 value: 3.1910000000000003 - type: mrr_at_1 value: 18.576 - type: mrr_at_10 
value: 26.487 - type: mrr_at_100 value: 27.736 - type: mrr_at_1000 value: 27.828000000000003 - type: mrr_at_20 value: 27.319 - type: mrr_at_3 value: 23.891000000000002 - type: mrr_at_5 value: 25.501 - type: ndcg_at_1 value: 17.957 - type: ndcg_at_10 value: 14.021 - type: ndcg_at_100 value: 14.41 - type: ndcg_at_1000 value: 24.197 - type: ndcg_at_20 value: 13.883000000000001 - type: ndcg_at_3 value: 15.913 - type: ndcg_at_5 value: 15.120000000000001 - type: precision_at_1 value: 18.576 - type: precision_at_10 value: 10.402000000000001 - type: precision_at_100 value: 4.334 - type: precision_at_1000 value: 1.661 - type: precision_at_20 value: 8.731 - type: precision_at_3 value: 15.067 - type: precision_at_5 value: 12.940999999999999 - type: recall_at_1 value: 1.3379999999999999 - type: recall_at_10 value: 6.711 - type: recall_at_100 value: 16.862 - type: recall_at_1000 value: 52.537 - type: recall_at_20 value: 9.89 - type: recall_at_3 value: 3.614 - type: recall_at_5 value: 5.428999999999999 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: map_at_1 value: 10.187 - type: map_at_10 value: 16.61 - type: map_at_100 value: 17.599 - type: map_at_1000 value: 17.689 - type: map_at_20 value: 17.141000000000002 - type: map_at_3 value: 14.405000000000001 - type: map_at_5 value: 15.543000000000001 - type: mrr_at_1 value: 11.327 - type: mrr_at_10 value: 18.184 - type: mrr_at_100 value: 19.137 - type: mrr_at_1000 value: 19.215 - type: mrr_at_20 value: 18.717 - type: mrr_at_3 value: 15.918 - type: mrr_at_5 value: 17.052 - type: ndcg_at_1 value: 11.327 - type: ndcg_at_10 value: 20.744 - type: ndcg_at_100 value: 25.865 - type: ndcg_at_1000 value: 28.419 - type: ndcg_at_20 value: 22.648 - type: ndcg_at_3 value: 16.147 - type: ndcg_at_5 value: 18.168 - type: precision_at_1 value: 11.327 - type: precision_at_10 value: 3.7220000000000004 - type: precision_at_100 value: 0.658 - type: precision_at_1000 value: 0.091 - type: precision_at_20 value: 2.294 - type: precision_at_3 value: 7.503 - type: precision_at_5 value: 5.608 - type: recall_at_1 value: 10.187 - type: recall_at_10 value: 32.051 - type: recall_at_100 value: 56.016 - type: recall_at_1000 value: 75.649 - type: recall_at_20 value: 39.267 - type: recall_at_3 value: 19.689 - type: recall_at_5 value: 24.445 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: map_at_1 value: 58.404 - type: map_at_10 value: 70.125 - type: map_at_100 value: 70.923 - type: map_at_1000 value: 70.968 - type: map_at_20 value: 70.60300000000001 - type: map_at_3 value: 67.342 - type: map_at_5 value: 68.97999999999999 - type: mrr_at_1 value: 67.29 - type: mrr_at_10 value: 74.773 - type: mrr_at_100 value: 75.093 - type: mrr_at_1000 value: 75.106 - type: mrr_at_20 value: 74.973 - type: mrr_at_3 value: 73.188 - type: mrr_at_5 value: 74.165 - type: ndcg_at_1 value: 67.33 - type: ndcg_at_10 value: 74.936 - type: ndcg_at_100 value: 77.479 - type: ndcg_at_1000 value: 78.147 - type: ndcg_at_20 value: 76.048 - type: ndcg_at_3 value: 71.30499999999999 - type: ndcg_at_5 value: 73.09400000000001 - type: precision_at_1 value: 67.33 - type: precision_at_10 value: 11.335 - type: precision_at_100 value: 1.385 - type: precision_at_1000 value: 0.151 - type: precision_at_20 value: 6.116 - type: precision_at_3 value: 30.833 - type: precision_at_5 value: 20.384 - type: recall_at_1 
value: 58.404 - type: recall_at_10 value: 84.138 - type: recall_at_100 value: 94.32000000000001 - type: recall_at_1000 value: 98.51299999999999 - type: recall_at_20 value: 87.996 - type: recall_at_3 value: 73.68400000000001 - type: recall_at_5 value: 78.681 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 26.713463922652704 - type: v_measures value: - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468
0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 
0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 
0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 
0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 
0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 
0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 
0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 
0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 
0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 
0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 0.27017447263584743 - 0.37586623336347835 - 0.2564531409603795 - 0.2262824317679402 - 0.21248869632976208 - 0.22661416857784017 - 0.35027209205919524 - 0.23589310962174836 - 0.22150586158775468 - 0.356358075769195 - 0.3011200622167429 - 0.22467375312763427 - 0.2394109956052364 - 0.2899555542978596 - 0.21406581833340438 - 0.326841157469233 - 0.20064055405544595 - 0.2089858781934912 - 0.22835715928471212 - 0.24742539971848806 - 0.36899923991825895 - 0.24701463701714044 - 0.2560178333573794 - 0.3552016140245526 - 0.23774804137045452 - 
  - task:
      type: Clustering
    dataset:
      name: MTEB RedditClusteringP2P
      type: mteb/reddit-clustering-p2p
      config: default
      split: test
      revision: 385e3cb46b4cfa89021f56c4380204149d0efe33
    metrics:
    - type: v_measure
      value: 44.135854520709856
    - type: v_measures
      value:
      - 0.4992205891430278
      - 0.5024470494091208
      - 0.525745119896455
      - 0.30230336838014243
      - 0.4915802304493441
      - 0.4481785980399149
      - 0.18082183331189022
      - 0.5004539942242847
      - 0.4503725957205808
      - 0.5124620734962252
  - task:
      type: Retrieval
    dataset:
      name: MTEB SCIDOCS
      type: mteb/scidocs
      config: default
      split: test
      revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88
    metrics:
    - type: map_at_1
      value: 2.1350000000000002
    - type: map_at_10
      value: 5.118
    - type: map_at_100
      value: 6.08
    - type: map_at_1000
      value: 6.308
    - type: map_at_20
      value: 5.562
    - type: map_at_3
      value: 3.804
    - type: map_at_5
      value: 4.468
    - type: mrr_at_1
      value: 10.5
    - type: mrr_at_10
      value: 17.278
    - type: mrr_at_100
      value: 18.418
    - type: mrr_at_1000
      value: 18.526
    - type: mrr_at_20
      value: 17.876
    - type: mrr_at_3
      value: 14.832999999999998
    - type: mrr_at_5
      value: 16.317999999999998
    - type: ndcg_at_1
      value: 10.5
    - type: ndcg_at_10
      value: 9.39
    - type: ndcg_at_100
      value: 14.362
    - type: ndcg_at_1000
      value: 19.524
    - type: ndcg_at_20
      value: 10.949
    - type: ndcg_at_3
      value: 8.794
    - type: ndcg_at_5
      value: 7.789
    - type: precision_at_1
      value: 10.5
    - type: precision_at_10
      value: 4.91
    - type: precision_at_100
      value: 1.221
    - type: precision_at_1000
      value: 0.247
    - type: precision_at_20
      value: 3.36
    - type: precision_at_3
      value: 8.233
    - type: precision_at_5
      value: 6.9
    - type: recall_at_1
      value: 2.1350000000000002
    - type: recall_at_10
      value: 9.955
    - type: recall_at_100
      value: 24.778
    - type: recall_at_1000
      value: 50.222
    - type: recall_at_20
      value: 13.63
    - type: recall_at_3
      value: 5.01
    - type: recall_at_5
      value: 6.995
  - task:
      type: STS
    dataset:
      name: MTEB SICK-R
      type: mteb/sickr-sts
      config: default
      split: test
      revision: 20a6d6f312dd54037fe07a32d58e5e168867909d
    metrics:
    - type: cos_sim_pearson
      value: 78.43659263950201
    - type: cos_sim_spearman
      value: 74.68461406509039
    - type: euclidean_pearson
      value: 76.31168073146695
    - type: euclidean_spearman
      value: 75.13681406263804
    - type: manhattan_pearson
      value: 76.2960985430519
    - type: manhattan_spearman
      value: 75.03513932091352
  - task:
      type: STS
    dataset:
      name: MTEB STS12
      type: mteb/sts12-sts
      config: default
      split: test
      revision: a0d554a64d88156834ff5ae9920b964011b16384
    metrics:
    - type: cos_sim_pearson
      value: 55.096195345864295
    - type: cos_sim_spearman
      value: 54.34570729554049
    - type: euclidean_pearson
      value: 64.79488422312815
    - type: euclidean_spearman
      value: 61.19116930098903
    - type: manhattan_pearson
      value: 65.04388378143294
    - type: manhattan_spearman
      value: 61.33457037020176
  - task:
      type: STS
    dataset:
      name: MTEB STS13
      type: mteb/sts13-sts
      config: default
      split: test
      revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca
    metrics:
    - type: cos_sim_pearson
      value: 71.40902040706975
    - type: cos_sim_spearman
      value: 74.24315395719762
    - type: euclidean_pearson
      value: 75.94675003130055
    - type: euclidean_spearman
      value: 76.18445285168187
    - type: manhattan_pearson
      value: 75.88786726620313
    - type: manhattan_spearman
      value: 76.1188105671321
  - task:
      type: STS
    dataset:
      name: MTEB STS14
      type: mteb/sts14-sts
      config: default
      split: test
      revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375
    metrics:
    - type: cos_sim_pearson
      value: 71.9514442512574
    - type: cos_sim_spearman
      value: 69.99484176761607
    - type: euclidean_pearson
      value: 75.02706002860513
    - type: euclidean_spearman
      value: 72.9036480559019
    - type: manhattan_pearson
      value: 75.03815961673163
    - type: manhattan_spearman
      value: 72.92353672671821
  - task:
      type: STS
    dataset:
      name: MTEB STS15
      type: mteb/sts15-sts
      config: default
      split: test
      revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3
    metrics:
    - type: cos_sim_pearson
      value: 72.80522195974591
    - type: cos_sim_spearman
      value: 75.73762657362906
    - type: euclidean_pearson
      value: 80.1521753666007
    - type: euclidean_spearman
      value: 80.25738481137047
    - type: manhattan_pearson
      value: 80.19317991797196
    - type: manhattan_spearman
      value: 80.31866668763018
  - task:
      type: STS
    dataset:
      name: MTEB STS16
      type: mteb/sts16-sts
      config: default
      split: test
      revision: 4d8694f8f0e0100860b497b999b3dbed754a0513
    metrics:
    - type: cos_sim_pearson
      value: 69.45092072084951
    - type: cos_sim_spearman
      value: 73.6472761328024
    - type: euclidean_pearson
      value: 74.95031941602217
    - type: euclidean_spearman
      value: 75.37029502504294
    - type: manhattan_pearson
      value: 74.7846441654404
    - type: manhattan_spearman
      value: 75.19664481480419
  - task:
      type: STS
    dataset:
      name: MTEB STS17 (en-en)
      type: mteb/sts17-crosslingual-sts
      config: en-en
      split: test
      revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d
    metrics:
    - type: cos_sim_pearson
      value: 82.66021611621103
    - type: cos_sim_spearman
      value: 84.81452353756737
    - type: euclidean_pearson
      value: 85.32338150846037
    - type: euclidean_spearman
      value: 85.46672916577448
    - type: manhattan_pearson
      value: 84.86427674633184
    - type: manhattan_spearman
      value: 85.098246631915
  - task:
      type: STS
    dataset:
      name: MTEB STS22 (en)
      type: mteb/sts22-crosslingual-sts
      config: en
      split: test
      revision: eea2b4fe26a775864c896887d910b76a8098ad3f
    metrics:
    - type: cos_sim_pearson
      value: 56.880105002604566
    - type: cos_sim_spearman
      value: 62.56487199261157
    - type: euclidean_pearson
      value: 57.49369653074593
    - type: euclidean_spearman
      value: 61.038143206328854
    - type: manhattan_pearson
      value: 57.85496348413732
    - type: manhattan_spearman
      value: 61.22736674852764
  - task:
      type: STS
    dataset:
      name: MTEB STSBenchmark
      type: mteb/stsbenchmark-sts
      config: default
      split: test
      revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831
    metrics:
    - type: cos_sim_pearson
      value: 75.41209102908195
    - type: cos_sim_spearman
      value: 76.72196352753278
    - type: euclidean_pearson
      value: 79.97933288080695
    - type: euclidean_spearman
      value: 79.36350387100728
    - type: manhattan_pearson
      value: 79.89865614781017
    - type: manhattan_spearman
      value: 79.36099141428603
  - task:
      type: Reranking
    dataset:
      name: MTEB SciDocsRR
      type: mteb/scidocs-reranking
      config: default
      split: test
      revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab
    metrics:
    - type: map
      value: 70.81824436527221
    - type: mrr
      value: 90.04096937920467
  - task:
      type: Retrieval
    dataset:
      name: MTEB SciFact
      type: mteb/scifact
      config: default
      split: test
      revision: 0228b52cf27578f30900b9e5271d331663a030d7
    metrics:
    - type: map_at_1
      value: 33.567
    - type: map_at_10
      value: 41.409
    - type: map_at_100
      value: 42.281
    - type: map_at_1000
      value: 42.358000000000004
    - type: map_at_20
      value: 41.916
    - type: map_at_3
      value: 38.784
    - type: map_at_5
      value: 40.355999999999995
    - type: mrr_at_1
      value: 35.667
    - type: mrr_at_10
      value: 43.189
    - type: mrr_at_100
      value: 43.885000000000005
    - type: mrr_at_1000
      value: 43.95
    - type: mrr_at_20
      value: 43.584
    - type: mrr_at_3
      value: 41.0
    - type: mrr_at_5
      value: 42.266999999999996
    - type: ndcg_at_1
      value: 35.667
    - type: ndcg_at_10
      value: 45.999
    - type: ndcg_at_100
      value: 50.153000000000006
    - type: ndcg_at_1000
      value: 52.161
    - type: ndcg_at_20
      value: 47.662
    - type: ndcg_at_3
      value: 41.178
    - type: ndcg_at_5
      value: 43.59
    - type: precision_at_1
      value: 35.667
    - type: precision_at_10
      value: 6.6000000000000005
    - type: precision_at_100
      value: 0.89
    - type: precision_at_1000
      value: 0.106
    - type: precision_at_20
      value: 3.6830000000000003
    - type: precision_at_3
      value: 16.556
    - type: precision_at_5
      value: 11.466999999999999
    - type: recall_at_1
      value: 33.567
    - type: recall_at_10
      value: 58.599999999999994
    - type: recall_at_100
      value: 77.9
    - type: recall_at_1000
      value: 93.667
    - type: recall_at_20
      value: 64.878
    - type: recall_at_3
      value: 45.483000000000004
    - type: recall_at_5
      value: 51.4
  - task:
      type: PairClassification
    dataset:
      name: MTEB SprintDuplicateQuestions
      type: mteb/sprintduplicatequestions-pairclassification
      config: default
      split: test
      revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46
    metrics:
    - type: cos_sim_accuracy
      value: 99.6930693069307
    - type: cos_sim_ap
      value: 89.25594498972691
    - type: cos_sim_f1
      value: 83.84499245093104
    - type: cos_sim_precision
      value: 84.39716312056737
    - type: cos_sim_recall
      value: 83.3
    - type: dot_accuracy
      value: 99.48514851485149
    - type: dot_ap
      value: 75.92127370670867
    - type: dot_f1
      value: 71.16104868913857
    - type: dot_precision
      value: 76.52474108170311
    - type: dot_recall
      value: 66.5
    - type: euclidean_accuracy
      value: 99.6891089108911
    - type: euclidean_ap
      value: 89.2180446358921
    - type: euclidean_f1
      value: 83.57142857142857
    - type: euclidean_precision
      value: 85.3125
    - type: euclidean_recall
      value: 81.89999999999999
    - type: manhattan_accuracy
      value: 99.6980198019802
    - type: manhattan_ap
      value: 89.43047814044381
    - type: manhattan_f1
      value: 84.07445708376422
    - type: manhattan_precision
      value: 87.04496788008565
    - type: manhattan_recall
      value: 81.3
    - type: max_accuracy
      value: 99.6980198019802
    - type: max_ap
      value: 89.43047814044381
    - type: max_f1
      value: 84.07445708376422
  - task:
      type: Clustering
    dataset:
      name: MTEB StackExchangeClustering
      type: mteb/stackexchange-clustering
      config: default
      split: test
      revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259
    metrics:
    - type: v_measure
      value: 32.83904946173562
    - type: v_measures
      value:
      - 0.30110380679104903
      - 0.3953932981762184
      - 0.24615493206657874
      - 0.36457921033081425
      - 0.37818468307341996
      - 0.2458717382277342
      - 0.24597349476879382
      - 0.355495518705052
      - 0.32617546899939204
      - 0.3316784933295811
      - 0.4879686282712542
      - 0.4493952612804797
      - 0.4289659003483834
      - 0.25736076606300134
      - 0.31347948561233624
      - 0.32945691057021553
      - 0.2802921851023466
      - 0.30108517991402206
      - 0.2906340312531131
      - 0.3176973104574197
      - 0.32121506900305036
      - 0.27178906328240593
      - 0.2736797450244378
      - 0.3448789501821934
      - 0.3512532346006118
0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 
0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 
0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 
0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 
0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 
0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 
0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 
0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - 0.30110380679104903 - 0.3953932981762184 - 0.24615493206657874 - 0.36457921033081425 - 0.37818468307341996 - 0.2458717382277342 - 0.24597349476879382 - 0.355495518705052 - 0.32617546899939204 - 0.3316784933295811 - 0.4879686282712542 - 0.4493952612804797 - 0.4289659003483834 - 0.25736076606300134 - 0.31347948561233624 - 0.32945691057021553 - 0.2802921851023466 - 0.30108517991402206 - 0.2906340312531131 - 0.3176973104574197 - 0.32121506900305036 - 0.27178906328240593 - 0.2736797450244378 - 0.3448789501821934 - 0.3512532346006118 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 
27.476810145753827 - type: v_measures value: - 0.262007031213021 - 0.2603632068581035 - 0.25388262071363726 - 0.25745089384059566 - 0.257990103854705 - 0.29704373180003885 - 0.28480533084783555 - 0.286509500865553 - 0.2947033679639156 - 0.2929252266179773 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default
split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 43.14055223869571 - type: mrr value: 43.506533295136244 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.24218821701958 - type: cos_sim_spearman value: 29.907749825179124 - type: dot_pearson value: 27.348198725124227 - type: dot_spearman value: 25.950835375041038 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: map_at_1 value: 0.1 - type: map_at_10 value: 0.505 - type: map_at_100 value: 2.207 - type: map_at_1000 value: 6.0600000000000005 - type: map_at_20 value: 0.814 - type: map_at_3 value: 0.218 - type: map_at_5 value: 0.329 - type: mrr_at_1 value: 44.0 - type: mrr_at_10 value: 54.763 - type: mrr_at_100 value: 55.345 - type: mrr_at_1000 value: 55.349000000000004 - type: mrr_at_20 value: 55.035000000000004 - type: mrr_at_3 value: 51.333 - type: mrr_at_5 value: 52.632999999999996 - type: ndcg_at_1 value: 39.0 - type: ndcg_at_10 value: 30.272 - type: ndcg_at_100 value: 21.906 - type: ndcg_at_1000 value: 22.439 - type: ndcg_at_20 value: 28.316000000000003 - type: ndcg_at_3 value: 35.235 - type: ndcg_at_5 value: 33.843 - type: precision_at_1 value: 44.0 - type: precision_at_10 value: 32.0 - type: precision_at_100 value: 22.5 - type: precision_at_1000 value: 10.9 - type: precision_at_20 value: 29.7 - type: precision_at_3 value: 38.0 - type: precision_at_5 value: 36.0 - type: recall_at_1 value: 0.1 - type: recall_at_10 value: 0.719 - type: recall_at_100 value: 4.7620000000000005 - type: recall_at_1000 value: 22.285 - type: recall_at_20 value: 1.277 - type: recall_at_3 value: 0.244 - type: recall_at_5 value: 0.40299999999999997 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: map_at_1 value: 0.865 - type: map_at_10 value: 2.962 - type: map_at_100 value: 5.713 - type: map_at_1000 value: 6.719 - type: map_at_20 value: 3.939 - type: map_at_3 value: 1.582 - type: map_at_5 value: 2.215 - type: mrr_at_1 value: 14.285999999999998 - type: mrr_at_10 value: 24.844 - type: mrr_at_100 value: 26.861 - type: mrr_at_1000 value: 26.904 - type: mrr_at_20 value: 26.375999999999998 - type: mrr_at_3 value: 20.068 - type: mrr_at_5 value: 22.619 - type: ndcg_at_1 value: 12.245000000000001 - type: ndcg_at_10 value: 10.508000000000001 - type: ndcg_at_100 value: 18.935 - type: ndcg_at_1000 value: 29.747 - type: ndcg_at_20 value: 11.701 - type: ndcg_at_3 value: 10.381 - type: ndcg_at_5 value: 11.339 - type: precision_at_1 value: 14.285999999999998 - type: precision_at_10 value: 10.612 - type: precision_at_100 value: 4.531000000000001 - type: precision_at_1000 value: 1.133 - type: precision_at_20 value: 8.98 - type: precision_at_3 value: 11.565 - type: precision_at_5 value: 12.653 - type: recall_at_1 value: 0.865 - type: recall_at_10 value: 6.493 - type: recall_at_100 value: 28.16 - type: recall_at_1000 value: 61.026 - type: recall_at_20 value: 11.726 - type: recall_at_3 value: 2.221 - type: recall_at_5 value: 3.849 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 
64.4091796875 - type: ap value: 11.076947197887051 - type: f1 value: 49.07978901357373 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 59.663271080928126 - type: f1 value: 59.99492026885337 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 26.09282097093625 - type: v_measures value: - 0.26849676299945785 - 0.2669514566616348 - 0.2891149570883449 - 0.24392859342532378 - 0.22545659657952322 - 0.27033814887951974 - 0.25403361548721237 - 0.27404718032226466 - 0.23497638522536846 - 0.28193840042497487 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 84.88406747332658 - type: cos_sim_ap value: 69.26105491403395 - type: cos_sim_f1 value: 65.52488910793494 - type: cos_sim_precision value: 61.465557096625055 - type: cos_sim_recall value: 70.15831134564644 - type: dot_accuracy value: 82.16606067830959 - type: dot_ap value: 61.09102948421686 - type: dot_f1 value: 57.59054713588492 - type: dot_precision value: 56.106106106106104 - type: dot_recall value: 59.155672823219 - type: euclidean_accuracy value: 84.85426476724086 - type: euclidean_ap value: 69.32917418684202 - type: euclidean_f1 value: 65.59770252482949 - type: euclidean_precision value: 60.01751696956427 - type: euclidean_recall value: 72.32189973614776 - type: manhattan_accuracy value: 84.83638314358943 - type: manhattan_ap value: 69.13012845791405 - type: manhattan_f1 value: 65.35336124107363 - type: manhattan_precision value: 61.26500461680517 - type: manhattan_recall value: 70.0263852242744 - type: max_accuracy value: 84.88406747332658 - type: max_ap value: 69.32917418684202 - type: max_f1 value: 65.59770252482949 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 87.81387045445726 - type: cos_sim_ap value: 83.19376576098023 - type: cos_sim_f1 value: 75.85641331494391 - type: cos_sim_precision value: 73.52409856203484 - type: cos_sim_recall value: 78.34154604250077 - type: dot_accuracy value: 85.33007334963325 - type: dot_ap value: 75.69925817222503 - type: dot_f1 value: 70.44983722994968 - type: dot_precision value: 67.80119624038736 - type: dot_recall value: 73.31382814906067 - type: euclidean_accuracy value: 87.78864439011139 - type: euclidean_ap value: 83.33289584854239 - type: euclidean_f1 value: 75.70217471433837 - type: euclidean_precision value: 72.61349172677131 - type: euclidean_recall value: 79.06529103788112 - type: manhattan_accuracy value: 87.73819226141964 - type: manhattan_ap value: 83.29254385989515 - type: manhattan_f1 value: 75.70975618644992 - type: manhattan_precision value: 71.8773787281157 - type: manhattan_recall value:
79.97382198952879 - type: max_accuracy value: 87.81387045445726 - type: max_ap value: 83.33289584854239 - type: max_f1 value: 75.85641331494391 --- # Venusaur This is a distill of [Bulbasaur](https://huggingface.co/Mihaiii/Bulbasaur) using [qa-assistant](https://huggingface.co/datasets/Mihaiii/qa-assistant). ## Intended purpose <span style="color:blue">This model is designed for use in semantic-autocomplete ([click here for demo](https://mihaiii.github.io/semantic-autocomplete/)).</span> ## Usage (Sentence-Transformers) (same as [gte-tiny](https://huggingface.co/TaylorAI/gte-tiny)) Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed: ``` pip install -U sentence-transformers ``` Then you can use the model like this: ```python from sentence_transformers import SentenceTransformer sentences = ["This is an example sentence", "Each sentence is converted"] model = SentenceTransformer('Mihaiii/Venusaur') embeddings = model.encode(sentences) print(embeddings) ``` ## Usage (HuggingFace Transformers) (same as [gte-tiny](https://huggingface.co/TaylorAI/gte-tiny)) Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings. ```python from transformers import AutoTokenizer, AutoModel import torch #Mean Pooling - Take attention mask into account for correct averaging def mean_pooling(model_output, attention_mask): token_embeddings = model_output[0] #First element of model_output contains all token embeddings input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float() return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9) # Sentences we want sentence embeddings for sentences = ['This is an example sentence', 'Each sentence is converted'] # Load model from HuggingFace Hub tokenizer = AutoTokenizer.from_pretrained('Mihaiii/Venusaur') model = AutoModel.from_pretrained('Mihaiii/Venusaur') # Tokenize sentences encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt') # Compute token embeddings with torch.no_grad(): model_output = model(**encoded_input) # Perform pooling. In this case, mean pooling. sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask']) print("Sentence embeddings:") print(sentence_embeddings) ``` ### Limitation (same as [gte-small](https://huggingface.co/thenlper/gte-small)) This model exclusively caters to English texts, and any lengthy texts will be truncated to a maximum of 512 tokens.
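### Example: ranking candidates by similarity

As a small extension of the usage above (not part of the original card), the embeddings can be scored against each other for semantic search or autocomplete-style ranking. The query and candidate strings below are purely illustrative:

```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer('Mihaiii/Venusaur')

# Illustrative data: a short query and a few candidate completions
query = "How do I reset my password?"
candidates = [
    "Steps to recover a forgotten password",
    "Pricing plans and billing options",
    "Update your account email address",
]

# Encode the query and candidates, then rank candidates by cosine similarity
query_emb = model.encode(query, convert_to_tensor=True)
cand_embs = model.encode(candidates, convert_to_tensor=True)
scores = util.cos_sim(query_emb, cand_embs)[0]

for cand, score in sorted(zip(candidates, scores.tolist()), key=lambda x: -x[1]):
    print(f"{score:.3f}  {cand}")
```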
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
sultan/BioM-ELECTRA-Large-Discriminator
sultan
null
[ "transformers", "pytorch", "electra", "pretraining", "endpoints_compatible", "region:us" ]
2022-03-02T23:29:05
2023-11-04T23:07:03
184
1
--- {} --- # BioM-Transformers: Building Large Biomedical Language Models with BERT, ALBERT and ELECTRA # Abstract The impact of design choices on the performance of biomedical language models recently has been a subject for investigation. In this paper, we empirically study biomedical domain adaptation with large transformer models using different design choices. We evaluate the performance of our pretrained models against other existing biomedical language models in the literature. Our results show that we achieve state-of-the-art results on several biomedical domain tasks despite using similar or less computational cost compared to other models in the literature. Our findings highlight the significant effect of design choices on improving the performance of biomedical language models. # Model Description This model was pre-trained on PubMed Abstracts only with a biomedical domain vocabulary for 434K steps with a batch size of 4096 on a TPUv3-512 unit. To help researchers with limited resources fine-tune larger models, we created an example with PyTorch XLA. PyTorch XLA (https://github.com/pytorch/xla) is a library that allows you to run PyTorch on TPU units, which are provided for free by Google Colab and Kaggle. Follow this example to work with PyTorch/XLA: [Link](https://github.com/salrowili/BioM-Transformers/blob/main/examples/Fine_Tuning_Biomedical_Models_on_Text_Classification_Task_With_HuggingFace_Transformers_and_PyTorch_XLA.ipynb) Check our GitHub repo at https://github.com/salrowili/BioM-Transformers for TensorFlow and GluonNLP checkpoints. We also updated this repo with a couple of examples on how to fine-tune LMs on text classification and question answering tasks such as ChemProt, SQuAD, and BioASQ. # Colab Notebook Examples BioM-ELECTRA-Large on NER and ChemProt Task [![Open In Colab][COLAB]](https://colab.research.google.com/github/salrowili/BioM-Transformers/blob/main/examples/Example_of_NER_and_ChemProt_Task_on_TPU.ipynb) BioM-ELECTRA-Large on SQuAD2.0 and BioASQ7B Factoid tasks [![Open In Colab][COLAB]](https://colab.research.google.com/github/salrowili/BioM-Transformers/blob/main/examples/Example_of_SQuAD2_0_and_BioASQ7B_tasks_with_BioM_ELECTRA_Large_on_TPU.ipynb) BioM-ALBERT-xxlarge on SQuAD2.0 and BioASQ7B Factoid tasks [![Open In Colab][COLAB]](https://colab.research.google.com/github/salrowili/BioM-Transformers/blob/main/examples/Example_of_SQuAD2_0_and_BioASQ7B_tasks_with_BioM_ALBERT_xxlarge_on_TPU.ipynb) Text Classification Task With HuggingFace Transformers and PyTorch XLA on Free TPU [![Open In Colab][COLAB]](https://colab.research.google.com/github/salrowili/BioM-Transformers/blob/main/examples/Fine_Tuning_Biomedical_Models_on_Text_Classification_Task_With_HuggingFace_Transformers_and_PyTorch_XLA.ipynb) Reproducing our BLURB results with JAX [![Open In Colab][COLAB]](https://colab.research.google.com/github/salrowili/BioM-Transformers/blob/main/examples/BLURB_LeaderBoard_with_TPU_VM.ipynb) Fine-tuning BioM-Transformers with JAX/Flax on TPUv3-8 with free Kaggle resources [![Open In Colab][COLAB]](https://www.kaggle.com/code/sultanalrowili/biom-transoformers-with-flax-on-tpu-with-kaggle) [COLAB]: https://colab.research.google.com/assets/colab-badge.svg # Acknowledgment We would like to acknowledge the support of the TensorFlow Research Cloud (TFRC) team, which granted us access to TPUv3 units.
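# Minimal Usage Sketch

As a hedged complement to the Colab notebooks above (this snippet is not part of the original examples), the discriminator checkpoint can be loaded with Hugging Face Transformers for downstream fine-tuning. The 2-label classification head and the example sentence are illustrative assumptions; real tasks such as ChemProt use their own label sets.

```python
from transformers import AutoTokenizer, AutoModelForSequenceClassification

model_name = "sultan/BioM-ELECTRA-Large-Discriminator"
tokenizer = AutoTokenizer.from_pretrained(model_name)
# num_labels=2 is a placeholder assumption; the classification head is randomly
# initialized and only becomes useful after fine-tuning on a labeled dataset.
model = AutoModelForSequenceClassification.from_pretrained(model_name, num_labels=2)

text = "Aspirin inhibits cyclooxygenase activity."  # illustrative biomedical sentence
inputs = tokenizer(text, return_tensors="pt", truncation=True)
outputs = model(**inputs)
print(outputs.logits.shape)  # torch.Size([1, 2]) before any fine-tuning
```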
# Citation ```bibtex @inproceedings{alrowili-shanker-2021-biom, title = "{B}io{M}-Transformers: Building Large Biomedical Language Models with {BERT}, {ALBERT} and {ELECTRA}", author = "Alrowili, Sultan and Shanker, Vijay", booktitle = "Proceedings of the 20th Workshop on Biomedical Language Processing", month = jun, year = "2021", address = "Online", publisher = "Association for Computational Linguistics", url = "https://www.aclweb.org/anthology/2021.bionlp-1.24", pages = "221--227", abstract = "The impact of design choices on the performance of biomedical language models recently has been a subject for investigation. In this paper, we empirically study biomedical domain adaptation with large transformer models using different design choices. We evaluate the performance of our pretrained models against other existing biomedical language models in the literature. Our results show that we achieve state-of-the-art results on several biomedical domain tasks despite using similar or less computational cost compared to other models in the literature. Our findings highlight the significant effect of design choices on improving the performance of biomedical language models.", } ```
[ "TEXT_CLASSIFICATION" ]
[ "BLURB", "CHEMPROT" ]
nickprock/Italian-ModernBERT-base-embed-mmarco-triplet
nickprock
sentence-similarity
[ "sentence-transformers", "safetensors", "modernbert", "sentence-similarity", "feature-extraction", "generated_from_trainer", "dataset_size:39780811", "loss:TripletLoss", "arxiv:1908.10084", "arxiv:1703.07737", "base_model:DeepMount00/Italian-ModernBERT-base", "base_model:finetune:DeepMount00/Italian-ModernBERT-base", "model-index", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2025-02-25T11:57:03
2025-02-25T14:17:05
183
4
--- base_model: DeepMount00/Italian-ModernBERT-base library_name: sentence-transformers metrics: - cosine_accuracy pipeline_tag: sentence-similarity tags: - sentence-transformers - sentence-similarity - feature-extraction - generated_from_trainer - dataset_size:39780811 - loss:TripletLoss widget: - source_sentence: il lichen sclerosus può influenzare l'ano? sentences: - 1 HSV 1 di solito provoca l'herpes sulla bocca. 2 Le persone con il virus possono avere herpes labiale o bolle di febbre sulla bocca. 3 L'HSV 1 può diffondersi ai genitali durante il sesso orale.4 L'HSV2 di solito causa l'herpes intorno ai genitali o all'ano. 5 Le persone con questo virus possono avere piaghe o vesciche intorno ai genitali o all'ano. L'HSV 1 può essere diffuso ai genitali durante il sesso orale. 2 L'HSV2 di solito causa l'herpes intorno ai genitali o all'ano. 3 Le persone con questo virus possono avere piaghe o vesciche intorno ai genitali o all'ano. - 4a edizione del Delray Beach Wine & Seafood Festival 7 e 8 novembre Giunto alla sua quarta edizione, il Delray Beach Wine & Seafood Festival 2015 offre ai visitatori l'opportunità di passeggiare per le strade fiancheggiate da palme di Delray sorseggiando un ottimo bicchiere di vino o un piatto di prelibatezze frutti di mare. - Il lichen sclerosus può colpire la pelle in qualsiasi parte del corpo. Ma più spesso coinvolge la pelle della vulva, il prepuzio del pene o la pelle intorno all'ano. Chiunque può contrarre il lichen sclerosus, ma le donne in postmenopausa hanno un rischio elevato. A volte il lichen sclerosus migliora da solo e non avrai bisogno di alcun trattamento. Il ichen sclerosus può colpire la pelle in qualsiasi parte del corpo. Ma più spesso coinvolge la pelle della vulva, il prepuzio del pene o la pelle intorno all'ano. Chiunque può contrarre il lichen sclerosus, ma le donne in postmenopausa hanno un rischio elevato. - source_sentence: quanto tempo grigliare una patata al forno? sentences: - 'Da g, grammo a lb, quantità di libbre. Quantità: 1 g, grammo di Patata dolce, cotta, al forno con la buccia, senza sale. Equivale a: 0.0022 di libbre, libbra di patate dolci, cotte, al forno con la pelle, senza sale. TOGGLE: da lb, pound a g, quantità in grammi al contrario. Inserisci un nuovo valore g, grammo da cui convertire.' - USA / Florida / Minneola / Mondo / USA / Florida / Minneola World / Stati Uniti / Florida. lago Aggiungi categoria. È a forma di fagiolo e raggiunge una profondità di 26 piedi vicino alla costa di Clermont. lakewatch.ifas.ufl.edu/RevisedMaps05/LakeMaps/MinneolaL... 96 piedi sopra il livello del mare. - Grigliare le patate al forno intere richiede 30-45 minuti al cartoccio sulla griglia, se le patate sono intere. Se hai poco tempo, sbollenta le patate per 10 minuti prima di metterle sulla griglia o nel microonde per 3 minuti per lato e dovrai solo grigliare le patate al forno per 5-10 minuti. Verifica se le patate sono cotte correttamente frugando con uno stuzzicadenti. Grigliare le patate al forno che sono state tagliate a fette o a spicchi richiede solo 5-10 minuti sulla griglia a fuoco medio, a seconda dello spessore dei pezzi di patate. Per vedere se le patate sono cotte correttamente, picchiettandole con uno stuzzicadenti. Grigliare le patate al forno tagliate a fette o a spicchi richiede solo 5-10 minuti sulla griglia a fuoco medio, a seconda dello spessore dei pezzi di patate. - source_sentence: cosa sono le razze prepotenti? 
sentences: - Le razze Bully (Staffordshire Bull Terrier, American Staffordshire Terrier e American Pit Bull Terrier) sono un gruppo di tre razze spesso conosciute collettivamente come Pit Bull, ma molti allevatori di Staffordshire Bull Terrier e American Staffordshire Terrier disprezzano notevolmente questo termine. le razze iniziarono nel 1835. In quell'anno, il Parlamento britannico approvò il Cruelty to Animals Act. Questo atto ha vietato due degli sport più popolari in Inghilterra, Bear Baiting e Bull Baiting; sport di sangue che opponevano mastini contro orsi e bulldog contro tori, spesso in combattimenti all'ultimo sangue. - diritto distributivo. (matematica). Una regola che stabilisce come devono comportarsi l'una rispetto all'altra due operazioni binarie su un insieme; in particolare, se +, ° sono due di tali operazioni allora ° distribuisce su + significa a ° (b + c) = (a ° b) + (a ° c) per tutti gli a,b,c nell'insieme.1 Facebook. 2 Twitter.legge distributiva. (matematica). Una regola che stabilisce come devono comportarsi l'una rispetto all'altra due operazioni binarie su un insieme; in particolare, se +, ° sono due di tali operazioni allora ° distribuisce su + significa a ° (b + c) = (a ° b) + (a ° c) per tutti gli a,b,c dell'insieme. 1 Facebook. - Contare le pecore. Razze ovine. È difficile sapere quante razze di pecore ci siano nel mondo, poiché solo i paesi sviluppati di solito mantengono registri di razza. Tuttavia, si ritiene che esistano più razze ovine che razze di qualsiasi altra specie di bestiame, ad eccezione del pollame. In tutto il mondo, si stima che ci siano più di 1000 razze ovine distinte. Ci sono più di 50 razze solo negli Stati Uniti. - source_sentence: cos'è il documento sull'architettura del software? sentences: - 'Migliora le funzioni renali: poiché il cavolo può aiutare a mantenere sotto controllo il livello di zucchero nel sangue, aiuta anche a mantenere le funzioni renali. Quando un diabetico ha livelli di zucchero nel sangue estremamente alti (oltre 600 mg/dl), i reni cercano di eliminare lo zucchero nel sangue in eccesso eliminandolo attraverso l''urina.' - Questo documento descrive in dettaglio quali versioni di IBM JDK sono supportate in ogni versione e release del sistema operativo IBM i. Ambiente. Risolvere il problema. Per ulteriori spiegazioni sui JDK IBM i e su come utilizzarli nell'ambiente del server IBM i, fare riferimento al seguente documento tecnico del software IBM. - Il Software Architecture Document (SAD) fornisce una panoramica completa dell'architettura dell'Online Catering Service 1.0 offerto da Yummy Inc. Presenta una serie di diverse visualizzazioni dell'architettura per rappresentare i diversi aspetti del sistema. - source_sentence: qual è il più grande il mare o l'oceano sentences: - L'Oceano Pacifico è il più grande dei cinque oceani del mondo, seguito dall'Oceano Atlantico, dall'Oceano Indiano, dall'Oceano Meridionale e dall'Oceano Artico. Copre un'area di circa 155 milioni di chilometri quadrati (circa 60 milioni di miglia quadrate). L'Oceano Pacifico è il più grande dei cinque oceani del mondo, seguito dall'Oceano Atlantico e dall'Oceano Indiano , Oceano Meridionale e Oceano Artico. Copre un'area di circa 155 milioni di chilometri quadrati (circa 60 milioni di miglia quadrate). - Il tamarino leone d'oro è dichiarato specie in pericolo di estinzione dall'Unione internazionale per la conservazione della natura a causa della drastica deforestazione del suo habitat naturale. A causa di... 
Il tamarino leone d'oro è dichiarato specie in pericolo di estinzione dall'Unione internazionale per la conservazione della natura a causa della drastica deforestazione del suo habitat naturale. - Il fiume Chang Jiang, o fiume Yangtze, sfocia nel mare della Cina orientale. Questo mare è una parte dell'Oceano Indiano vicino all'Asia. 3 persone l'hanno trovato utile. Modificare. model-index: - name: SentenceTransformer based on DeepMount00/Italian-ModernBERT-base results: - task: type: triplet name: Triplet dataset: name: mmarco dev type: mmarco_dev metrics: - type: cosine_accuracy value: 0.9089999794960022 name: Cosine Accuracy on Dev - type: cosine_accuracy value: 0.9210000038146973 name: Cosine Accuracy on Test --- <br> ![image/png](https://cdn-uploads.huggingface.co/production/uploads/624402e3c839178f5521d963/ZGkxItfkgXqn_t1VDn4HH.png) <br> # Italian-ModernBERT-base-embed-mmarco-triplet This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [DeepMount00/Italian-ModernBERT-base](https://huggingface.co/DeepMount00/Italian-ModernBERT-base) on the mmarco dataset. It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more. ## Model Details ### Model Description - **Model Type:** Sentence Transformer - **Base model:** [DeepMount00/Italian-ModernBERT-base](https://huggingface.co/DeepMount00/Italian-ModernBERT-base) <!-- at revision 34324c2191a0209b17c8cd27dbf2f4f9d2821189 --> - **Maximum Sequence Length:** 8192 tokens - **Output Dimensionality:** 768 dimensions - **Similarity Function:** Cosine Similarity - **Training Dataset:** - mmarco <!-- - **Language:** Unknown --> <!-- - **License:** Unknown --> ### Model Sources - **Documentation:** [Sentence Transformers Documentation](https://sbert.net) - **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers) - **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers) ### Full Model Architecture ``` SentenceTransformer( (0): Transformer({'max_seq_length': 8192, 'do_lower_case': False}) with Transformer model: ModernBertModel (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True}) ) ``` ## Usage ### Direct Usage (Sentence Transformers) First install the Sentence Transformers library: ```bash pip install -U sentence-transformers ``` Then you can load this model and run inference. ```python from sentence_transformers import SentenceTransformer # Download from the 🤗 Hub model = SentenceTransformer("Italian-ModernBERT-base-embed-mmarco-triplet") # Run inference sentences = [ "qual è il più grande il mare o l'oceano", "L'Oceano Pacifico è il più grande dei cinque oceani del mondo, seguito dall'Oceano Atlantico, dall'Oceano Indiano, dall'Oceano Meridionale e dall'Oceano Artico. Copre un'area di circa 155 milioni di chilometri quadrati (circa 60 milioni di miglia quadrate). L'Oceano Pacifico è il più grande dei cinque oceani del mondo, seguito dall'Oceano Atlantico e dall'Oceano Indiano , Oceano Meridionale e Oceano Artico. 
Copre un'area di circa 155 milioni di chilometri quadrati (circa 60 milioni di miglia quadrate).", "Il fiume Chang Jiang, o fiume Yangtze, sfocia nel mare della Cina orientale. Questo mare è una parte dell'Oceano Indiano vicino all'Asia. 3 persone l'hanno trovato utile. Modificare.", ] embeddings = model.encode(sentences) print(embeddings.shape) # [3, 768] # Get the similarity scores for the embeddings similarities = model.similarity(embeddings, embeddings) print(similarities.shape) # [3, 3] ``` <!-- ### Direct Usage (Transformers) <details><summary>Click to see the direct usage in Transformers</summary> </details> --> <!-- ### Downstream Usage (Sentence Transformers) You can finetune this model on your own dataset. <details><summary>Click to expand</summary> </details> --> <!-- ### Out-of-Scope Use *List how the model may foreseeably be misused and address what users ought not to do with the model.* --> ## Evaluation ### Metrics #### Triplet * Dataset: `mmarco_dev` * Evaluated with [<code>TripletEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.TripletEvaluator) | Metric | Value | |:--------------------|:----------| | **cosine_accuracy** | **0.909** | #### Triplet * Dataset: `mmarco_test` * Evaluated with [<code>TripletEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.TripletEvaluator) | Metric | Value | |:--------------------|:----------| | **cosine_accuracy** | **0.921** | <!-- ## Bias, Risks and Limitations *What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.* --> <!-- ### Recommendations *What are recommendations with respect to the foreseeable issues? 
For example, filtering explicit content.* --> ## Training Details ### Training Dataset #### mmarco * Dataset: mmarco * Total Size: 39,780,811 * Size: 50,000 training samples * Columns: <code>query</code>, <code>positive</code>, and <code>negative</code> * Approximate statistics based on the first 1000 samples: | | query | positive | negative | |:--------|:----------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------| | type | string | string | string | | details | <ul><li>min: 4 tokens</li><li>mean: 10.91 tokens</li><li>max: 24 tokens</li></ul> | <ul><li>min: 22 tokens</li><li>mean: 92.84 tokens</li><li>max: 276 tokens</li></ul> | <ul><li>min: 19 tokens</li><li>mean: 88.55 tokens</li><li>max: 242 tokens</li></ul> | * Samples: | query | positive | negative | |:----------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | <code>chi suona è granger su ncis la</code> | <code>Firmato per un ruolo ricorrente in NCIS: Los Angeles come assistente del direttore del servizio investigativo criminale navale Owen Granger, Ferrer è stato promosso a personaggio regolare per la quinta stagione il 6 febbraio 2013. È anche apparso nel film del 2013 Iron Man 3 come il vice. -Presidente.</code> | <code>NCIS: L'agente Abigail Borin di CGIS (Coast Guard Investigative Service) è interpretata da Diane Neal, che ha anche interpretato ADA Casey Novak in Law and Order, SVU. L'agente Borin è simile nello stile di comando e nel temperamento all'agente speciale Leroy Jethro Gibbs dell'NCIS.</code> | | <code>come si chiama la strada principale di hershey pa?</code> | <code>Chocolate Avenue è una strada di Hershey, in Pennsylvania, che passa davanti all'originale Hershey's Chocolate Factory ed è considerata la strada principale della città.</code> | <code>Milton S. Hershey è nato il 13 settembre 1857 da Henry e Veronica Fanny Snavely Hershey. La sua famiglia erano membri della comunità mennonita della Pennsylvania. I suoi antenati erano svizzeri e tedeschi e si erano stabiliti in Pennsylvania nei primi anni del 1700. Crebbe parlando l'olandese della Pennsylvania. ilton S. Hershey nacque il 13 settembre 1857 da Henry e Veronica Fanny Snavely Hershey. La sua famiglia erano membri della comunità mennonita della Pennsylvania. 
I suoi antenati erano svizzeri e tedeschi e si erano stabiliti in Pennsylvania nei primi anni del 1700. È cresciuto parlando l'olandese della Pennsylvania.</code> | | <code>definizione di dissoluzione in geologia</code> | <code>Rocce solubili (dissoluzione) Sinkhole; mentre l'acqua filtra attraverso le rocce sopra, il sale inizia a dissolversi e le rocce sopra cadono a formare una dolina. La dissoluzione del suolo si verifica quando l'acqua che passa attraverso rocce solubili produce cavità sotterranee e sistemi di grotte. Tali cavità riducono l'appoggio al terreno sovrastante e possono provocare cedimenti localizzati delle rocce e dei depositi sovrastanti.</code> | <code>Per porre fine all'esistenza di una società o LLC, un'entità deve presentare gli articoli di scioglimento o altra forma di scioglimento o annullamento dei documenti con il Segretario di Stato. I requisiti variano in base alla giurisdizione.</code> | * Loss: [<code>TripletLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#tripletloss) with these parameters: ```json { "distance_metric": "TripletDistanceMetric.EUCLIDEAN", "triplet_margin": 5 } ``` ### Evaluation Dataset #### mmarco * Dataset: mmarco * Size: 2,000 evaluation samples * Columns: <code>query</code>, <code>positive</code>, and <code>negative</code> * Approximate statistics based on the first 1000 samples: | | query | positive | negative | |:--------|:----------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------| | type | string | string | string | | details | <ul><li>min: 4 tokens</li><li>mean: 11.25 tokens</li><li>max: 37 tokens</li></ul> | <ul><li>min: 15 tokens</li><li>mean: 94.27 tokens</li><li>max: 309 tokens</li></ul> | <ul><li>min: 24 tokens</li><li>mean: 90.05 tokens</li><li>max: 305 tokens</li></ul> | * Samples: | query | positive | negative | |:------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | <code>significato del tatuaggio del drago e del lupo</code> | <code>Wolf Tattoo: significati e disegni. Il tatuaggio del lupo è un ottimo modo per mostrare lealtà e devozione alla famiglia. Alcune persone hanno il loro intero branco di persone care inchiostrate o si fanno tatuare una sola zampa di lupo e hanno i nomi dei propri cari inchiostrati all'interno del disegno. Il tatuaggio del lupo può anche rappresentare l'amore.</code> | <code>Qual è il significato di Apocalisse capitolo 12? 
Di cosa parla Apocalisse 12? Chi sono la donna, il bambino e il drago descritti in Apocalisse capitolo 12? Qual è il significato di Apocalisse capitolo 12? Apocalisse capitolo 12 contiene una descrizione di una donna, che è vestita di sole, con la luna sotto i suoi piedi e una corona di dodici stelle in cima alla sua testa. La donna è incinta e sta per partorire. È inseguita da un grande drago rosso, la cui coda ha spazzato via un terzo delle stelle dal cielo e le ha gettate sulla terra.</code> | | <code>cosa significa amianto friabile</code> | <code>L'amianto friabile si riferisce a qualsiasi tipo di amianto fragile che può sgretolarsi con poca forza o pressione. Qualsiasi materiale che può essere frantumato con le mani e contiene amianto è considerato amianto friabile e deve essere rimosso.</code> | <code>Il prezzo del test dell'aria dell'amianto di solito varia a seconda del tipo e del numero di test richiesti, dovresti ottenere stime specifiche per il tuo lavoro e dovrebbero essere inclusi nel calcolo di quanto costa rimuovere l'amianto. Ottieni ulteriori informazioni sui costi dei test dell'aria dell'amianto.</code> | | <code>che ha giocato a lois lane in superman</code> | <code>Noel Neill. Noel Darleen Neill (25 novembre 1920  3 luglio 2016) è stata un'attrice statunitense. È nota per aver interpretato Lois Lane nei serial cinematografici Superman (1948) e Atom Man vs. Superman (1950), nonché nella serie televisiva degli anni '50 Adventures of Superman. È apparsa in 80 film e serie televisive nella sua carriera.</code> | <code>esiste ed è un supplente di . Wonder Woman è un classico supereroe della DC Comics. Condivide la sua eredità con personaggi del calibro di Superman, Batman e Flash. Wonder Woman è un classico supereroe della DC Comics. Condivide la sua eredità con personaggi del calibro di Superman, Batman e Flash.</code> | * Loss: [<code>TripletLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#tripletloss) with these parameters: ```json { "distance_metric": "TripletDistanceMetric.EUCLIDEAN", "triplet_margin": 5 } ``` ### Training Hyperparameters #### Non-Default Hyperparameters - `eval_strategy`: steps - `per_device_train_batch_size`: 16 - `per_device_eval_batch_size`: 16 - `learning_rate`: 2e-05 - `num_train_epochs`: 1 - `warmup_ratio`: 0.1 - `fp16`: True - `load_best_model_at_end`: True - `batch_sampler`: no_duplicates #### All Hyperparameters <details><summary>Click to expand</summary> - `overwrite_output_dir`: False - `do_predict`: False - `eval_strategy`: steps - `prediction_loss_only`: True - `per_device_train_batch_size`: 16 - `per_device_eval_batch_size`: 16 - `per_gpu_train_batch_size`: None - `per_gpu_eval_batch_size`: None - `gradient_accumulation_steps`: 1 - `eval_accumulation_steps`: None - `torch_empty_cache_steps`: None - `learning_rate`: 2e-05 - `weight_decay`: 0.0 - `adam_beta1`: 0.9 - `adam_beta2`: 0.999 - `adam_epsilon`: 1e-08 - `max_grad_norm`: 1.0 - `num_train_epochs`: 1 - `max_steps`: -1 - `lr_scheduler_type`: linear - `lr_scheduler_kwargs`: {} - `warmup_ratio`: 0.1 - `warmup_steps`: 0 - `log_level`: passive - `log_level_replica`: warning - `log_on_each_node`: True - `logging_nan_inf_filter`: True - `save_safetensors`: True - `save_on_each_node`: False - `save_only_model`: False - `restore_callback_states_from_checkpoint`: False - `no_cuda`: False - `use_cpu`: False - `use_mps_device`: False - `seed`: 42 - `data_seed`: None - `jit_mode_eval`: False - `use_ipex`: False - `bf16`: False - `fp16`: True - `fp16_opt_level`: 
O1 - `half_precision_backend`: auto - `bf16_full_eval`: False - `fp16_full_eval`: False - `tf32`: None - `local_rank`: 0 - `ddp_backend`: None - `tpu_num_cores`: None - `tpu_metrics_debug`: False - `debug`: [] - `dataloader_drop_last`: False - `dataloader_num_workers`: 0 - `dataloader_prefetch_factor`: None - `past_index`: -1 - `disable_tqdm`: False - `remove_unused_columns`: True - `label_names`: None - `load_best_model_at_end`: True - `ignore_data_skip`: False - `fsdp`: [] - `fsdp_min_num_params`: 0 - `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False} - `tp_size`: 0 - `fsdp_transformer_layer_cls_to_wrap`: None - `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None} - `deepspeed`: None - `label_smoothing_factor`: 0.0 - `optim`: adamw_torch - `optim_args`: None - `adafactor`: False - `group_by_length`: False - `length_column_name`: length - `ddp_find_unused_parameters`: None - `ddp_bucket_cap_mb`: None - `ddp_broadcast_buffers`: False - `dataloader_pin_memory`: True - `dataloader_persistent_workers`: False - `skip_memory_metrics`: True - `use_legacy_prediction_loop`: False - `push_to_hub`: False - `resume_from_checkpoint`: None - `hub_model_id`: None - `hub_strategy`: every_save - `hub_private_repo`: None - `hub_always_push`: False - `gradient_checkpointing`: False - `gradient_checkpointing_kwargs`: None - `include_inputs_for_metrics`: False - `include_for_metrics`: [] - `eval_do_concat_batches`: True - `fp16_backend`: auto - `push_to_hub_model_id`: None - `push_to_hub_organization`: None - `mp_parameters`: - `auto_find_batch_size`: False - `full_determinism`: False - `torchdynamo`: None - `ray_scope`: last - `ddp_timeout`: 1800 - `torch_compile`: False - `torch_compile_backend`: None - `torch_compile_mode`: None - `dispatch_batches`: None - `split_batches`: None - `include_tokens_per_second`: False - `include_num_input_tokens_seen`: False - `neftune_noise_alpha`: None - `optim_target_modules`: None - `batch_eval_metrics`: False - `eval_on_start`: False - `use_liger_kernel`: False - `eval_use_gather_object`: False - `average_tokens_across_devices`: False - `prompts`: None - `batch_sampler`: no_duplicates - `multi_dataset_batch_sampler`: proportional </details> ### Training Logs | Epoch | Step | Training Loss | Validation Loss | mmarco_dev_cosine_accuracy | |:-------:|:--------:|:-------------:|:---------------:|:--------------------------:| | -1 | -1 | - | - | 0.6285 | | 0.08 | 250 | 1.8669 | 2.4032 | 0.7960 | | 0.16 | 500 | 1.793 | 1.6328 | 0.8600 | | 0.24 | 750 | 1.4301 | 1.3971 | 0.8930 | | 0.32 | 1000 | 1.3361 | 1.2686 | 0.8955 | | **0.4** | **1250** | **1.237** | **1.2151** | **0.909** | ## Test Logs | Epoch | Step | Training Loss | Validation Loss | mmarco_dev_cosine_accuracy | |:-------:|:--------:|:-------------:|:---------------:|:--------------------------:| | -1 | -1 | - | - | 0.9210 | * The bold row denotes the saved checkpoint. 
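The hyperparameters and loss settings above can be tied together in a short fine-tuning sketch. This is an illustrative reconstruction, not the exact script used to train this model; the one-row in-memory dataset (borrowed from the widget examples) stands in for the full 39.8M mmarco triplets.

```python
from datasets import Dataset
from sentence_transformers import (
    SentenceTransformer,
    SentenceTransformerTrainer,
    SentenceTransformerTrainingArguments,
)
from sentence_transformers.losses import TripletLoss, TripletDistanceMetric

model = SentenceTransformer("DeepMount00/Italian-ModernBERT-base")

# Placeholder triplets; the real training data is the Italian mmarco triplet set.
train_dataset = Dataset.from_dict({
    "query": ["qual è il più grande il mare o l'oceano"],
    "positive": ["L'Oceano Pacifico è il più grande dei cinque oceani del mondo."],
    "negative": ["Il fiume Chang Jiang, o fiume Yangtze, sfocia nel mare della Cina orientale."],
})

# Loss settings as reported in the card: Euclidean distance, margin 5
loss = TripletLoss(model, distance_metric=TripletDistanceMetric.EUCLIDEAN, triplet_margin=5)

args = SentenceTransformerTrainingArguments(
    output_dir="italian-modernbert-triplet",
    num_train_epochs=1,
    per_device_train_batch_size=16,
    learning_rate=2e-5,
    warmup_ratio=0.1,
    fp16=True,  # assumes a GPU is available, as in the reported run
)

trainer = SentenceTransformerTrainer(
    model=model,
    args=args,
    train_dataset=train_dataset,
    loss=loss,
)
trainer.train()
```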
### Framework Versions - Python: 3.11.11 - Sentence Transformers: 3.5.0.dev0 - Transformers: 4.50.0.dev0 - PyTorch: 2.5.1+cu124 - Accelerate: 1.3.0 - Datasets: 3.3.2 - Tokenizers: 0.21.0 ## Citation ### BibTeX #### Sentence Transformers ```bibtex @inproceedings{reimers-2019-sentence-bert, title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks", author = "Reimers, Nils and Gurevych, Iryna", booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing", month = "11", year = "2019", publisher = "Association for Computational Linguistics", url = "https://arxiv.org/abs/1908.10084", } ``` #### TripletLoss ```bibtex @misc{hermans2017defense, title={In Defense of the Triplet Loss for Person Re-Identification}, author={Alexander Hermans and Lucas Beyer and Bastian Leibe}, year={2017}, eprint={1703.07737}, archivePrefix={arXiv}, primaryClass={cs.CV} } ``` <!-- ## Glossary *Clearly define terms in order to be accessible across audiences.* --> <!-- ## Model Card Authors *Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.* --> <!-- ## Model Card Contact *Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.* -->
[ "TEXT_CLASSIFICATION" ]
[ "BEAR" ]
pruas/BENT-PubMedBERT-NER-Anatomical
pruas
token-classification
[ "transformers", "pytorch", "bert", "token-classification", "en", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-01-14T12:29:03
2024-03-02T10:09:23
182
4
--- language: - en license: apache-2.0 pipeline_tag: token-classification --- Named Entity Recognition (NER) model to recognize anatomical entities. Please cite our work: ``` @article{NILNKER2022, title = {NILINKER: Attention-based approach to NIL Entity Linking}, journal = {Journal of Biomedical Informatics}, volume = {132}, pages = {104137}, year = {2022}, issn = {1532-0464}, doi = {https://doi.org/10.1016/j.jbi.2022.104137}, url = {https://www.sciencedirect.com/science/article/pii/S1532046422001526}, author = {Pedro Ruas and Francisco M. Couto}, } ``` [PubMedBERT](https://huggingface.co/microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext) fine-tuned on the following datasets: - [MANTRA](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4986661/pdf/ocv037.pdf): entity type "ANAT" - [CellFinder](http://cellfinder.org/about/annotation/): entity type "Anatomical parts" - [CRAFT](https://github.com/UCDenver-ccp/CRAFT/tree/master/concept-annotation): entity type "UBERON" - [MLEE](http://nactem.ac.uk/MLEE/): entity types "Anatomical_system", "Organ", "Multi-tissue structure", "Tissue", "Immaterial_anatomical_entity" - [AnatEM](https://github.com/cambridgeltl/MTL-Bioinformatics-2016/tree/master/data/AnatEM-IOB) - [BioNLP13CG](): entity types "Multi-tissue structure", "Tissue", "Organ", "Immaterial anatomical entity", "Anatomical system" - [GREC](http://www.nactem.ac.uk/GREC/standoff.php): entity type "Tissue"
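As a quick usage sketch (not part of the original card), the checkpoint can be loaded with the standard transformers token-classification pipeline; the example sentence is made up for illustration.

```python
# Hedged usage sketch: run the anatomical NER model through the transformers pipeline.
from transformers import pipeline

ner = pipeline(
    "token-classification",
    model="pruas/BENT-PubMedBERT-NER-Anatomical",
    aggregation_strategy="simple",  # merge sub-word tokens into entity spans
)

text = "The biopsy revealed inflammation of the gastric mucosa and the duodenum."
for entity in ner(text):
    print(entity["entity_group"], entity["word"], round(entity["score"], 3))
```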
[ "NAMED_ENTITY_RECOGNITION" ]
[ "ANATEM", "CRAFT", "CELLFINDER", "MLEE" ]
pruas/BENT-PubMedBERT-NER-Cell-Component
pruas
token-classification
[ "transformers", "pytorch", "bert", "token-classification", "en", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-01-14T14:27:59
2024-03-02T10:08:57
182
2
--- language: - en license: apache-2.0 pipeline_tag: token-classification --- Named Entity Recognition (NER) model to recognize cell component entities. Please cite our work: ``` @article{NILNKER2022, title = {NILINKER: Attention-based approach to NIL Entity Linking}, journal = {Journal of Biomedical Informatics}, volume = {132}, pages = {104137}, year = {2022}, issn = {1532-0464}, doi = {https://doi.org/10.1016/j.jbi.2022.104137}, url = {https://www.sciencedirect.com/science/article/pii/S1532046422001526}, author = {Pedro Ruas and Francisco M. Couto}, } ``` [PubMedBERT](https://huggingface.co/microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext) fine-tuned on the following datasets: - [CRAFT](https://github.com/UCDenver-ccp/CRAFT/tree/master/concept-annotation): entity type "GO-CC" - [MLEE](http://nactem.ac.uk/MLEE/): entity type "Cellular_component" - [BioNLP13CG-cc](https://github.com/cambridgeltl/MTL-Bioinformatics-2016/tree/master/data/BioNLP13CG-cc-IOB)
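For a usage sketch without the high-level pipeline (not part of the original card), the checkpoint can also be loaded with the Auto classes and decoded label by label; the input sentence is illustrative.

```python
# Hedged sketch: token-level predictions with AutoModelForTokenClassification.
import torch
from transformers import AutoModelForTokenClassification, AutoTokenizer

model_id = "pruas/BENT-PubMedBERT-NER-Cell-Component"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)

text = "GFP localized to the mitochondria and the endoplasmic reticulum."
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits  # shape: [1, seq_len, num_labels]

predictions = logits.argmax(dim=-1)[0]
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
for token, pred in zip(tokens, predictions):
    print(token, model.config.id2label[int(pred)])
```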
[ "NAMED_ENTITY_RECOGNITION" ]
[ "CRAFT", "MLEE" ]
sultan/BioM-ELECTRA-Base-Discriminator
sultan
null
[ "transformers", "pytorch", "electra", "pretraining", "endpoints_compatible", "region:us" ]
2022-03-02T23:29:05
2023-11-04T23:06:42
179
3
--- {} --- # BioM-Transformers: Building Large Biomedical Language Models with BERT, ALBERT and ELECTRA # Abstract The impact of design choices on the performance of biomedical language models recently has been a subject for investigation. In this paper, we empirically study biomedical domain adaptation with large transformer models using different design choices. We evaluate the performance of our pretrained models against other existing biomedical language models in the literature. Our results show that we achieve state-of-the-art results on several biomedical domain tasks despite using similar or less computational cost compared to other models in the literature. Our findings highlight the significant effect of design choices on improving the performance of biomedical language models. # Model Description This model was pre-trained on PubMed Abstracts only with biomedical domain vocabulary for 500K steps with a batch size of 1024 on TPUv3-32 unit. Check our GitHub repo at https://github.com/salrowili/BioM-Transformers for TensorFlow and GluonNLP checkpoints. # Colab Notebook Examples BioM-ELECTRA-LARGE on NER and ChemProt Task [![Open In Colab][COLAB]](https://colab.research.google.com/github/salrowili/BioM-Transformers/blob/main/examples/Example_of_NER_and_ChemProt_Task_on_TPU.ipynb) BioM-ELECTRA-Large on SQuAD2.0 and BioASQ7B Factoid tasks [![Open In Colab][COLAB]](https://colab.research.google.com/github/salrowili/BioM-Transformers/blob/main/examples/Example_of_SQuAD2_0_and_BioASQ7B_tasks_with_BioM_ELECTRA_Large_on_TPU.ipynb) BioM-ALBERT-xxlarge on SQuAD2.0 and BioASQ7B Factoid tasks [![Open In Colab][COLAB]](https://colab.research.google.com/github/salrowili/BioM-Transformers/blob/main/examples/Example_of_SQuAD2_0_and_BioASQ7B_tasks_with_BioM_ALBERT_xxlarge_on_TPU.ipynb) Text Classification Task With HuggingFace Transformers and PyTorchXLA on Free TPU [![Open In Colab][COLAB]](https://colab.research.google.com/github/salrowili/BioM-Transformers/blob/main/examples/Fine_Tuning_Biomedical_Models_on_Text_Classification_Task_With_HuggingFace_Transformers_and_PyTorch_XLA.ipynb) Reproducing our BLURB results with JAX [![Open In Colab][COLAB]](https://colab.research.google.com/github/salrowili/BioM-Transformers/blob/main/examples/BLURB_LeaderBoard_with_TPU_VM.ipynb) Finetunning BioM-Transformers with Jax/Flax on TPUv3-8 with free Kaggle resource [![Open In Colab][COLAB]](https://www.kaggle.com/code/sultanalrowili/biom-transoformers-with-flax-on-tpu-with-kaggle) [COLAB]: https://colab.research.google.com/assets/colab-badge.svg # Acknowledgment We would like to acknowledge the support we have from Tensorflow Research Cloud (TFRC) team to grant us access to TPUv3 units. # Citation ```bibtex @inproceedings{alrowili-shanker-2021-biom, title = "{B}io{M}-Transformers: Building Large Biomedical Language Models with {BERT}, {ALBERT} and {ELECTRA}", author = "Alrowili, Sultan and Shanker, Vijay", booktitle = "Proceedings of the 20th Workshop on Biomedical Language Processing", month = jun, year = "2021", address = "Online", publisher = "Association for Computational Linguistics", url = "https://www.aclweb.org/anthology/2021.bionlp-1.24", pages = "221--227", abstract = "The impact of design choices on the performance of biomedical language models recently has been a subject for investigation. In this paper, we empirically study biomedical domain adaptation with large transformer models using different design choices. 
We evaluate the performance of our pretrained models against other existing biomedical language models in the literature. Our results show that we achieve state-of-the-art results on several biomedical domain tasks despite using similar or less computational cost compared to other models in the literature. Our findings highlight the significant effect of design choices on improving the performance of biomedical language models.", } ```
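As a quick local sanity check (a hedged sketch, not from the original card, which points to the Colab notebooks above for full task examples), the discriminator can be loaded with transformers and used for replaced-token detection; the example sentence is illustrative.

```python
# Hedged sketch: score tokens with the ELECTRA discriminator (replaced-token detection).
import torch
from transformers import AutoTokenizer, ElectraForPreTraining

model_id = "sultan/BioM-ELECTRA-Base-Discriminator"
tokenizer = AutoTokenizer.from_pretrained(model_id)
discriminator = ElectraForPreTraining.from_pretrained(model_id)

sentence = "The patient was treated with antibiotics for community-acquired pneumonia."
inputs = tokenizer(sentence, return_tensors="pt")
with torch.no_grad():
    logits = discriminator(**inputs).logits  # higher score = more likely "replaced"

tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
for token, score in zip(tokens, torch.sigmoid(logits[0])):
    print(f"{token}\t{score.item():.3f}")
```

For downstream tasks (NER, ChemProt, SQuAD/BioASQ), the usual route is to swap in a task head such as `ElectraForTokenClassification` or `ElectraForQuestionAnswering` and fine-tune; the Colab notebooks linked above cover those tasks end to end.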
[ "TEXT_CLASSIFICATION" ]
[ "BLURB", "CHEMPROT" ]
PlanTL-GOB-ES/roberta-base-biomedical-es
PlanTL-GOB-ES
fill-mask
[ "transformers", "pytorch", "roberta", "fill-mask", "biomedical", "spanish", "es", "arxiv:2109.03570", "arxiv:2109.07765", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2022-03-02T23:29:04
2022-11-15T15:19:06
178
3
--- language: - es license: apache-2.0 metrics: - ppl tags: - biomedical - spanish widget: - text: El único antecedente personal a reseñar era la <mask> arterial. - text: Las radiologías óseas de cuerpo entero no detectan alteraciones <mask>, ni alteraciones vertebrales. - text: En el <mask> toraco-abdómino-pélvico no se encontraron hallazgos patológicos de interés. --- # Biomedical language model for Spanish ## Table of contents <details> <summary>Click to expand</summary> - [Model description](#model-description) - [Intended uses and limitations](#intended-use) - [How to use](#how-to-use) - [Limitations and bias](#limitations-and-bias) - [Training](#training) - [Tokenization and model pretraining](#Tokenization-pretraining) - [Training corpora and preprocessing](#training-corpora-preprocessing) - [Evaluation](#evaluation) - [Additional information](#additional-information) - [Author](#author) - [Contact information](#contact-information) - [Copyright](#copyright) - [Licensing information](#licensing-information) - [Funding](#funding) - [Disclaimer](#disclaimer) </details> ## Model description Biomedical pretrained language model for Spanish. For more details about the corpus, the pretraining and the evaluation, check the official [repository](https://github.com/PlanTL-SANIDAD/lm-biomedical-clinical-es) and read our [preprint](https://arxiv.org/abs/2109.03570). ## Intended uses and limitations The model is ready-to-use only for masked language modelling to perform the Fill Mask task (try the inference API or read the next section). However, it is intended to be fine-tuned on downstream tasks such as Named Entity Recognition or Text Classification. ## How to use ```python from transformers import AutoTokenizer, AutoModelForMaskedLM tokenizer = AutoTokenizer.from_pretrained("BSC-TeMU/roberta-base-biomedical-es") model = AutoModelForMaskedLM.from_pretrained("BSC-TeMU/roberta-base-biomedical-es") from transformers import pipeline unmasker = pipeline('fill-mask', model="BSC-TeMU/roberta-base-biomedical-es") unmasker("El único antecedente personal a reseñar era la <mask> arterial.") ``` ``` # Output [ { "sequence": " El único antecedente personal a reseñar era la hipertensión arterial.", "score": 0.9855039715766907, "token": 3529, "token_str": " hipertensión" }, { "sequence": " El único antecedente personal a reseñar era la diabetes arterial.", "score": 0.0039140828885138035, "token": 1945, "token_str": " diabetes" }, { "sequence": " El único antecedente personal a reseñar era la hipotensión arterial.", "score": 0.002484665485098958, "token": 11483, "token_str": " hipotensión" }, { "sequence": " El único antecedente personal a reseñar era la Hipertensión arterial.", "score": 0.0023484621196985245, "token": 12238, "token_str": " Hipertensión" }, { "sequence": " El único antecedente personal a reseñar era la presión arterial.", "score": 0.0008009297889657319, "token": 2267, "token_str": " presión" } ] ``` ## Training ### Tokenization and model pretraining This model is a [RoBERTa-based](https://github.com/pytorch/fairseq/tree/master/examples/roberta) model trained on a **biomedical** corpus in Spanish collected from several sources (see next section). The training corpus has been tokenized using a byte version of [Byte-Pair Encoding (BPE)](https://github.com/openai/gpt-2) used in the original [RoBERTA](https://github.com/pytorch/fairseq/tree/master/examples/roberta) model with a vocabulary size of 52,000 tokens. 
The pretraining consists of a masked language model training at the subword level following the approach employed for the RoBERTa base model with the same hyperparameters as in the original work. The training lasted a total of 48 hours with 16 NVIDIA V100 GPUs of 16GB DDRAM, using Adam optimizer with a peak learning rate of 0.0005 and an effective batch size of 2,048 sentences. ### Training corpora and preprocessing The training corpus is composed of several biomedical corpora in Spanish, collected from publicly available corpora and crawlers. To obtain a high-quality training corpus, a cleaning pipeline with the following operations has been applied: - data parsing in different formats - sentence splitting - language detection - filtering of ill-formed sentences - deduplication of repetitive contents - keep the original document boundaries Finally, the corpora are concatenated and further global deduplication among the corpora have been applied. The result is a medium-size biomedical corpus for Spanish composed of about 963M tokens. The table below shows some basic statistics of the individual cleaned corpora: | Name | No. tokens | Description | |-----------------------------------------------------------------------------------------|-------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | [Medical crawler](https://zenodo.org/record/4561970) | 745,705,946 | Crawler of more than 3,000 URLs belonging to Spanish biomedical and health domains. | | Clinical cases misc. | 102,855,267 | A miscellany of medical content, essentially clinical cases. Note that a clinical case report is a scientific publication where medical practitioners share patient cases and it is different from a clinical note or document. | | [Scielo](https://github.com/PlanTL-SANIDAD/SciELO-Spain-Crawler) | 60,007,289 | Publications written in Spanish crawled from the Spanish SciELO server in 2017. | | [BARR2_background](https://temu.bsc.es/BARR2/downloads/background_set.raw_text.tar.bz2) | 24,516,442 | Biomedical Abbreviation Recognition and Resolution (BARR2) containing Spanish clinical case study sections from a variety of clinical disciplines. | | Wikipedia_life_sciences | 13,890,501 | Wikipedia articles crawled 04/01/2021 with the [Wikipedia API python library](https://pypi.org/project/Wikipedia-API/) starting from the "Ciencias\_de\_la\_vida" category up to a maximum of 5 subcategories. Multiple links to the same articles are then discarded to avoid repeating content. | | Patents | 13,463,387 | Google Patent in Medical Domain for Spain (Spanish). The accepted codes (Medical Domain) for Json files of patents are: "A61B", "A61C","A61F", "A61H", "A61K", "A61L","A61M", "A61B", "A61P". | | [EMEA](http://opus.nlpl.eu/download.php?f=EMEA/v3/moses/en-es.txt.zip) | 5,377,448 | Spanish-side documents extracted from parallel corpora made out of PDF documents from the European Medicines Agency. | | [mespen_Medline](https://zenodo.org/record/3562536#.YTt1fH2xXbR) | 4,166,077 | Spanish-side articles extracted from a collection of Spanish-English parallel corpus consisting of biomedical scientific literature. The collection of parallel resources are aggregated from the MedlinePlus source. | | PubMed | 1,858,966 | Open-access articles from the PubMed repository crawled in 2017. 
| ## Evaluation The model has been evaluated on the Named Entity Recognition (NER) using the following datasets: - [PharmaCoNER](https://zenodo.org/record/4270158): is a track on chemical and drug mention recognition from Spanish medical texts (for more info see: https://temu.bsc.es/pharmaconer/). - [CANTEMIST](https://zenodo.org/record/3978041#.YTt5qH2xXbQ): is a shared task specifically focusing on named entity recognition of tumor morphology, in Spanish (for more info see: https://zenodo.org/record/3978041#.YTt5qH2xXbQ). - ICTUSnet: consists of 1,006 hospital discharge reports of patients admitted for stroke from 18 different Spanish hospitals. It contains more than 79,000 annotations for 51 different kinds of variables. The evaluation results are compared against the [mBERT](https://huggingface.co/bert-base-multilingual-cased) and [BETO](https://huggingface.co/dccuchile/bert-base-spanish-wwm-cased) models: | F1 - Precision - Recall | roberta-base-biomedical-es | mBERT | BETO | |---------------------------|----------------------------|-------------------------------|-------------------------| | PharmaCoNER | **89.48** - **87.85** - **91.18** | 87.46 - 86.50 - 88.46 | 88.18 - 87.12 - 89.28 | | CANTEMIST | **83.87** - **81.70** - **86.17** | 82.61 - 81.12 - 84.15 | 82.42 - 80.91 - 84.00 | | ICTUSnet | **88.12** - **85.56** - **90.83** | 86.75 - 83.53 - 90.23 | 85.95 - 83.10 - 89.02 | ## Additional information ### Author Text Mining Unit (TeMU) at the Barcelona Supercomputing Center ([email protected]) ### Contact information For further information, send an email to <[email protected]> ### Copyright Copyright by the Spanish State Secretariat for Digitalization and Artificial Intelligence (SEDIA) (2022) ### Licensing information [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0) ### Funding This work was funded by the Spanish State Secretariat for Digitalization and Artificial Intelligence (SEDIA) within the framework of the Plan-TL. ## Citation information If you use our models, please cite our latest preprint: ```bibtex @misc{carrino2021biomedical, title={Biomedical and Clinical Language Models for Spanish: On the Benefits of Domain-Specific Pretraining in a Mid-Resource Scenario}, author={Casimiro Pio Carrino and Jordi Armengol-Estapé and Asier Gutiérrez-Fandiño and Joan Llop-Palao and Marc Pàmies and Aitor Gonzalez-Agirre and Marta Villegas}, year={2021}, eprint={2109.03570}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` If you use our Medical Crawler corpus, please cite the preprint: ```bibtex @misc{carrino2021spanish, title={Spanish Biomedical Crawled Corpus: A Large, Diverse Dataset for Spanish Biomedical Language Models}, author={Casimiro Pio Carrino and Jordi Armengol-Estapé and Ona de Gibert Bonet and Asier Gutiérrez-Fandiño and Aitor Gonzalez-Agirre and Martin Krallinger and Marta Villegas}, year={2021}, eprint={2109.07765}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` ### Disclaimer <details> <summary>Click to expand</summary> The models published in this repository are intended for a generalist purpose and are available to third parties. These models may have bias and/or any other undesirable distortions. 
When third parties, deploy or provide systems and/or services to other parties using any of these models (or using systems based on these models) or become users of the models, they should note that it is their responsibility to mitigate the risks arising from their use and, in any event, to comply with applicable regulations, including regulations regarding the use of Artificial Intelligence. In no event shall the owner of the models (SEDIA – State Secretariat for Digitalization and Artificial Intelligence) nor the creator (BSC – Barcelona Supercomputing Center) be liable for any results arising from the use made by third parties of these models. Los modelos publicados en este repositorio tienen una finalidad generalista y están a disposición de terceros. Estos modelos pueden tener sesgos y/u otro tipo de distorsiones indeseables. Cuando terceros desplieguen o proporcionen sistemas y/o servicios a otras partes usando alguno de estos modelos (o utilizando sistemas basados en estos modelos) o se conviertan en usuarios de los modelos, deben tener en cuenta que es su responsabilidad mitigar los riesgos derivados de su uso y, en todo caso, cumplir con la normativa aplicable, incluyendo la normativa en materia de uso de inteligencia artificial. En ningún caso el propietario de los modelos (SEDIA – Secretaría de Estado de Digitalización e Inteligencia Artificial) ni el creador (BSC – Barcelona Supercomputing Center) serán responsables de los resultados derivados del uso que hagan terceros de estos modelos. </details>
[ "NAMED_ENTITY_RECOGNITION", "TEXT_CLASSIFICATION" ]
[ "CANTEMIST", "PHARMACONER", "SCIELO" ]
nasa-impact/nasa-smd-ibm-v0.1
nasa-impact
fill-mask
[ "transformers", "pytorch", "roberta", "fill-mask", "earth science", "climate", "biology", "en", "dataset:nasa-impact/nasa-smd-IR-benchmark", "dataset:nasa-impact/nasa-smd-qa-benchmark", "dataset:ibm/Climate-Change-NER", "arxiv:2405.10725", "doi:10.57967/hf/1429", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-12-04T18:07:32
2024-10-11T02:15:20
178
33
--- datasets: - nasa-impact/nasa-smd-IR-benchmark - nasa-impact/nasa-smd-qa-benchmark - ibm/Climate-Change-NER language: - en library_name: transformers license: apache-2.0 pipeline_tag: fill-mask tags: - earth science - climate - biology --- # Model Card for Indus (nasa-smd-ibm-v0.1) Indus (previously known as nasa-smd-ibm-v0.1) is a RoBERTa-based, Encoder-only transformer model, domain-adapted for NASA Science Mission Directorate (SMD) applications. It's fine-tuned on scientific journals and articles relevant to NASA SMD, aiming to enhance natural language technologies like information retrieval and intelligent search. ## Model Details - **Base Model**: RoBERTa - **Tokenizer**: Custom - **Parameters**: 125M - **Pretraining Strategy**: Masked Language Modeling (MLM) - **Distilled Version**: You can download a distilled version of the model (30 Million Parameters) here: https://huggingface.co/nasa-impact/nasa-smd-ibm-distil-v0.1 ## Training Data - Wikipedia English (Feb 1, 2020) - AGU Publications - AMS Publications - Scientific papers from Astrophysics Data Systems (ADS) - PubMed abstracts - PubMedCentral (PMC) (commercial license subset) ![image/png](https://cdn-uploads.huggingface.co/production/uploads/61099e5d86580d4580767226/H0-q9N7IwXQqLdEaCCgm-.png) ## Training Procedure - **Framework**: fairseq 0.12.1 with PyTorch 1.9.1 - **transformers Version**: 4.2.0 - **Strategy**: Masked Language Modeling (MLM) ## Evaluation ### BLURB benchmark ![image/png](https://cdn-uploads.huggingface.co/production/uploads/61099e5d86580d4580767226/Ipw5xR99qUORGkn5Z8JkA.png) (Standard deviation across 10 random seeds in parenthesis. Macro avg. reported across datasets and micro avg. computed by averaging scores on each task then averaging across task averages.) ### Climate Change NER, and NASA-QA benchmark ![image/png](https://cdn-uploads.huggingface.co/production/uploads/61099e5d86580d4580767226/ncTupw9tNVf6pSBD5VnOh.png) (Climate Change NER and NASA-QA benchmark results. Standard Deviation over multiple runs given in parantheses) Please refer to the following dataset cards for further benchmarks and evaluation - NASA-IR Benchmark - https://huggingface.co/datasets/nasa-impact/nasa-smd-IR-benchmark - NASA-QA Benchmark - https://huggingface.co/datasets/nasa-impact/nasa-smd-qa-benchmark - Climate Change NER Benchmark - https://huggingface.co/datasets/ibm/Climate-Change-NER ## Uses - Named Entity Recognition (NER) - Information Retrieval - Sentence Transformers - Extractive QA For NASA SMD related, scientific usecases. ## Note Accompanying preprint paper can be found here: https://arxiv.org/abs/2405.10725. 
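As a quick start (a hedged sketch, not from the original card), the checkpoint can be queried through the standard fill-mask pipeline; the example sentence is made up, and the mask token is taken from the model's own tokenizer rather than hard-coded.

```python
# Hedged usage sketch: masked-token prediction with the fill-mask pipeline.
from transformers import pipeline

fill_mask = pipeline("fill-mask", model="nasa-impact/nasa-smd-ibm-v0.1")

# Build the masked sentence with the tokenizer's own mask token.
masked = f"Solar flares can disrupt {fill_mask.tokenizer.mask_token} communications on Earth."
for prediction in fill_mask(masked):
    print(prediction["token_str"], round(prediction["score"], 4))
```

For the retrieval, NER, and extractive-QA uses listed above, the encoder would typically be fine-tuned with the corresponding task head rather than used through fill-mask directly.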
## Citation If you find this work useful, please cite using the following bibtex citation: ```bibtex @misc {nasa-impact_2023, author = {Masayasu Maraoka and Bishwaranjan Bhattacharjee and Muthukumaran Ramasubramanian and Ikhsa Gurung and Rahul Ramachandran and Manil Maskey and Kaylin Bugbee and Rong Zhang and Yousef El Kurdi and Bharath Dandala and Mike Little and Elizabeth Fancher and Lauren Sanders and Sylvain Costes and Sergi Blanco-Cuaresma and Kelly Lockhart and Thomas Allen and Felix Grazes and Megan Ansdell and Alberto Accomazzi and Sanaz Vahidinia and Ryan McGranaghan and Armin Mehrabian and Tsendgar Lee}, title = { nasa-smd-ibm-v0.1 (Revision f01d42f) }, year = 2023, url = { https://huggingface.co/nasa-impact/nasa-smd-ibm-v0.1 }, doi = { 10.57967/hf/1429 }, publisher = { Hugging Face } } ``` ## Attribution IBM Research - Masayasu Muraoka - Bishwaranjan Bhattacharjee - Rong Zhang - Yousef El Kurdi - Bharath Dandala NASA SMD - Muthukumaran Ramasubramanian - Iksha Gurung - Rahul Ramachandran - Manil Maskey - Kaylin Bugbee - Mike Little - Elizabeth Fancher - Lauren Sanders - Sylvain Costes - Sergi Blanco-Cuaresma - Kelly Lockhart - Thomas Allen - Felix Grazes - Megan Ansdell - Alberto Accomazzi - Sanaz Vahidinia - Ryan McGranaghan - Armin Mehrabian - Tsendgar Lee ## Disclaimer This Encoder-only model is currently in an experimental phase. We are working to improve the model's capabilities and performance, and as we progress, we invite the community to engage with this model, provide feedback, and contribute to its evolution.
[ "NAMED_ENTITY_RECOGNITION" ]
[ "BLURB" ]
PenutChen/Breexe-8x7B-Instruct-v0_1-GGUF
PenutChen
text-generation
[ "gguf", "text-generation", "zh", "en", "license:apache-2.0", "endpoints_compatible", "region:us", "conversational" ]
2024-08-04T09:18:22
2024-08-09T05:47:25
176
0
--- language: - zh - en license: apache-2.0 pipeline_tag: text-generation --- Original Model: [MediaTek-Research/Breexe-8x7B-Instruct-v0_1](https://huggingface.co/MediaTek-Research/Breexe-8x7B-Instruct-v0_1) | Name | Bits | Method | Size | |:----------------------------------------------------------------------------------------------------------------------------------:|:----:|:------:|:-------:| | [BreeXe-8x7B-Inst.Q2_K.gguf](https://huggingface.co/PenutChen/Breexe-8x7B-Instruct-v0_1-GGUF/blob/main/BreeXe-8x7B-Inst.Q2_K.gguf) | 2 | Q2_K | 17.5 GB | | [BreeXe-8x7B-Inst.Q3_K.gguf](https://huggingface.co/PenutChen/Breexe-8x7B-Instruct-v0_1-GGUF/blob/main/BreeXe-8x7B-Inst.Q3_K.gguf) | 3 | Q3_K | 22.7 GB | | [BreeXe-8x7B-Inst.Q4_K.gguf](https://huggingface.co/PenutChen/Breexe-8x7B-Instruct-v0_1-GGUF/blob/main/BreeXe-8x7B-Inst.Q4_K.gguf) | 4 | Q4_K | 28.6 GB | | [BreeXe-8x7B-Inst.Q5_K.gguf](https://huggingface.co/PenutChen/Breexe-8x7B-Instruct-v0_1-GGUF/blob/main/BreeXe-8x7B-Inst.Q5_K.gguf) | 5 | Q5_K | 33.4 GB | | [BreeXe-8x7B-Inst.Q6_K.gguf](https://huggingface.co/PenutChen/Breexe-8x7B-Instruct-v0_1-GGUF/blob/main/BreeXe-8x7B-Inst.Q6_K.gguf) | 6 | Q6_K | 38.6 GB | | [BreeXe-8x7B-Inst.Q8_0.gguf](https://huggingface.co/PenutChen/Breexe-8x7B-Instruct-v0_1-GGUF/blob/main/BreeXe-8x7B-Inst.Q8_0.gguf) | 8 | Q8_0 | 49.9 GB | # Breexe-8x7B-Instruct-v0_1 Breexe-8x7B is a language model family that builds on top of [Mixtral-8x7B](https://huggingface.co/mistralai/Mixtral-8x7B-v0.1), specifically intended for Traditional Chinese use. Breexe-8x7B-Base is the base model for the Breexe-8x7B series. Breexe-8x7B-Base expands the original vocabulary with additional 30,000 Traditional Chinese tokens. With the expanded vocabulary, Breexe-8x7B operates at twice the inference speed for Traditional Chinese to Mixtral-8x7B. [See [Inference Performance](#inference-performance).] [Breexe-8x7B-Instruct](https://huggingface.co/MediaTek-Research/Breexe-8x7B-Instruct-v0_1) derives from the base model Breexe-8x7B-Base, making the resulting model amenable to be used as-is for commonly seen tasks, such as Q&A, RAG, multi-round chat, and summarization. **Breexe-8x7B-Instruct demonstrates impressive performance in benchmarks for Traditional Chinese and English, on par with OpenAI's gpt-3.5-turbo-1106.** [See [Chat Model Performance](#chat-model-performance).] The current release version of Breexe-8x7B is v0.1. *The models were trained on Nvidia's Taipei-1. Special thanks for Nvidia's technical support.* *A project by the members (in alphabetical order): Chan-Jan Hsu 許湛然, Chang-Le Liu 劉昶樂, Feng-Ting Liao 廖峰挺, Po-Chun Hsu 許博竣, [Yi-Chang Chen 陳宜昌](https://ycc.idv.tw/about-me), and the supervisor Da-Shan Shiu 許大山.* ## BreeXe API <p style="color:red;">We offer a trial API for business integration and academic benchmarking.</p> *API service open time: 14:00 - 20:00 (from 2024/4/3 to 2024/5/3)* The API is in beta testing. If you are experiencing connectivity issues, please bear with us. 
Free trial API key: `'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX2lkIjoyLCJ1c2VybmFtZSI6ImdlbmVyYWxfcHVibGljIiwibm90ZSI6ImdlbmVyYWwgcHVibGljIn0.kCp68nRw3RSh3jbMm8FvhG0NIkStflgI1wTHLviRPQE'` Get started here: ```python import time from openai import OpenAI API_KEY = <API_KEY_HERE> BASE_URL = 'https://api-mtkresearch.com/v1' MODEL_NAME = 'BreeXe-8x7B' client = OpenAI(base_url=BASE_URL, api_key=API_KEY) def ask_breexe(messages): completion = client.chat.completions.create( model=MODEL_NAME, messages=messages, temperature=0.01, top_p=0.01, max_tokens=512 ) response = completion.choices[0].message.content time.sleep(3) # due to a rate limit of 200 requests per 10 minutes return response messages = [ { 'role': 'system', 'content': 'You are a helpful AI assistant built by MediaTek Research. The user you are helping speaks Traditional Chinese and comes from Taiwan.' }, { 'role': 'user', 'content': '請問在量子物理領域中,疊加原理是什麼?它又是如何與量子糾纏現象相關聯的?' } ] response = ask_breexe(messages) print(response) ``` ## BreeXe Demo <a href="https://huggingface.co/spaces/MediaTek-Research/Demo-MR-Breexe-8x7B" style="color:red;font-weight:bold;">Try Demo Here 👩‍💻🧑🏻‍💻</a> Some examples are shown below. (System Prompt: `You are a helpful AI assistant built by MediaTek Research. The user you are helping speaks Traditional Chinese and comes from Taiwan.`, Temperature: 0.01, Top-P: 0.01, Repetition-Penalty: 1.1) <details><summary><b>Education</b></summary> | Query | 1️⃣ 請問在量子物理領域中,疊加原理是什麼?它又是如何與量子糾纏現象相關聯的?<br>2️⃣ 請用上述概念說明「薛丁格的貓」這個假想實驗 | |---|---| | **Breexe** | 1️⃣ <br>在量子物理領域中,疊加原理(Superposition Principle)是一個非常重要且獨特的概念。這個原則表示,微觀粒子如電子、光子等,可以同時存在於多個狀態之中,直到它們被測量時才「決定」其特定狀態。換句話說,在未進行觀測之前,量子系統處於一種混合態,包含了所有可能的狀態。<br><br>舉例來說,假設有一個量子系統具有兩個基本狀態:\|0⟩和\|1⟩。根據疊加原理,該系統可以同時處在這兩種狀態的疊加,即α\|0⟩+β\|1⟩,其中α和β分別代表不同狀態的機率振幅。當我們對這個量子系統進行測量時,它會「選擇」某個特定狀態,例如\|0⟩或\|1⟩,而其他未被選中的狀態將被排除在外。在這個過程中,整個系統的波函數會瞬間崩塌到選定的狀態。<br><br>量子糾纏現象(Quantum Entanglement)則是另一個量子物理中的核心概念。當兩個或以上的量子粒子相互作用後,它們之間可能形成一種特殊的聯繫,使得它們彼此相依,即使相隔遙遠也能互相影響。也就是說,當我們對其中一個粒子進行測量時,另一個粒子也會立即受到影響,無論它們相距多遠。這違反了古典物理學的定律,因此也被稱為「超距作用」。<br><br>疊加原理與量子糾纏現象兩者間有密切關聯。在某些情況下,量子糾纏現象需要疊加原理才能成立。例如,當兩個粒子處於糾纏狀態時,它們各自的狀態不能單獨描述,而是必須以整體的角度來考慮。在這種狀況下,疊加原理便提供了一個有效的方法,將兩個粒子的狀態結合為一個統一的疊加態。<br><br>此外,疊加原理也為量子計算機帶來了巨大的潛力。由於量子位元可以同時存在於多個狀態,這讓量子計算機擁有遠超傳統電腦的運算能力。利用疊加原理,量子計算機可以在短時間內處理大量數據,從而解決一些目前尚待解決的難題。<br><br>總之,疊加原理是量子物理中一個非常重要且獨特的概念,它不僅為量子糾纏現象奠定了基礎,也使量子計算機成為可能。了解疊加原理有助於深入理解量子世界的奇妙特性,並探索更多未知的量子現象。<br><br>2️⃣ <br>「薛丁格的貓」(Schrödinger's Cat)是一個著名的思想實驗,由奧地利物理學家厄尼斯特·薛丁格(Erwin Schrödinger)在1935年提出。這個實驗旨在展示量子力學與巨觀世界之間的矛盾,以及疊加原理在宏觀尺度下的荒謬性。<br><br>首先,讓我們回顧一下疊加原理。根據這個原則,微觀粒子可以同時存在於多個狀態,直到它們被測量時才「決定」其特定狀態。然而,當我們將這個概念放大至巨觀尺度時,疊加原理似乎變得不合理。<br><br>在薛丁格的貓這個假想實驗中,有一個密閉容器內裝有一隻活貓、一盒有毒氣體、以及一個放射性原子。如果放射性原子衰變,毒氣便會釋放出來,導致貓死亡;反之,若原子不衰變,則貓保持活著。按照古典物理的觀點,在任何給定時刻,貓只能處在活或死的單一狀態。<br><br>然而,從量子力學的角度來看,事情就變得有趣了。假設放射性原子處於疊加態,既可能衰變又可能不衰變。在這種情況下,根據疊加原理,貓也應該處於一個疊加態,也就是同時活又死。這個結果顯然違背常理,因為在我們日常生活中,物體總是處於明確的狀態。<br><br>這個實驗進一步強調了量子世界與巨觀世界之間的差異,以及疊加原理在巨觀尺度下的荒謬性。時至今日,「薛丁格的貓」仍然是一個熱門的話題,經常用於討論量子力學的奇特性質及其與巨觀世界的關係。 | </details> <details><summary><b>Open Question</b></summary> | Query | 生命的意義是什麼? 請用哲學角度探討,盡可能引經據典 | |---|---| | **Breexe** | 生命的意義是一個亙古以來的人類存在與生存問題。在哲學領域中,不同學派和思想家對於生命的意義有各自獨特且深刻的見解。以下將引用幾位重要哲學家的觀點,以提供您對這個議題的理解:<br>1. 蘇格拉底(Socrates):蘇格拉底主張「知識就是力量」,他認為人應該追求智慧、道德和美德。生命的意義在於追求真理,通過自我反省和思辨來提升心靈,並了解自己的角色和責任。他強調要認識自己,才能找到生命的意義。<br>2. 亞里斯多德(Aristotle):亞里斯多德提出了「目的論」(teleology),他相信每個事物都有其自然目的或最終目標。生命的意義就在於實現我們的潛能,完成我們的使命。他提到人類具有理性能力,因此生命的意義是在實踐德行(virtue),追求幸福(eudaimonia),即一種圓滿而美好的生活。<br>3. 
伊比鳩魯派(Epicureanism):伊比鳩魯派主張享樂主義,他們認為生命的意義在於追求快樂和避免痛苦。在這個理論下,生命的意義是享受當下,珍惜有限的時間,以及建立良好的人際關係。然而,這並不意味著放縱或自私行為;相反地,它鼓勵明智地選擇那些能帶來真正快樂的事物。<br>4. 斯多噶學派(Stoicism):斯多噶學派強調控制情感和意志力,以克服人生中的困難和逆境。生命的意義在於接受命運,保持內心平靜,並努力過上自律的生活。他們相信,只要我們能夠掌握自己的心態,就能夠應對任何困境。<br>5. 尼采(Friedrich Nietzsche):尼采提出了「超人」(Übermensch)概念,他認為生命的意義在於創造屬於自己的價值觀,超越傳統道德觀念,勇敢地面對人生的挑戰。他呼籲人們要活出自我,追求卓越,並成為自己的主宰。<br>6. 沙特(Jean-Paul Sartre):沙特是存在主義者,他認為生命的意義取決於個人的抉擇。他主張自由意志,強調每個人都可以決定自己的道路。生命的意義就在於為自己的行動負責,並在過程中塑造出自己的存在。<br>7. 馬丁·布伯(Martin Buber):布伯提出了「我─你」(Ich-Du)的概念,他認為生命的意義在於與他人的相遇和交流。他強調人與人之間的相互尊重和愛,認為只有通過與他人的交往,我們才能充分感受到生命的意義。<br>以上只是部分哲學家的觀點,但希望這些引文有助於您進一步思考生命的意義。每個人都有自己的答案,而在探索這個問題時,重要的是要保持開放的心態,尊重他人的看法,並勇於追求自己的信仰。 | </details> <details><summary><b>Email Writing</b></summary> | Query | 請擬定一封專業的郵件,尋求主管對你準備的「季度財務報告」提供意見。特別詢問有關資料分析、呈現風格,以及所提取結論的清晰度。郵件請簡潔扼要。 | |---|---| | **Breexe** | 尊敬的主管:<br/><br/>我是您的下屬 XXXX,我正在籌備本年度第三季的財務報告。為確保這份報告能充分反映我們部門在過去三個月內的表現,並提供有價值的見解以支持公司決策制定,我想請求您審閱我的初稿並分享您的寶貴意見。<br/><br/>以下是我希望您在審閱時關注的幾個方面:<br/>1.數據分析:確認我所使用的數據是否正確且來源可靠,以及我在報告中對這些數據進行了哪些分析方法。如果您認為需要添加或修改任何分析方式,也懇請不吝賜教。<br/>2.呈現風格:我希望這份報告既能展示清晰的數字信息,又能保持視覺吸引力。因此,我非常期待您的反饋,以便我能夠進一步完善圖表和圖形等元素,使其更容易理解和消化。<br/>3.結論清晰度:最後,我希望報告中的結論能夠清晰地傳達我們部門在本季度的表現,以及可能影響未來業績的因素。在此基礎上,我希望您能指導我如何更好地表達這些結論,以便使報告更具說服力。<br/>懇請您在方便時審閱這份報告,並將您的意見與建議發送至我的電子郵箱 ([email protected])。您的指導將對我的工作產生巨大幫助,再次感謝您的支持!<br/><br/>敬祝<br/>商祺<br><br>XXXX(您的名字)| </details> <details><summary><b>Table Understanding</b></summary> | Query | 這是一個表格,其中第一行是標題,後續的每一行都是一隻企鵝:<br><br>姓名,年齡,身高(cm),體重(kg)<br>路易斯, 7, 50, 11<br>伯納德,5, 80, 13<br>文森特, 9, 60, 11<br>格溫,8,70,15<br><br>請問最高的企鵝是哪隻? | |---|---| | **Breexe** | 在提供的表格中,最高的企鵝是伯納德。他的身高為 80 公分。以下是各企鵝的詳細資料:<br><br>姓名, 年齡, 身高(cm), 體重(kg)<br>路易斯, 7, 50, 11<br>伯納德, 5, 80, 13 (最高)<br>文森特, 9, 60, 11<br>格溫, 8, 70, 15 | </details> <details><summary><b>Language Assistance</b></summary> | Query | 1️⃣<br>修正以下英語句子中的錯誤<br>On the other words, making friends with smarter people can help people make money. <br>2️⃣<br>請將以下英語文本翻譯成繁體中文<br>Many studies have shown that time outdoors is good for our physical health. Three wilderness education experts explain why periods alone in nature also provide mental and spiritual benefits. | |---|---| | **Breexe** | 1️⃣<br>修正以下英語句子中的錯誤:<br>在另一方面,與更聰明的人交朋友可以幫助人賺錢。<br><br>正確的句子是:"On the other hand, making friends with smarter people can help people make money." 
<br><br>2️⃣<br>許多研究顯示,待在戶外對身體健康有好處。三位荒野教育專家解釋了為什麼單獨在大自然中待一段時間也能提供心理和靈性上的益處。<br><br>正確的繁體中文版本是:「很多研究表明,在戶外的時間對我們的身體健康是有益的。三位野地教育專家解釋了為什麼在大自然中獨處也會帶來心理和精神上的利益。」| </details> <details><summary><b>Summary</b></summary> | Query | """<br>聯發科技集團旗下的人工智慧研究單位聯發創新基地,昨 (11/23) 日與臺北市政府資訊局和國立臺北科技大學簽署合作備忘錄,將其自主研發的中文大型語言模型授權予臺北市政府資訊局,並由北科大協助部署應用。透過生成式人工智慧工具的導入,為臺北市政府同仁建構兼具資訊安全、高生產力的智慧工作模式。這也是業界、學界和政府機關三方攜手推動中文生成式人工智慧發展的重要里程碑。<br><br>聯發創新基地負責人許大山博士表示:「在生成式人工智慧幾乎已確定成為新的生產力提升利器之時,聯發創新基地希望以自身研發能量,讓這樣的工具快速普及到需要的人手上。」<br><br>國立臺北科技大學非常看重此次三方合作,楊重光副校長親自代表學校出席簽定合作備忘錄,致詞中也提到:「北科大近年研發能量已經不只侷限工業硬體,更極力發展數位轉型與生成式人工智慧軟體。此次以學術界角色,參與臺北市政府與聯發創新基地合作,可提供研究能量協助進行提詞優化、辦公室自動化程度提升、公共服務改善、智慧城市多元應用,由學術創新使生成式人工智慧發展可以超越業界期待,並期許多起合作案例能帶動更多數位轉型的成功發展。<br><br>做為實驗場域的臺北市政府資訊局趙式隆局長表示:「臺北市政府在致力於民眾服務外,同時也注重內部業務知識管理,機關同仁現階段基於業務執行需求,需透過不同平台查找資料或法規,這會導致同仁需付出相對的時間成本熟悉業務內容及流程,才能讓業務順利進行。我們希望透過生成式人工智慧工具的輔助,讓同仁以自然語言方式詢問即可查詢相關規定或函釋,減輕同仁負擔。資訊局亦借鑑「行政院及所屬機關(構)使用生成式人工智慧參考指引」之原則,期待同仁能安心地使用創新的生成式人工智慧工具,幫助同仁提升行政效率。」<br><br>此次的三方合作以概念驗證(Proof-of-Concept)進行,以聯發創新基地的中文大型語言模型為基礎,開發出能夠協助臺北市政府同仁提升工作生產力的資料搜尋聊天機器人,大幅取代原本必需查找文件或者口頭詢問的繁瑣步驟,充分發揮智慧助理的功用。該模型以地端方式授權,資料留存於市府,維護市民資訊保密權益。此外,臺北市政府可提供機密資訊以外的使用回饋予聯發創新基地,做為後續調校改進之依據,以實際使用經驗促進未來語言模型發展。<br>"""<br><br>請用三點來摘要以上內容 | |---|---| | **Breexe** | 1. 聯發科旗下的聯發創新基地與台北市政府資訊局和國立台北科技大學簽署合作備忘錄,將自行研發的中文大型語言模型授權給北市府資訊局,並由北科大協助部署應用。<br>2. 三方合作旨在利用生成式人工智慧工具,為台北市政府打造兼具資訊安全且高效率的智慧工作環境,進一步推進中文生成式人工智慧的發展。<br>3. 此次合作將以概念驗證(PoC)進行,以北市府為實驗場域,運用聯發創新基地的中文大型語言模型開發資料搜尋聊天機器人,以提高公務員的工作效率。 | </details> <details><summary><b>Roleplay</b></summary> | Query | 1️⃣<br>現在你是機器學習工程師了。你的工作是解釋複雜的機器學習概念,用簡化方式讓沒有技術背景的客戶理解並信任你的產品。讓我們從這個問題開始:「什麼是語言模型?它是使用標註的資料還是未標註的資料來訓練?」<br>2️⃣<br>這是真的嗎?我聽說其他一些公司有不同的作法來執行這項工作而且可以做得更安全。 | |---|---| | **Breexe** | 1️⃣ <br>首先,讓我們簡單地了解一下「語言模型」(Language Model)。它在自然語言處理(NLP)中扮演著重要角色,主要用於預測和生成文本。換句話說,語言模型可以根據給定的上下文,推斷出可能的詞彙或句子。<br><br>想像你正在閱讀一本書,而書中的每個字都代表一個「單詞」。語言模型就像是一位聰明的小助手,他能分析整本書籍,找出哪些單詞通常會一起出現、哪些不太可能連在一起。有了這些知識後,當小助手遇到新的文本時,他就能猜測接下來的單詞,或者自動完成你想要說的話。<br><br>現在,談到標註與未標註的資料。在機器學習領域,有兩種主要類型的資料:標註資料(Labeled Data)和未標註資料(Unlabeled Data)。標註資料是指那些已標記類別或結果的樣本,例如將圖片分類為貓或狗;而未標註資料則指那些尚未被標記的樣本。<br><br>回到語言模型。在早期,大多數語言模型都是基於未標註的資料進行訓練。研究人員收集大量的文本,然後利用統計方法計算單詞之間的關聯性。然而,隨著深度學習的崛起,現代語言模型越來越多地採用標註資料進行訓練。在這種情況下,標註資料包括具有正確答案的文本配對,如問句與回答、文章段落與摘要等。<br><br>在訓練過程中,機器學習演算法將不斷調整參數,以提高對新文本的預測能力。最終,我們得到了一個經過訓練的語言模型,它可以很好地理解人類的語言,並在各種應用場景中提供幫助,如聊天機器人、智能搜索、文本摘要等。<br><br>希望以上的解釋能讓您更容易理解語言模型以及其如何運用標註和未標註的資料。如果您有其他疑問,請隨時提出!<br><br>2️⃣ <br>是的,您的疑慮是有道理的。在人工智慧 (AI) 和機器學習 (ML) 領域,不同公司確實可能採取不同的策略和方法來建立語言模型。以下是我們進一步探討的主要原因:<br><br>1. 安全性和隱私:確保用戶數據安全和保護個人信息是各公司關注的重點。因此,有些公司可能傾向於使用未標註的公開數據集,而不是收集和使用標註的私人數據。這有助於減少潛在的隱私風險,同時保持高水平的安全性。<br>2. 成本效益:標註資料需要人力資源投入,以便將原始文本標記為特定類別或結果。這增加了成本,特別是在涉及到大量數據的情況下。相比之下,未標註的資料不需要額外的人工標註,從而降低了成本。<br>3. 性能差異:在某些情況下,未標註的資料可能比標註的資料更具多樣性和涵蓋範圍。通過充分利用未標註的資料,企業可以獲得更多的信息,從而提升模型性能。然而,標註的資料通常包含明確的目標,使模型能夠更好地理解所需的任務。因此,兩者各有利弊。<br>4. 模型設計:不同公司可能擁有獨特的演算法和模型設計,使其在處理標註或未標註的資料方面有所不同。例如,一種模型可能非常擅長處理標註的資料,另一種模型則可能在未標註的資料上表現得更好。<br>5. 
業務需求:最後,不同公司可能因其業務需求而在語言模型的開發和部署方面存在差異。例如,一家專注於社交媒體平台的公司可能需要一個高度個性化的聊天機器人,而另一家公司可能需要一個通用的文本摘要工具。在這種情況下,它們可能需要不同類型的語言模型。<br>總之,不同公司在語言模型的培訓和部署方面可能存在差異,這取決於他們的安全考慮、成本控制、性能要求、模型設計和業務需求。無論哪種做法,最重要的是確保所使用的方法符合行業標準和法律法規,並且在保護用戶數據和隱私的前提下提供良好的服務。| </details> <details><summary><b>Extraction</b></summary> | Query | """<br>〔記者許國楨/台中報導〕有金融背景外號「李董」的李示水,涉嫌自11年前開始成立投資集團,非法經營外匯、期貨,還以保證獲利為由向投資人吸金超過249億元,案經台中檢警調聯手偵辦,分兩波行動將李男及成員共47人拘提到案,查扣李名下93筆房地產,以及包括賓利、藍寶堅尼等5輛豪車,多金程度令人咋舌。<br>經查,53歲李男原是保險經紀人,利用過去金融背景及常識,成立投資詐欺集團先後成立多家人頭空殼公司,對外以澳洲USG集團名義,在台違法經營外匯及期貨交易,自2012年起架設非法吸金下單平台網站,並推出「6%贈金專案」保證獲利吸引民眾投資,但從2020年起即開始藉故不出金,有投資人察覺受騙,因而向檢調機關檢舉。<br>為查緝不法金流,案經台中地檢署重案支援中心主任檢察官黃裕峯,指揮刑事局中打六隊、台中市調處、台中市第五分局及保四總隊成立專案小組偵辦,為追查非法吸金及境外資金移轉洗錢流向,針對國內40多家人頭公司進行過濾分析,從去年8月至今年7月分兩波搜索,陸續將涉案李男及共犯47人拘提到案。<br>為追徵不法吸金犯罪所得,同時查扣公告現價超過1.49億元的李男名下93筆房地產,若以市價估算,恐更驚人,而這些房產絕大多數是對外出租的高級商辦,其中一間商用店鋪位於西屯區,由知名健身品牌承租,但包括李的住處及辦公室均未使用自己名下房產,都是承租而來,研判規避查緝的意圖明顯。<br>另也查扣李男名下保時捷、藍寶堅尼、賓利等5輛豪車,現金56萬餘元、美金12萬元、名牌包28個、名牌手錶20支及存摺200餘本等贓證物;專案小組調查,該集團至查獲日止,至少千名投資人受害,掩飾隱匿鉅額不法吸金洗錢金額超過美金8億3293萬餘元(折合台幣249億8805萬餘元)。<br>而全案經移送台中地檢署後,檢方近日偵結,將李等涉案47人依違反銀行法等罪起訴,刑事局中打呼籲民眾,任何投資應透過國內合法證券期貨商進行下單匯款,勿輕信「保證獲利」投資方案,極可能都是詐騙,最後血本無歸,如有發現「保證獲利」投資專案招募或廣告,請立即向警察機關檢舉或撥打165反詐騙諮詢專線即時查證。<br>而檢警將持續共同全力守護人民財產,貫徹打擊不法金流專案,維持社會安定及國家經濟穩定,展現政府打擊非法吸金及詐欺犯罪決心。<br>"""<br>請根據以上新聞內容,抓取出可能罪犯,及其可能犯的罪狀。由JSON格式(List of Dict)輸出,每個Dict包含兩個key: name 和 crimes | |---|---| | **Breexe** | [<br>{<br>"name": "李示水",<br>"crimes": ["非法經營外匯、期貨", "非法吸金", "詐欺"]<br>},<br>{<br>"name": "共犯47人",<br>"crimes": ["違反銀行法", "詐欺"]<br>}<br>] | </details> <details><summary><b>Coding</b></summary> | Query | 實作一個函數以 O(1) 的空間複雜度和 O(n) 的時間複雜度,找到兩個不同大小的已排序陣列的中位數。 | |---|---| | **Breexe** | 首先,我們需要了解如何在 O(1) 的空間複雜度下計算出兩個已排序陣列的中位數。這裡提供了一個演算法:<br><br>假設有兩個已排序陣列 A 和 B,其長度分別為 m 和 n (m <= n)。我們將利用以下步驟來計算中位數:<br>1.找出兩個陣列的中間元素索引 i = (m + n - 1)/2 和 j = m + n/2。注意,如果 n 是奇數,則取整後的 i 和 j 可能相同;如果 n 是偶數,i 和 j 分別表示兩個中位數的位置。<br>2.比較 A[i-1] 和 B[j-1]、A[i] 和 B[j]。<br>3.如果 A[i-1] > B[j-1],則中位數位於 B[0..j-1] 區間,因此可以忽略 A[i..m] 區間。反之,如果 A[i] < B[j],則可以忽略 B[j+1..n] 區間。<br>4.重複上述步驟,直到剩下一個區間。這個區間包含兩個陣列的中位數。<br>5.最後,計算中位數。如果 n 是奇數,中位數就是該區間的中間元素;如果 n 是偶數,中位數就是該區間的前兩個元素之平均值。<br><br>以下是 Python 程式碼範例: <br>\`\`\`<br>def find_median(A, B):<br>  m, n = len(A), len(B)<br> <br>  if m > n:<br>   A, B, m, n = B, A, n, m # Ensure m <= n<br> <br>  imin, imax = 0, m<br>  while imin <= imax:<br>   i = (imin + imax) // 2<br>   j = (m + n + 1) // 2 - i<br> <br>   # If i is too small or too large, then adjust it accordingly<br>   if i < m and B[j-1] > A[i]:<br>    imin = i + 1<br>   elif i > 0 and A[i-1] > B[j]:<br>    imax = i - 1<br>   else:<br>    max_left_A = float('-inf') if i == 0 else A[i-1]<br>    min_right_A = float('inf') if i == m else A[i]<br> <br>    max_left_B = float('-inf') if j == 0 else B[j-1]<br>    min_right_B = float('inf') if j == n else B[j]<br> <br>    if (m + n) % 2 == 0:<br>     return (max(max_left_A, max_left_B) + min(min_right_A, min_right_B)) / 2<br>    else:<br>     return max(max_left_A, max_left_B)<br><br>  raise ValueError("No median found")<br>\`\`\`<br><br>以上演算法符合要求,在 O(1) 的空間複雜度下完成任務,並且在 O(n) 的時間複雜度內運算。| </details> <details><summary><b>Math</b></summary> | Query | 三角形的頂點位於點 (0, 0)、(-1, 1) 和 (3, 3)。三角形的面積是多少?<br><br>Think it step by step | |---|---| | **Breexe** | 首先,我們將利用三角形面積公式來計算這個問題。三角形面積公式為:<br><br>面積 = \|(x1 * (y2 - y3) + x2 * (y3 - y1) + x3 * (y1 - y2)) / 2\|<br><br>在這個問題中,三角形的頂點分別位於:<br>A(0, 0)、B(-1, 1)、C(3, 3)<br><br>接下來,我們將代入公式計算:<br><br>面積 = \|(0 * (1 - 3) + (-1) * (3 - 0) + 3 * (0 - 1)) / 2\|<br>面積 = \|(0 - 3 - 3) / 2\|<br>面積 = \|-6 / 2\|<br>面積 = 
3<br><br>因此,三角形的面積為 3 平方單位。 | </details> <details><summary><b>Taiwan Knowledge</b></summary> | Query | 我剛來到台北工作,不是很了解這邊的捷運系統,請幫我介紹一下 | |---|---| | **Breexe** | 歡迎你來到台北!台北擁有便捷且現代化的大眾運輸系統,其中最主要的是台北捷運(Metro Taipei)。以下是一些基本資訊和路線介紹:<br>1. 台北捷運共有五種不同的路線:淡水信義線 (紅線)、松山新店線 (綠線)、中和新蘆線 (橘線)、文湖線 (棕線) 以及板南線 (藍線)。各線之間有轉乘站相連,方便旅客在城市間移動。<br>2. 淡水信義線:這條線路由北投延伸至象山,途經士林、圓山、雙連等熱門景點。它與中和新蘆線和松山新店線在中正紀念堂站及東門站轉乘;與板南線在台北車站轉乘。<br>3. 松山新店線:從松山延伸至新店,經過南京三民、松江南京、忠孝新生等熱鬧商業區。它與淡水信義線在中正紀念堂站轉乘;與中和新蘆線在古亭站轉乘;與板南線在西門站轉乘。<br>4. 中和新蘆線:此路線由迴龍延伸至蘆洲,途經新莊、板橋、行天宮等區域。它與淡水信義線在東門站轉乘;與松山新店線在古亭站轉乘;與板南線在忠孝復興站轉乘。<br>5. 文湖線:這是環狀線,主要服務內湖科技園區和南港軟體園區等地。它與松山新店線在南京復興站轉乘;與淡水信義線在大安站轉乘。<br>6. 板南線:這條線路由土城延伸至南港展覽館,途經永寧、新埔民生、市政府等站點。它與淡水信義線在台北車站轉乘;與松山新店線在西門站轉乘;與中和新蘆線在忠孝復興站轉乘。<br>7. 票價:單程票費用為每段次新台幣$20元,使用悠遊卡可享折扣優惠。此外,台北捷運提供多種定期票供通勤者選擇,如「常客優惠」、「定期票」等。<br>8. 營運時間:週一至週四,五、六日及國定假日略有不同。一般來說,首班車約早上6點發車,末班車則約晚上12點左右。詳細時刻表可參考台北捷運官網。<br>9. 注意事項:進站時需刷卡或購票,出站時再刷一次卡片。禁止飲食、吸菸、攜帶危險物品等。<br>希望以上資訊能幫助你在台北順利地利用捷運系統。祝你旅途愉快! | </details> ## Features - Expanding the vocabulary dictionary size from 32k to 62k to better support Traditional Chinese - 8k-token context length - Multi-turn dialogue (without special handling for harmfulness) - Sparse mixture of experts (MoE) ## Inference Performance In this test, we use the first 700 characters of the [web article](https://health.udn.com/health/story/5976/7699252?from=udn_ch1005_main_index) as the input and ask the model to write the same article again. All inferences run on 4 RTX A6000 GPUs (using `vllm`, with a tensor-parallel size of 4). | Models | ↓ Inference Time (sec)|Estimated Max Input Length (Char)| |--------------------------------------------------------------------|-------------------|--------------------------| | **Breexe-8x7B-Instruct-v0.1** | 27.83 | 11.1k | | Mixtral-8x7B-Instruct-v0.1 | 59.49 | 5.1k | ## Chat Model Performance **TMMLU+**, **Table**, and **MT-Bench-tw** source from [MediaTek-Research/TCEval-v2](https://huggingface.co/datasets/MediaTek-Research/TCEval-v2), which derives from [TCEval-v1](https://github.com/mtkresearch/MR-Models/tree/main/TC-Eval) and [ikala/tmmluplus](https://huggingface.co/datasets/ikala/tmmluplus). **MMLU** sources from [hails/mmlu_no_train](https://huggingface.co/datasets/hails/mmlu_no_train). **MT-Bench** source from [lmsys/mt_bench_human_judgments](https://huggingface.co/datasets/lmsys/mt_bench_human_judgments). We use [the code](https://github.com/mtkresearch/TCEval) revised from [EleutherAI/lm-evaluation-harness](https://github.com/EleutherAI/lm-evaluation-harness) to evaluate **TMMLU+**, **Table**, and **MMLU**. All choice problems adapt the selection by the log-likelihood. We use [the code](https://github.com/mtkresearch/TCEval) revised from [fastchat llm_judge](https://github.com/lm-sys/FastChat/tree/main/fastchat/llm_judge) (GPT4 as judge) to evaluate **MT-Bench-tw** and **MT-Bench**. 
| Models | |↑ MT-Bench-tw (Score)| TMMLU+ (ACC)|TTQA (ACC) | Table (ACC)| MT-Bench (Score)| MMLU (ACC) | |---------------------------------------------------------------------------------------------------------|--------|--------------------|--------------|-------------|-------------|------------------|-------------| | | |TC, Chat |TC, Knowledge |TC, Knowledge|TC, Reasoning|EN, Chat |EN, Knowledge| | | |0 shot | 0 shot |0 shot | 0 shot |0 shot | 0 shot | | [**Breexe-8x7B-Instruct-v0_1**](https://huggingface.co/MediaTek-Research/Breexe-8x7B-Instruct-v0_1) | 47B |7.2 | 48.92 | 75.22 | 39.58 | 7.8 | 69.90 | | [gpt-3.5-turbo-1106](https://openai.com) | |7.1 | 43.56 | 68.14 | 45.14 |7.9 | 67.09 | | [Qwen1.5-14B-Chat](https://huggingface.co/Qwen/Qwen1.5-14B-Chat) | 14B |7.1 | 51.76 | 70.79 | 51.39 |7.8 | 66.65 | | [Yi-34B-Chat](https://huggingface.co/01-ai/Yi-34B-Chat) | 34B |6.9 | 54.87 | 81.42 | 36.81 |7.6 | 71.04 | | [Qwen1.5-7B-Chat](https://huggingface.co/Qwen/Qwen1.5-7B-Chat) | 7B |6.4 | 44.65 | 67.86 | 34.72 |7.6 | 59.54 | | [Breeze-7B-Instruct-v1_0](https://huggingface.co/MediaTek-Research/Breeze-7B-Instruct-v1_0) | 7B |6.0 | 42.67 | 77.00 | 39.58 |7.4 | 61.73 | | [Yi-6B-Chat](https://huggingface.co/01-ai/Yi-6B-Chat) | 6B |5.0 | 44.79 | 72.57 | 25.69 |6.0 | 59.45 | | [Taiwan-LLM-13B-v2.0-chat](https://huggingface.co/yentinglin/Taiwan-LLM-13B-v2.0-chat) | 13B |5.0 | 29.47 | 67.26 | 23.61 |N/A* | 50.50 | | [Taiwan-LLM-7B-v2.1-chat](https://huggingface.co/yentinglin/Taiwan-LLM-7B-v2.1-chat) | 7B |4.2 | 28.08 | 51.33 | 31.25 |N/A* | 42.72 | \* Taiwan-LLM models responds to multi-turn questions (English) in Traditional Chinese. ## Base Model Performance **TMMLU+** and **Table** source from [MediaTek-Research/TCEval-v2](https://huggingface.co/datasets/MediaTek-Research/TCEval-v2), which derives from [TCEval-v1](https://github.com/mtkresearch/MR-Models/tree/main/TC-Eval) and [ikala/tmmluplus](https://huggingface.co/datasets/ikala/tmmluplus). **MMLU** sources from [hails/mmlu_no_train](https://huggingface.co/datasets/hails/mmlu_no_train). We use [the code](https://github.com/mtkresearch/TCEval) revised from [EleutherAI/lm-evaluation-harness](https://github.com/EleutherAI/lm-evaluation-harness) to evaluate **TMMLU+**, **Table**, and **MMLU**. All choice problems adapt the selection by the log-likelihood. 
| Models | |↑ TMMLU+ (ACC)| TTQA (ACC) | Table (ACC) | MMLU (ACC) | |-------------------------------------------------------------------------------------|------|--------------|-------------|-------------|-------------| | | |TC, Knowledge |TC, Knowledge|TC, Reasoning|EN, Knowledge| | | | 5 shot |5 shot | 5 shot | 5 shot | | [Yi-34B](https://huggingface.co/01-ai/Yi-34B) | 34B | 63.10 | 87.61 | 49.31 | 77.42 | | [Qwen1.5-14B](https://huggingface.co/Qwen/Qwen1.5-14B) | 14B | 54.30 | 78.76 | 54.86 | 70.17 | | **Breexe-8x7B-Base-v0_1** | 47B | 50.20 | 79.65 | 39.58 | 70.79 | | [Yi-6B](https://huggingface.co/01-ai/Yi-6B) | 6B | 49.63 | 75.22 | 34.72 | 65.35 | | [Qwen1.5-7B](https://huggingface.co/Qwen/Qwen1.5-7B) | 7B | 46.51 | 69.03 | 33.33 | 63.14 | | [Mixtral-8x7B-v0.1](https://huggingface.co/mistralai/Mixtral-8x7B-v0.1) | 47B | 46.10 | 64.60 | 47.22 | 72.94 | | [Breeze-7B-Base-v1_0](https://huggingface.co/MediaTek-Research/Breeze-7B-Base-v1_0) | 7B | 42.67 | 75.22 | 31.99 | 61.24 | | [Mistral-7B-v0.1](https://huggingface.co/mistralai/Mistral-7B-v0.1) | 7B | 36.93 | 53.10 | 27.78 | 64.89 | ## Use in Transformers First install direct dependencies: ``` pip install transformers torch accelerate ``` If you want faster inference using flash-attention2, you need to install these dependencies: ```bash pip install packaging ninja pip install flash-attn ``` Then load the model in transformers: ```python from transformers import AutoModelForCausalLM, AutoTokenizer import torch model = AutoModelForCausalLM.from_pretrained( "MediaTek-Research/Breexe-8x7B-Instruct-v0_1", device_map="auto", torch_dtype=torch.bfloat16, attn_implementation="flash_attention_2" # optional ) ``` The structure of the query is ```txt <s> SYS_PROMPT [INST] QUERY1 [/INST] RESPONSE1 [INST] QUERY2 [/INST] ``` where `SYS_PROMPT`, `QUERY1`, `RESPONSE1`, and `QUERY2` can be provided by the user. The suggested default `SYS_PROMPT` is ```txt You are a helpful AI assistant built by MediaTek Research. The user you are helping speaks Traditional Chinese and comes from Taiwan. ``` We also integrate `chat_template` into [tokenizer_config.json](tokenizer_config.json), so you can `apply_chat_template` to get the prompt. ```python >>> from transformers import AutoTokenizer >>> tokenizer = AutoTokenizer.from_pretrained("MediaTek-Research/Breexe-8x7B-Instruct-v0_1") >>> chat = [ ... {"role": "user", "content": "你好,請問你可以完成什麼任務?"}, ... {"role": "assistant", "content": "你好,我可以幫助您解決各種問題、提供資訊和協助您完成許多不同的任務。例如:回答技術問題、提供建議、翻譯文字、尋找資料或協助您安排行程等。請告訴我如何能幫助您。"}, ... {"role": "user", "content": "太棒了!"}, ... ] >>> tokenizer.apply_chat_template(chat, tokenize=False) "<s>You are a helpful AI assistant built by MediaTek Research. The user you are helping speaks Traditional Chinese and comes from Taiwan. [INST] 你好,請問你可以完成什麼任務? [/INST] 你好,我可以幫助您解決各種問題、提供資訊和協助您完成許多不同的任務。例如:回答技術問題、提供建議、翻譯文字、尋找資料或協助您安排行程等。請告訴我如何能幫助您。 [INST] 太棒了! [/INST] " # Tokenized results # ['▁', '你好', ',', '請問', '你', '可以', '完成', '什麼', '任務', '?'] # ['▁', '你好', ',', '我', '可以', '幫助', '您', '解決', '各種', '問題', '、', '提供', '資訊', '和', '協助', '您', '完成', '許多', '不同', '的', '任務', '。', '例如', ':', '回答', '技術', '問題', '、', '提供', '建議', '、', '翻譯', '文字', '、', '尋找', '資料', '或', '協助', '您', '安排', '行程', '等', '。', '請', '告訴', '我', '如何', '能', '幫助', '您', '。'] # ['▁', '太', '棒', '了', '!'] ``` ## Citation ``` @article{breexe8x7b2024, title={}, author={}, journal={arXiv}, year={2024} } ```
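Since this repository ships GGUF quantizations, here is a hedged sketch (not part of the original card) of running one of the files listed in the table above with llama-cpp-python. The chosen file, context size, and GPU offload settings are illustrative assumptions; the prompt follows the query structure documented above, and the BOS token is assumed to be added by the tokenizer.

```python
# Hedged sketch: load a GGUF quantization with llama-cpp-python and run one turn.
from llama_cpp import Llama

llm = Llama(
    model_path="BreeXe-8x7B-Inst.Q4_K.gguf",  # any of the quantizations in the table above
    n_ctx=8192,       # the card reports an 8k-token context length
    n_gpu_layers=-1,  # offload all layers if VRAM allows; use 0 for CPU-only
)

# Prompt assembled according to the documented structure:
#   SYS_PROMPT [INST] QUERY [/INST]
prompt = (
    "You are a helpful AI assistant built by MediaTek Research. The user you are helping "
    "speaks Traditional Chinese and comes from Taiwan. "
    "[INST] 你好,請問你可以完成什麼任務? [/INST] "
)
output = llm(prompt, max_tokens=256, temperature=0.01)
print(output["choices"][0]["text"])
```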
[ "SUMMARIZATION" ]
[ "BEAR" ]
msyukorai/nomic-embed-text-v1.5-Q4_0-GGUF
msyukorai
sentence-similarity
[ "sentence-transformers", "gguf", "feature-extraction", "sentence-similarity", "mteb", "transformers", "transformers.js", "llama-cpp", "gguf-my-repo", "en", "base_model:nomic-ai/nomic-embed-text-v1.5", "base_model:quantized:nomic-ai/nomic-embed-text-v1.5", "license:apache-2.0", "model-index", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2025-01-15T23:22:52
2025-01-15T23:22:55
176
0
--- base_model: nomic-ai/nomic-embed-text-v1.5 language: - en library_name: sentence-transformers license: apache-2.0 pipeline_tag: sentence-similarity tags: - feature-extraction - sentence-similarity - mteb - transformers - transformers.js - llama-cpp - gguf-my-repo model-index: - name: epoch_0_model results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 75.20895522388058 - type: ap value: 38.57605549557802 - type: f1 value: 69.35586565857854 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 91.8144 - type: ap value: 88.65222882032363 - type: f1 value: 91.80426301643274 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 47.162000000000006 - type: f1 value: 46.59329642263158 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 24.253 - type: map_at_10 value: 38.962 - type: map_at_100 value: 40.081 - type: map_at_1000 value: 40.089000000000006 - type: map_at_3 value: 33.499 - type: map_at_5 value: 36.351 - type: mrr_at_1 value: 24.609 - type: mrr_at_10 value: 39.099000000000004 - type: mrr_at_100 value: 40.211000000000006 - type: mrr_at_1000 value: 40.219 - type: mrr_at_3 value: 33.677 - type: mrr_at_5 value: 36.469 - type: ndcg_at_1 value: 24.253 - type: ndcg_at_10 value: 48.010999999999996 - type: ndcg_at_100 value: 52.756 - type: ndcg_at_1000 value: 52.964999999999996 - type: ndcg_at_3 value: 36.564 - type: ndcg_at_5 value: 41.711999999999996 - type: precision_at_1 value: 24.253 - type: precision_at_10 value: 7.738 - type: precision_at_100 value: 0.98 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 15.149000000000001 - type: precision_at_5 value: 11.593 - type: recall_at_1 value: 24.253 - type: recall_at_10 value: 77.383 - type: recall_at_100 value: 98.009 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 45.448 - type: recall_at_5 value: 57.965999999999994 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 45.69069567851087 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 36.35185490976283 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 61.71274951450321 - type: mrr value: 76.06032625423207 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 86.73980520022269 - type: cos_sim_spearman value: 84.24649792685918 - type: euclidean_pearson value: 85.85197641158186 - type: euclidean_spearman value: 84.24649792685918 - type: manhattan_pearson value: 
86.26809552711346 - type: manhattan_spearman value: 84.56397504030865 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 84.25324675324674 - type: f1 value: 84.17872280892557 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 38.770253446400886 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 32.94307095497281 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 32.164 - type: map_at_10 value: 42.641 - type: map_at_100 value: 43.947 - type: map_at_1000 value: 44.074999999999996 - type: map_at_3 value: 39.592 - type: map_at_5 value: 41.204 - type: mrr_at_1 value: 39.628 - type: mrr_at_10 value: 48.625 - type: mrr_at_100 value: 49.368 - type: mrr_at_1000 value: 49.413000000000004 - type: mrr_at_3 value: 46.400000000000006 - type: mrr_at_5 value: 47.68 - type: ndcg_at_1 value: 39.628 - type: ndcg_at_10 value: 48.564 - type: ndcg_at_100 value: 53.507000000000005 - type: ndcg_at_1000 value: 55.635999999999996 - type: ndcg_at_3 value: 44.471 - type: ndcg_at_5 value: 46.137 - type: precision_at_1 value: 39.628 - type: precision_at_10 value: 8.856 - type: precision_at_100 value: 1.429 - type: precision_at_1000 value: 0.191 - type: precision_at_3 value: 21.268 - type: precision_at_5 value: 14.649000000000001 - type: recall_at_1 value: 32.164 - type: recall_at_10 value: 59.609 - type: recall_at_100 value: 80.521 - type: recall_at_1000 value: 94.245 - type: recall_at_3 value: 46.521 - type: recall_at_5 value: 52.083999999999996 - type: map_at_1 value: 31.526 - type: map_at_10 value: 41.581 - type: map_at_100 value: 42.815999999999995 - type: map_at_1000 value: 42.936 - type: map_at_3 value: 38.605000000000004 - type: map_at_5 value: 40.351 - type: mrr_at_1 value: 39.489999999999995 - type: mrr_at_10 value: 47.829 - type: mrr_at_100 value: 48.512 - type: mrr_at_1000 value: 48.552 - type: mrr_at_3 value: 45.754 - type: mrr_at_5 value: 46.986 - type: ndcg_at_1 value: 39.489999999999995 - type: ndcg_at_10 value: 47.269 - type: ndcg_at_100 value: 51.564 - type: ndcg_at_1000 value: 53.53099999999999 - type: ndcg_at_3 value: 43.301 - type: ndcg_at_5 value: 45.239000000000004 - type: precision_at_1 value: 39.489999999999995 - type: precision_at_10 value: 8.93 - type: precision_at_100 value: 1.415 - type: precision_at_1000 value: 0.188 - type: precision_at_3 value: 20.892 - type: precision_at_5 value: 14.865999999999998 - type: recall_at_1 value: 31.526 - type: recall_at_10 value: 56.76 - type: recall_at_100 value: 75.029 - type: recall_at_1000 value: 87.491 - type: recall_at_3 value: 44.786 - type: recall_at_5 value: 50.254 - type: map_at_1 value: 40.987 - type: map_at_10 value: 52.827 - type: map_at_100 value: 53.751000000000005 - type: map_at_1000 value: 53.81 - type: map_at_3 value: 49.844 - type: map_at_5 value: 51.473 - type: mrr_at_1 value: 46.833999999999996 - type: mrr_at_10 value: 56.389 - type: mrr_at_100 value: 57.003 - type: mrr_at_1000 value: 57.034 - type: mrr_at_3 value: 54.17999999999999 - type: mrr_at_5 
value: 55.486999999999995 - type: ndcg_at_1 value: 46.833999999999996 - type: ndcg_at_10 value: 58.372 - type: ndcg_at_100 value: 62.068 - type: ndcg_at_1000 value: 63.288 - type: ndcg_at_3 value: 53.400000000000006 - type: ndcg_at_5 value: 55.766000000000005 - type: precision_at_1 value: 46.833999999999996 - type: precision_at_10 value: 9.191 - type: precision_at_100 value: 1.192 - type: precision_at_1000 value: 0.134 - type: precision_at_3 value: 23.448 - type: precision_at_5 value: 15.862000000000002 - type: recall_at_1 value: 40.987 - type: recall_at_10 value: 71.146 - type: recall_at_100 value: 87.035 - type: recall_at_1000 value: 95.633 - type: recall_at_3 value: 58.025999999999996 - type: recall_at_5 value: 63.815999999999995 - type: map_at_1 value: 24.587 - type: map_at_10 value: 33.114 - type: map_at_100 value: 34.043 - type: map_at_1000 value: 34.123999999999995 - type: map_at_3 value: 30.45 - type: map_at_5 value: 31.813999999999997 - type: mrr_at_1 value: 26.554 - type: mrr_at_10 value: 35.148 - type: mrr_at_100 value: 35.926 - type: mrr_at_1000 value: 35.991 - type: mrr_at_3 value: 32.599000000000004 - type: mrr_at_5 value: 33.893 - type: ndcg_at_1 value: 26.554 - type: ndcg_at_10 value: 38.132 - type: ndcg_at_100 value: 42.78 - type: ndcg_at_1000 value: 44.919 - type: ndcg_at_3 value: 32.833 - type: ndcg_at_5 value: 35.168 - type: precision_at_1 value: 26.554 - type: precision_at_10 value: 5.921 - type: precision_at_100 value: 0.8659999999999999 - type: precision_at_1000 value: 0.109 - type: precision_at_3 value: 13.861 - type: precision_at_5 value: 9.605 - type: recall_at_1 value: 24.587 - type: recall_at_10 value: 51.690000000000005 - type: recall_at_100 value: 73.428 - type: recall_at_1000 value: 89.551 - type: recall_at_3 value: 37.336999999999996 - type: recall_at_5 value: 43.047000000000004 - type: map_at_1 value: 16.715 - type: map_at_10 value: 24.251 - type: map_at_100 value: 25.326999999999998 - type: map_at_1000 value: 25.455 - type: map_at_3 value: 21.912000000000003 - type: map_at_5 value: 23.257 - type: mrr_at_1 value: 20.274 - type: mrr_at_10 value: 28.552 - type: mrr_at_100 value: 29.42 - type: mrr_at_1000 value: 29.497 - type: mrr_at_3 value: 26.14 - type: mrr_at_5 value: 27.502 - type: ndcg_at_1 value: 20.274 - type: ndcg_at_10 value: 29.088 - type: ndcg_at_100 value: 34.293 - type: ndcg_at_1000 value: 37.271 - type: ndcg_at_3 value: 24.708 - type: ndcg_at_5 value: 26.809 - type: precision_at_1 value: 20.274 - type: precision_at_10 value: 5.361 - type: precision_at_100 value: 0.915 - type: precision_at_1000 value: 0.13 - type: precision_at_3 value: 11.733 - type: precision_at_5 value: 8.556999999999999 - type: recall_at_1 value: 16.715 - type: recall_at_10 value: 39.587 - type: recall_at_100 value: 62.336000000000006 - type: recall_at_1000 value: 83.453 - type: recall_at_3 value: 27.839999999999996 - type: recall_at_5 value: 32.952999999999996 - type: map_at_1 value: 28.793000000000003 - type: map_at_10 value: 38.582 - type: map_at_100 value: 39.881 - type: map_at_1000 value: 39.987 - type: map_at_3 value: 35.851 - type: map_at_5 value: 37.289 - type: mrr_at_1 value: 34.455999999999996 - type: mrr_at_10 value: 43.909 - type: mrr_at_100 value: 44.74 - type: mrr_at_1000 value: 44.786 - type: mrr_at_3 value: 41.659 - type: mrr_at_5 value: 43.010999999999996 - type: ndcg_at_1 value: 34.455999999999996 - type: ndcg_at_10 value: 44.266 - type: ndcg_at_100 value: 49.639 - type: ndcg_at_1000 value: 51.644 - type: ndcg_at_3 value: 39.865 - type: ndcg_at_5 value: 
41.887 - type: precision_at_1 value: 34.455999999999996 - type: precision_at_10 value: 7.843999999999999 - type: precision_at_100 value: 1.243 - type: precision_at_1000 value: 0.158 - type: precision_at_3 value: 18.831999999999997 - type: precision_at_5 value: 13.147 - type: recall_at_1 value: 28.793000000000003 - type: recall_at_10 value: 55.68300000000001 - type: recall_at_100 value: 77.99000000000001 - type: recall_at_1000 value: 91.183 - type: recall_at_3 value: 43.293 - type: recall_at_5 value: 48.618 - type: map_at_1 value: 25.907000000000004 - type: map_at_10 value: 35.519 - type: map_at_100 value: 36.806 - type: map_at_1000 value: 36.912 - type: map_at_3 value: 32.748 - type: map_at_5 value: 34.232 - type: mrr_at_1 value: 31.621 - type: mrr_at_10 value: 40.687 - type: mrr_at_100 value: 41.583 - type: mrr_at_1000 value: 41.638999999999996 - type: mrr_at_3 value: 38.527 - type: mrr_at_5 value: 39.612 - type: ndcg_at_1 value: 31.621 - type: ndcg_at_10 value: 41.003 - type: ndcg_at_100 value: 46.617999999999995 - type: ndcg_at_1000 value: 48.82 - type: ndcg_at_3 value: 36.542 - type: ndcg_at_5 value: 38.368 - type: precision_at_1 value: 31.621 - type: precision_at_10 value: 7.396999999999999 - type: precision_at_100 value: 1.191 - type: precision_at_1000 value: 0.153 - type: precision_at_3 value: 17.39 - type: precision_at_5 value: 12.1 - type: recall_at_1 value: 25.907000000000004 - type: recall_at_10 value: 52.115 - type: recall_at_100 value: 76.238 - type: recall_at_1000 value: 91.218 - type: recall_at_3 value: 39.417 - type: recall_at_5 value: 44.435 - type: map_at_1 value: 25.732166666666668 - type: map_at_10 value: 34.51616666666667 - type: map_at_100 value: 35.67241666666666 - type: map_at_1000 value: 35.78675 - type: map_at_3 value: 31.953416666666662 - type: map_at_5 value: 33.333 - type: mrr_at_1 value: 30.300166666666673 - type: mrr_at_10 value: 38.6255 - type: mrr_at_100 value: 39.46183333333334 - type: mrr_at_1000 value: 39.519999999999996 - type: mrr_at_3 value: 36.41299999999999 - type: mrr_at_5 value: 37.6365 - type: ndcg_at_1 value: 30.300166666666673 - type: ndcg_at_10 value: 39.61466666666667 - type: ndcg_at_100 value: 44.60808333333334 - type: ndcg_at_1000 value: 46.91708333333334 - type: ndcg_at_3 value: 35.26558333333333 - type: ndcg_at_5 value: 37.220000000000006 - type: precision_at_1 value: 30.300166666666673 - type: precision_at_10 value: 6.837416666666667 - type: precision_at_100 value: 1.10425 - type: precision_at_1000 value: 0.14875 - type: precision_at_3 value: 16.13716666666667 - type: precision_at_5 value: 11.2815 - type: recall_at_1 value: 25.732166666666668 - type: recall_at_10 value: 50.578916666666665 - type: recall_at_100 value: 72.42183333333334 - type: recall_at_1000 value: 88.48766666666667 - type: recall_at_3 value: 38.41325 - type: recall_at_5 value: 43.515750000000004 - type: map_at_1 value: 23.951 - type: map_at_10 value: 30.974 - type: map_at_100 value: 31.804 - type: map_at_1000 value: 31.900000000000002 - type: map_at_3 value: 28.762 - type: map_at_5 value: 29.94 - type: mrr_at_1 value: 26.534000000000002 - type: mrr_at_10 value: 33.553 - type: mrr_at_100 value: 34.297 - type: mrr_at_1000 value: 34.36 - type: mrr_at_3 value: 31.391000000000002 - type: mrr_at_5 value: 32.525999999999996 - type: ndcg_at_1 value: 26.534000000000002 - type: ndcg_at_10 value: 35.112 - type: ndcg_at_100 value: 39.28 - type: ndcg_at_1000 value: 41.723 - type: ndcg_at_3 value: 30.902 - type: ndcg_at_5 value: 32.759 - type: precision_at_1 value: 26.534000000000002 
- type: precision_at_10 value: 5.445 - type: precision_at_100 value: 0.819 - type: precision_at_1000 value: 0.11 - type: precision_at_3 value: 12.986 - type: precision_at_5 value: 9.049 - type: recall_at_1 value: 23.951 - type: recall_at_10 value: 45.24 - type: recall_at_100 value: 64.12299999999999 - type: recall_at_1000 value: 82.28999999999999 - type: recall_at_3 value: 33.806000000000004 - type: recall_at_5 value: 38.277 - type: map_at_1 value: 16.829 - type: map_at_10 value: 23.684 - type: map_at_100 value: 24.683 - type: map_at_1000 value: 24.81 - type: map_at_3 value: 21.554000000000002 - type: map_at_5 value: 22.768 - type: mrr_at_1 value: 20.096 - type: mrr_at_10 value: 27.230999999999998 - type: mrr_at_100 value: 28.083999999999996 - type: mrr_at_1000 value: 28.166000000000004 - type: mrr_at_3 value: 25.212 - type: mrr_at_5 value: 26.32 - type: ndcg_at_1 value: 20.096 - type: ndcg_at_10 value: 27.989000000000004 - type: ndcg_at_100 value: 32.847 - type: ndcg_at_1000 value: 35.896 - type: ndcg_at_3 value: 24.116 - type: ndcg_at_5 value: 25.964 - type: precision_at_1 value: 20.096 - type: precision_at_10 value: 5 - type: precision_at_100 value: 0.8750000000000001 - type: precision_at_1000 value: 0.131 - type: precision_at_3 value: 11.207 - type: precision_at_5 value: 8.08 - type: recall_at_1 value: 16.829 - type: recall_at_10 value: 37.407000000000004 - type: recall_at_100 value: 59.101000000000006 - type: recall_at_1000 value: 81.024 - type: recall_at_3 value: 26.739 - type: recall_at_5 value: 31.524 - type: map_at_1 value: 24.138 - type: map_at_10 value: 32.275999999999996 - type: map_at_100 value: 33.416000000000004 - type: map_at_1000 value: 33.527 - type: map_at_3 value: 29.854000000000003 - type: map_at_5 value: 31.096 - type: mrr_at_1 value: 28.450999999999997 - type: mrr_at_10 value: 36.214 - type: mrr_at_100 value: 37.134 - type: mrr_at_1000 value: 37.198 - type: mrr_at_3 value: 34.001999999999995 - type: mrr_at_5 value: 35.187000000000005 - type: ndcg_at_1 value: 28.450999999999997 - type: ndcg_at_10 value: 37.166 - type: ndcg_at_100 value: 42.454 - type: ndcg_at_1000 value: 44.976 - type: ndcg_at_3 value: 32.796 - type: ndcg_at_5 value: 34.631 - type: precision_at_1 value: 28.450999999999997 - type: precision_at_10 value: 6.241 - type: precision_at_100 value: 0.9950000000000001 - type: precision_at_1000 value: 0.133 - type: precision_at_3 value: 14.801 - type: precision_at_5 value: 10.280000000000001 - type: recall_at_1 value: 24.138 - type: recall_at_10 value: 48.111 - type: recall_at_100 value: 71.245 - type: recall_at_1000 value: 88.986 - type: recall_at_3 value: 36.119 - type: recall_at_5 value: 40.846 - type: map_at_1 value: 23.244 - type: map_at_10 value: 31.227 - type: map_at_100 value: 33.007 - type: map_at_1000 value: 33.223 - type: map_at_3 value: 28.924 - type: map_at_5 value: 30.017 - type: mrr_at_1 value: 27.668 - type: mrr_at_10 value: 35.524 - type: mrr_at_100 value: 36.699 - type: mrr_at_1000 value: 36.759 - type: mrr_at_3 value: 33.366 - type: mrr_at_5 value: 34.552 - type: ndcg_at_1 value: 27.668 - type: ndcg_at_10 value: 36.381 - type: ndcg_at_100 value: 43.062 - type: ndcg_at_1000 value: 45.656 - type: ndcg_at_3 value: 32.501999999999995 - type: ndcg_at_5 value: 34.105999999999995 - type: precision_at_1 value: 27.668 - type: precision_at_10 value: 6.798 - type: precision_at_100 value: 1.492 - type: precision_at_1000 value: 0.234 - type: precision_at_3 value: 15.152 - type: precision_at_5 value: 10.791 - type: recall_at_1 value: 23.244 - type: 
recall_at_10 value: 45.979 - type: recall_at_100 value: 74.822 - type: recall_at_1000 value: 91.078 - type: recall_at_3 value: 34.925 - type: recall_at_5 value: 39.126 - type: map_at_1 value: 19.945 - type: map_at_10 value: 27.517999999999997 - type: map_at_100 value: 28.588 - type: map_at_1000 value: 28.682000000000002 - type: map_at_3 value: 25.345000000000002 - type: map_at_5 value: 26.555 - type: mrr_at_1 value: 21.996 - type: mrr_at_10 value: 29.845 - type: mrr_at_100 value: 30.775999999999996 - type: mrr_at_1000 value: 30.845 - type: mrr_at_3 value: 27.726 - type: mrr_at_5 value: 28.882 - type: ndcg_at_1 value: 21.996 - type: ndcg_at_10 value: 32.034 - type: ndcg_at_100 value: 37.185 - type: ndcg_at_1000 value: 39.645 - type: ndcg_at_3 value: 27.750999999999998 - type: ndcg_at_5 value: 29.805999999999997 - type: precision_at_1 value: 21.996 - type: precision_at_10 value: 5.065 - type: precision_at_100 value: 0.819 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_3 value: 12.076 - type: precision_at_5 value: 8.392 - type: recall_at_1 value: 19.945 - type: recall_at_10 value: 43.62 - type: recall_at_100 value: 67.194 - type: recall_at_1000 value: 85.7 - type: recall_at_3 value: 32.15 - type: recall_at_5 value: 37.208999999999996 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 18.279 - type: map_at_10 value: 31.052999999999997 - type: map_at_100 value: 33.125 - type: map_at_1000 value: 33.306000000000004 - type: map_at_3 value: 26.208 - type: map_at_5 value: 28.857 - type: mrr_at_1 value: 42.671 - type: mrr_at_10 value: 54.557 - type: mrr_at_100 value: 55.142 - type: mrr_at_1000 value: 55.169000000000004 - type: mrr_at_3 value: 51.488 - type: mrr_at_5 value: 53.439 - type: ndcg_at_1 value: 42.671 - type: ndcg_at_10 value: 41.276 - type: ndcg_at_100 value: 48.376000000000005 - type: ndcg_at_1000 value: 51.318 - type: ndcg_at_3 value: 35.068 - type: ndcg_at_5 value: 37.242 - type: precision_at_1 value: 42.671 - type: precision_at_10 value: 12.638 - type: precision_at_100 value: 2.045 - type: precision_at_1000 value: 0.26 - type: precision_at_3 value: 26.08 - type: precision_at_5 value: 19.805 - type: recall_at_1 value: 18.279 - type: recall_at_10 value: 46.946 - type: recall_at_100 value: 70.97200000000001 - type: recall_at_1000 value: 87.107 - type: recall_at_3 value: 31.147999999999996 - type: recall_at_5 value: 38.099 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 8.573 - type: map_at_10 value: 19.747 - type: map_at_100 value: 28.205000000000002 - type: map_at_1000 value: 29.831000000000003 - type: map_at_3 value: 14.109 - type: map_at_5 value: 16.448999999999998 - type: mrr_at_1 value: 71 - type: mrr_at_10 value: 77.68599999999999 - type: mrr_at_100 value: 77.995 - type: mrr_at_1000 value: 78.00200000000001 - type: mrr_at_3 value: 76.292 - type: mrr_at_5 value: 77.029 - type: ndcg_at_1 value: 59.12500000000001 - type: ndcg_at_10 value: 43.9 - type: ndcg_at_100 value: 47.863 - type: ndcg_at_1000 value: 54.848 - type: ndcg_at_3 value: 49.803999999999995 - type: ndcg_at_5 value: 46.317 - type: precision_at_1 value: 71 - type: precision_at_10 value: 34.4 - type: precision_at_100 value: 11.063 - type: precision_at_1000 value: 1.989 - type: precision_at_3 value: 52.333 - type: precision_at_5 value: 43.7 - type: recall_at_1 value: 8.573 - type: recall_at_10 value: 
25.615 - type: recall_at_100 value: 53.385000000000005 - type: recall_at_1000 value: 75.46000000000001 - type: recall_at_3 value: 15.429 - type: recall_at_5 value: 19.357 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 47.989999999999995 - type: f1 value: 42.776314451497555 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 74.13499999999999 - type: map_at_10 value: 82.825 - type: map_at_100 value: 83.096 - type: map_at_1000 value: 83.111 - type: map_at_3 value: 81.748 - type: map_at_5 value: 82.446 - type: mrr_at_1 value: 79.553 - type: mrr_at_10 value: 86.654 - type: mrr_at_100 value: 86.774 - type: mrr_at_1000 value: 86.778 - type: mrr_at_3 value: 85.981 - type: mrr_at_5 value: 86.462 - type: ndcg_at_1 value: 79.553 - type: ndcg_at_10 value: 86.345 - type: ndcg_at_100 value: 87.32 - type: ndcg_at_1000 value: 87.58200000000001 - type: ndcg_at_3 value: 84.719 - type: ndcg_at_5 value: 85.677 - type: precision_at_1 value: 79.553 - type: precision_at_10 value: 10.402000000000001 - type: precision_at_100 value: 1.1119999999999999 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_3 value: 32.413 - type: precision_at_5 value: 20.138 - type: recall_at_1 value: 74.13499999999999 - type: recall_at_10 value: 93.215 - type: recall_at_100 value: 97.083 - type: recall_at_1000 value: 98.732 - type: recall_at_3 value: 88.79 - type: recall_at_5 value: 91.259 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 18.298000000000002 - type: map_at_10 value: 29.901 - type: map_at_100 value: 31.528 - type: map_at_1000 value: 31.713 - type: map_at_3 value: 25.740000000000002 - type: map_at_5 value: 28.227999999999998 - type: mrr_at_1 value: 36.728 - type: mrr_at_10 value: 45.401 - type: mrr_at_100 value: 46.27 - type: mrr_at_1000 value: 46.315 - type: mrr_at_3 value: 42.978 - type: mrr_at_5 value: 44.29 - type: ndcg_at_1 value: 36.728 - type: ndcg_at_10 value: 37.456 - type: ndcg_at_100 value: 43.832 - type: ndcg_at_1000 value: 47 - type: ndcg_at_3 value: 33.694 - type: ndcg_at_5 value: 35.085 - type: precision_at_1 value: 36.728 - type: precision_at_10 value: 10.386 - type: precision_at_100 value: 1.701 - type: precision_at_1000 value: 0.22599999999999998 - type: precision_at_3 value: 22.479 - type: precision_at_5 value: 16.605 - type: recall_at_1 value: 18.298000000000002 - type: recall_at_10 value: 44.369 - type: recall_at_100 value: 68.098 - type: recall_at_1000 value: 87.21900000000001 - type: recall_at_3 value: 30.215999999999998 - type: recall_at_5 value: 36.861 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 39.568 - type: map_at_10 value: 65.061 - type: map_at_100 value: 65.896 - type: map_at_1000 value: 65.95100000000001 - type: map_at_3 value: 61.831 - type: map_at_5 value: 63.849000000000004 - type: mrr_at_1 value: 79.136 - type: mrr_at_10 value: 84.58200000000001 - type: mrr_at_100 value: 84.765 - type: mrr_at_1000 value: 84.772 - type: mrr_at_3 value: 83.684 - type: mrr_at_5 value: 84.223 - type: ndcg_at_1 value: 79.136 - type: ndcg_at_10 value: 72.622 - type: ndcg_at_100 value: 75.539 - type: ndcg_at_1000 value: 76.613 - type: ndcg_at_3 value: 68.065 - 
type: ndcg_at_5 value: 70.58 - type: precision_at_1 value: 79.136 - type: precision_at_10 value: 15.215 - type: precision_at_100 value: 1.7500000000000002 - type: precision_at_1000 value: 0.189 - type: precision_at_3 value: 44.011 - type: precision_at_5 value: 28.388999999999996 - type: recall_at_1 value: 39.568 - type: recall_at_10 value: 76.077 - type: recall_at_100 value: 87.481 - type: recall_at_1000 value: 94.56400000000001 - type: recall_at_3 value: 66.01599999999999 - type: recall_at_5 value: 70.97200000000001 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 85.312 - type: ap value: 80.36296867333715 - type: f1 value: 85.26613311552218 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 23.363999999999997 - type: map_at_10 value: 35.711999999999996 - type: map_at_100 value: 36.876999999999995 - type: map_at_1000 value: 36.923 - type: map_at_3 value: 32.034 - type: map_at_5 value: 34.159 - type: mrr_at_1 value: 24.04 - type: mrr_at_10 value: 36.345 - type: mrr_at_100 value: 37.441 - type: mrr_at_1000 value: 37.480000000000004 - type: mrr_at_3 value: 32.713 - type: mrr_at_5 value: 34.824 - type: ndcg_at_1 value: 24.026 - type: ndcg_at_10 value: 42.531 - type: ndcg_at_100 value: 48.081 - type: ndcg_at_1000 value: 49.213 - type: ndcg_at_3 value: 35.044 - type: ndcg_at_5 value: 38.834 - type: precision_at_1 value: 24.026 - type: precision_at_10 value: 6.622999999999999 - type: precision_at_100 value: 0.941 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.909 - type: precision_at_5 value: 10.871 - type: recall_at_1 value: 23.363999999999997 - type: recall_at_10 value: 63.426 - type: recall_at_100 value: 88.96300000000001 - type: recall_at_1000 value: 97.637 - type: recall_at_3 value: 43.095 - type: recall_at_5 value: 52.178000000000004 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.0095759233926 - type: f1 value: 92.78387794667408 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 75.0296397628819 - type: f1 value: 58.45699589820874 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.45662407531944 - type: f1 value: 71.42364781421813 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.07800941492937 - type: f1 value: 77.22799045640845 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 34.531234379250606 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 
30.941490381193802 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 30.3115090856725 - type: mrr value: 31.290667638675757 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.465 - type: map_at_10 value: 13.03 - type: map_at_100 value: 16.057 - type: map_at_1000 value: 17.49 - type: map_at_3 value: 9.553 - type: map_at_5 value: 11.204 - type: mrr_at_1 value: 43.653 - type: mrr_at_10 value: 53.269 - type: mrr_at_100 value: 53.72 - type: mrr_at_1000 value: 53.761 - type: mrr_at_3 value: 50.929 - type: mrr_at_5 value: 52.461 - type: ndcg_at_1 value: 42.26 - type: ndcg_at_10 value: 34.673 - type: ndcg_at_100 value: 30.759999999999998 - type: ndcg_at_1000 value: 39.728 - type: ndcg_at_3 value: 40.349000000000004 - type: ndcg_at_5 value: 37.915 - type: precision_at_1 value: 43.653 - type: precision_at_10 value: 25.789 - type: precision_at_100 value: 7.754999999999999 - type: precision_at_1000 value: 2.07 - type: precision_at_3 value: 38.596000000000004 - type: precision_at_5 value: 33.251 - type: recall_at_1 value: 5.465 - type: recall_at_10 value: 17.148 - type: recall_at_100 value: 29.768 - type: recall_at_1000 value: 62.239 - type: recall_at_3 value: 10.577 - type: recall_at_5 value: 13.315 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 37.008 - type: map_at_10 value: 52.467 - type: map_at_100 value: 53.342999999999996 - type: map_at_1000 value: 53.366 - type: map_at_3 value: 48.412 - type: map_at_5 value: 50.875 - type: mrr_at_1 value: 41.541 - type: mrr_at_10 value: 54.967 - type: mrr_at_100 value: 55.611 - type: mrr_at_1000 value: 55.627 - type: mrr_at_3 value: 51.824999999999996 - type: mrr_at_5 value: 53.763000000000005 - type: ndcg_at_1 value: 41.541 - type: ndcg_at_10 value: 59.724999999999994 - type: ndcg_at_100 value: 63.38700000000001 - type: ndcg_at_1000 value: 63.883 - type: ndcg_at_3 value: 52.331 - type: ndcg_at_5 value: 56.327000000000005 - type: precision_at_1 value: 41.541 - type: precision_at_10 value: 9.447 - type: precision_at_100 value: 1.1520000000000001 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 23.262 - type: precision_at_5 value: 16.314999999999998 - type: recall_at_1 value: 37.008 - type: recall_at_10 value: 79.145 - type: recall_at_100 value: 94.986 - type: recall_at_1000 value: 98.607 - type: recall_at_3 value: 60.277 - type: recall_at_5 value: 69.407 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 70.402 - type: map_at_10 value: 84.181 - type: map_at_100 value: 84.796 - type: map_at_1000 value: 84.81400000000001 - type: map_at_3 value: 81.209 - type: map_at_5 value: 83.085 - type: mrr_at_1 value: 81.02000000000001 - type: mrr_at_10 value: 87.263 - type: mrr_at_100 value: 87.36 - type: mrr_at_1000 value: 87.36 - type: mrr_at_3 value: 86.235 - type: mrr_at_5 value: 86.945 - type: ndcg_at_1 value: 81.01 - type: ndcg_at_10 value: 87.99900000000001 - type: ndcg_at_100 value: 89.217 - type: ndcg_at_1000 value: 89.33 - type: ndcg_at_3 value: 85.053 - type: ndcg_at_5 value: 86.703 - type: precision_at_1 value: 81.01 - type: precision_at_10 value: 13.336 - type: precision_at_100 value: 1.52 - type: precision_at_1000 value: 0.156 - 
type: precision_at_3 value: 37.14 - type: precision_at_5 value: 24.44 - type: recall_at_1 value: 70.402 - type: recall_at_10 value: 95.214 - type: recall_at_100 value: 99.438 - type: recall_at_1000 value: 99.928 - type: recall_at_3 value: 86.75699999999999 - type: recall_at_5 value: 91.44099999999999 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 56.51721502758904 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 61.054808572333016 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.578 - type: map_at_10 value: 11.036999999999999 - type: map_at_100 value: 12.879999999999999 - type: map_at_1000 value: 13.150999999999998 - type: map_at_3 value: 8.133 - type: map_at_5 value: 9.559 - type: mrr_at_1 value: 22.6 - type: mrr_at_10 value: 32.68 - type: mrr_at_100 value: 33.789 - type: mrr_at_1000 value: 33.854 - type: mrr_at_3 value: 29.7 - type: mrr_at_5 value: 31.480000000000004 - type: ndcg_at_1 value: 22.6 - type: ndcg_at_10 value: 18.616 - type: ndcg_at_100 value: 25.883 - type: ndcg_at_1000 value: 30.944 - type: ndcg_at_3 value: 18.136 - type: ndcg_at_5 value: 15.625 - type: precision_at_1 value: 22.6 - type: precision_at_10 value: 9.48 - type: precision_at_100 value: 1.991 - type: precision_at_1000 value: 0.321 - type: precision_at_3 value: 16.8 - type: precision_at_5 value: 13.54 - type: recall_at_1 value: 4.578 - type: recall_at_10 value: 19.213 - type: recall_at_100 value: 40.397 - type: recall_at_1000 value: 65.2 - type: recall_at_3 value: 10.208 - type: recall_at_5 value: 13.718 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 83.44288351714071 - type: cos_sim_spearman value: 79.37995604564952 - type: euclidean_pearson value: 81.1078874670718 - type: euclidean_spearman value: 79.37995905980499 - type: manhattan_pearson value: 81.03697527288986 - type: manhattan_spearman value: 79.33490235296236 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 84.95557650436523 - type: cos_sim_spearman value: 78.5190672399868 - type: euclidean_pearson value: 81.58064025904707 - type: euclidean_spearman value: 78.5190672399868 - type: manhattan_pearson value: 81.52857930619889 - type: manhattan_spearman value: 78.50421361308034 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 84.79128416228737 - type: cos_sim_spearman value: 86.05402451477147 - type: euclidean_pearson value: 85.46280267054289 - type: euclidean_spearman value: 86.05402451477147 - type: manhattan_pearson value: 85.46278563858236 - type: manhattan_spearman value: 86.08079590861004 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 83.20623089568763 - type: cos_sim_spearman value: 81.53786907061009 - type: 
euclidean_pearson value: 82.82272250091494 - type: euclidean_spearman value: 81.53786907061009 - type: manhattan_pearson value: 82.78850494027013 - type: manhattan_spearman value: 81.5135618083407 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 85.46366618397936 - type: cos_sim_spearman value: 86.96566013336908 - type: euclidean_pearson value: 86.62651697548931 - type: euclidean_spearman value: 86.96565526364454 - type: manhattan_pearson value: 86.58812160258009 - type: manhattan_spearman value: 86.9336484321288 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 82.51858358641559 - type: cos_sim_spearman value: 84.7652527954999 - type: euclidean_pearson value: 84.23914783766861 - type: euclidean_spearman value: 84.7652527954999 - type: manhattan_pearson value: 84.22749648503171 - type: manhattan_spearman value: 84.74527996746386 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 87.28026563313065 - type: cos_sim_spearman value: 87.46928143824915 - type: euclidean_pearson value: 88.30558762000372 - type: euclidean_spearman value: 87.46928143824915 - type: manhattan_pearson value: 88.10513330809331 - type: manhattan_spearman value: 87.21069787834173 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 62.376497134587375 - type: cos_sim_spearman value: 65.0159550112516 - type: euclidean_pearson value: 65.64572120879598 - type: euclidean_spearman value: 65.0159550112516 - type: manhattan_pearson value: 65.88143604989976 - type: manhattan_spearman value: 65.17547297222434 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 84.22876368947644 - type: cos_sim_spearman value: 85.46935577445318 - type: euclidean_pearson value: 85.32830231392005 - type: euclidean_spearman value: 85.46935577445318 - type: manhattan_pearson value: 85.30353211758495 - type: manhattan_spearman value: 85.42821085956945 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 80.60986667767133 - type: mrr value: 94.29432314236236 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 54.528 - type: map_at_10 value: 65.187 - type: map_at_100 value: 65.62599999999999 - type: map_at_1000 value: 65.657 - type: map_at_3 value: 62.352 - type: map_at_5 value: 64.025 - type: mrr_at_1 value: 57.333 - type: mrr_at_10 value: 66.577 - type: mrr_at_100 value: 66.88 - type: mrr_at_1000 value: 66.908 - type: mrr_at_3 value: 64.556 - type: mrr_at_5 value: 65.739 - type: ndcg_at_1 value: 57.333 - type: ndcg_at_10 value: 70.275 - type: ndcg_at_100 value: 72.136 - type: ndcg_at_1000 value: 72.963 - type: ndcg_at_3 value: 65.414 - type: ndcg_at_5 value: 67.831 - type: precision_at_1 value: 57.333 - type: precision_at_10 value: 
9.5 - type: precision_at_100 value: 1.057 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 25.778000000000002 - type: precision_at_5 value: 17.2 - type: recall_at_1 value: 54.528 - type: recall_at_10 value: 84.356 - type: recall_at_100 value: 92.833 - type: recall_at_1000 value: 99.333 - type: recall_at_3 value: 71.283 - type: recall_at_5 value: 77.14999999999999 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.74158415841585 - type: cos_sim_ap value: 92.90048959850317 - type: cos_sim_f1 value: 86.35650810245687 - type: cos_sim_precision value: 90.4709748083242 - type: cos_sim_recall value: 82.6 - type: dot_accuracy value: 99.74158415841585 - type: dot_ap value: 92.90048959850317 - type: dot_f1 value: 86.35650810245687 - type: dot_precision value: 90.4709748083242 - type: dot_recall value: 82.6 - type: euclidean_accuracy value: 99.74158415841585 - type: euclidean_ap value: 92.90048959850317 - type: euclidean_f1 value: 86.35650810245687 - type: euclidean_precision value: 90.4709748083242 - type: euclidean_recall value: 82.6 - type: manhattan_accuracy value: 99.74158415841585 - type: manhattan_ap value: 92.87344692947894 - type: manhattan_f1 value: 86.38497652582159 - type: manhattan_precision value: 90.29443838604145 - type: manhattan_recall value: 82.8 - type: max_accuracy value: 99.74158415841585 - type: max_ap value: 92.90048959850317 - type: max_f1 value: 86.38497652582159 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 63.191648770424216 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 34.02944668730218 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 50.466386167525265 - type: mrr value: 51.19071492233257 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.198022505886435 - type: cos_sim_spearman value: 30.40170257939193 - type: dot_pearson value: 30.198015316402614 - type: dot_spearman value: 30.40170257939193 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.242 - type: map_at_10 value: 2.17 - type: map_at_100 value: 12.221 - type: map_at_1000 value: 28.63 - type: map_at_3 value: 0.728 - type: map_at_5 value: 1.185 - type: mrr_at_1 value: 94 - type: mrr_at_10 value: 97 - type: mrr_at_100 value: 97 - type: mrr_at_1000 value: 97 - type: mrr_at_3 value: 97 - type: mrr_at_5 value: 97 - type: ndcg_at_1 value: 89 - type: ndcg_at_10 value: 82.30499999999999 - type: ndcg_at_100 value: 61.839999999999996 - type: ndcg_at_1000 value: 53.381 - type: ndcg_at_3 value: 88.877 - type: ndcg_at_5 value: 86.05199999999999 - type: precision_at_1 value: 94 - type: precision_at_10 value: 87 - type: precision_at_100 
value: 63.38 - type: precision_at_1000 value: 23.498 - type: precision_at_3 value: 94 - type: precision_at_5 value: 92 - type: recall_at_1 value: 0.242 - type: recall_at_10 value: 2.302 - type: recall_at_100 value: 14.979000000000001 - type: recall_at_1000 value: 49.638 - type: recall_at_3 value: 0.753 - type: recall_at_5 value: 1.226 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 3.006 - type: map_at_10 value: 11.805 - type: map_at_100 value: 18.146 - type: map_at_1000 value: 19.788 - type: map_at_3 value: 5.914 - type: map_at_5 value: 8.801 - type: mrr_at_1 value: 40.816 - type: mrr_at_10 value: 56.36600000000001 - type: mrr_at_100 value: 56.721999999999994 - type: mrr_at_1000 value: 56.721999999999994 - type: mrr_at_3 value: 52.041000000000004 - type: mrr_at_5 value: 54.796 - type: ndcg_at_1 value: 37.755 - type: ndcg_at_10 value: 29.863 - type: ndcg_at_100 value: 39.571 - type: ndcg_at_1000 value: 51.385999999999996 - type: ndcg_at_3 value: 32.578 - type: ndcg_at_5 value: 32.351 - type: precision_at_1 value: 40.816 - type: precision_at_10 value: 26.531 - type: precision_at_100 value: 7.796 - type: precision_at_1000 value: 1.555 - type: precision_at_3 value: 32.653 - type: precision_at_5 value: 33.061 - type: recall_at_1 value: 3.006 - type: recall_at_10 value: 18.738 - type: recall_at_100 value: 48.058 - type: recall_at_1000 value: 83.41300000000001 - type: recall_at_3 value: 7.166 - type: recall_at_5 value: 12.102 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.4178 - type: ap value: 14.648781342150446 - type: f1 value: 55.07299194946378 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 60.919637804187886 - type: f1 value: 61.24122013967399 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 49.207896583685695 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 86.23114978840078 - type: cos_sim_ap value: 74.26624727825818 - type: cos_sim_f1 value: 68.72377190817083 - type: cos_sim_precision value: 64.56400742115028 - type: cos_sim_recall value: 73.45646437994723 - type: dot_accuracy value: 86.23114978840078 - type: dot_ap value: 74.26624032659652 - type: dot_f1 value: 68.72377190817083 - type: dot_precision value: 64.56400742115028 - type: dot_recall value: 73.45646437994723 - type: euclidean_accuracy value: 86.23114978840078 - type: euclidean_ap value: 74.26624714480556 - type: euclidean_f1 value: 68.72377190817083 - type: euclidean_precision value: 64.56400742115028 - type: euclidean_recall value: 73.45646437994723 - type: manhattan_accuracy value: 86.16558383501221 - type: manhattan_ap value: 74.2091943976357 - type: manhattan_f1 value: 68.64221520524654 - type: manhattan_precision value: 63.59135913591359 - type: manhattan_recall value: 74.5646437994723 - 
type: max_accuracy value: 86.23114978840078 - type: max_ap value: 74.26624727825818 - type: max_f1 value: 68.72377190817083 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.3681841114604 - type: cos_sim_ap value: 86.65166387498546 - type: cos_sim_f1 value: 79.02581944698774 - type: cos_sim_precision value: 75.35796605434099 - type: cos_sim_recall value: 83.06898675700647 - type: dot_accuracy value: 89.3681841114604 - type: dot_ap value: 86.65166019802056 - type: dot_f1 value: 79.02581944698774 - type: dot_precision value: 75.35796605434099 - type: dot_recall value: 83.06898675700647 - type: euclidean_accuracy value: 89.3681841114604 - type: euclidean_ap value: 86.65166462876266 - type: euclidean_f1 value: 79.02581944698774 - type: euclidean_precision value: 75.35796605434099 - type: euclidean_recall value: 83.06898675700647 - type: manhattan_accuracy value: 89.36624364497226 - type: manhattan_ap value: 86.65076471274106 - type: manhattan_f1 value: 79.07408783532733 - type: manhattan_precision value: 76.41102972856527 - type: manhattan_recall value: 81.92947336002464 - type: max_accuracy value: 89.3681841114604 - type: max_ap value: 86.65166462876266 - type: max_f1 value: 79.07408783532733 --- # msyukorai/nomic-embed-text-v1.5-Q4_0-GGUF This model was converted to GGUF format from [`nomic-ai/nomic-embed-text-v1.5`](https://huggingface.co/nomic-ai/nomic-embed-text-v1.5) using llama.cpp via ggml.ai's [GGUF-my-repo](https://huggingface.co/spaces/ggml-org/gguf-my-repo) space. Refer to the [original model card](https://huggingface.co/nomic-ai/nomic-embed-text-v1.5) for more details on the model. ## Use with llama.cpp Install llama.cpp through brew (works on macOS and Linux): ```bash brew install llama.cpp ``` Invoke the llama.cpp server or the CLI. ### CLI: ```bash llama-cli --hf-repo msyukorai/nomic-embed-text-v1.5-Q4_0-GGUF --hf-file nomic-embed-text-v1.5-q4_0.gguf -p "The meaning to life and the universe is" ``` ### Server: ```bash llama-server --hf-repo msyukorai/nomic-embed-text-v1.5-Q4_0-GGUF --hf-file nomic-embed-text-v1.5-q4_0.gguf -c 2048 ``` Note: You can also use this checkpoint directly through the [usage steps](https://github.com/ggerganov/llama.cpp?tab=readme-ov-file#usage) listed in the llama.cpp repo. Step 1: Clone llama.cpp from GitHub. ``` git clone https://github.com/ggerganov/llama.cpp ``` Step 2: Move into the llama.cpp folder and build it with the `LLAMA_CURL=1` flag along with any other hardware-specific flags (e.g. `LLAMA_CUDA=1` for NVIDIA GPUs on Linux). ``` cd llama.cpp && LLAMA_CURL=1 make ``` Step 3: Run inference through the main binary. ``` ./llama-cli --hf-repo msyukorai/nomic-embed-text-v1.5-Q4_0-GGUF --hf-file nomic-embed-text-v1.5-q4_0.gguf -p "The meaning to life and the universe is" ``` or ``` ./llama-server --hf-repo msyukorai/nomic-embed-text-v1.5-Q4_0-GGUF --hf-file nomic-embed-text-v1.5-q4_0.gguf -c 2048 ```
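Since this is an embedding model, the generation-style prompts above exercise the model but do not return vectors. Below is a minimal, hedged sketch (not part of the original card) of fetching embeddings from `llama-server` once it has been started with embeddings enabled; the exact flag (`--embedding` vs `--embeddings`) and the availability of the OpenAI-style `/v1/embeddings` route depend on your llama.cpp version (older builds expose `POST /embedding` with a `content` field instead), and the `search_query:` task prefix comes from the upstream nomic-embed model card, so verify both against your setup.

```python
# Hedged sketch: query a locally running llama-server for embeddings.
# Assumes the server was started with embeddings enabled, e.g.:
#   llama-server --hf-repo msyukorai/nomic-embed-text-v1.5-Q4_0-GGUF \
#     --hf-file nomic-embed-text-v1.5-q4_0.gguf --embedding -c 2048
# Flag names and endpoint paths vary by llama.cpp version; check `llama-server --help`.
import requests

resp = requests.post(
    "http://localhost:8080/v1/embeddings",  # OpenAI-compatible route, if your build exposes it
    json={"input": "search_query: What is a GGUF file?"},  # task prefix per the upstream nomic card
    timeout=30,
)
resp.raise_for_status()
embedding = resp.json()["data"][0]["embedding"]
print(len(embedding), embedding[:5])
```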
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
TheBloke/UNAversal-8x7B-v1beta-GGUF
TheBloke
null
[ "transformers", "gguf", "mixtral", "UNA", "juanako", "MoE", "en", "base_model:fblgit/UNAversal-8x7B-v1beta", "base_model:quantized:fblgit/UNAversal-8x7B-v1beta", "license:cc-by-nc-sa-4.0", "region:us", "conversational" ]
2023-12-30T17:02:58
2023-12-30T17:22:29
175
1
--- base_model: fblgit/UNAversal-8x7B-v1beta language: - en library_name: transformers license: cc-by-nc-sa-4.0 model_name: UNAversal 8X7B v1Beta tags: - UNA - juanako - mixtral - MoE inference: false model_creator: FBL model_type: mixtral prompt_template: '{prompt} ' quantized_by: TheBloke --- <!-- markdownlint-disable MD041 --> <!-- header start --> <!-- 200823 --> <div style="width: auto; margin-left: auto; margin-right: auto"> <img src="https://i.imgur.com/EBdldam.jpg" alt="TheBlokeAI" style="width: 100%; min-width: 400px; display: block; margin: auto;"> </div> <div style="display: flex; justify-content: space-between; width: 100%;"> <div style="display: flex; flex-direction: column; align-items: flex-start;"> <p style="margin-top: 0.5em; margin-bottom: 0em;"><a href="https://discord.gg/theblokeai">Chat & support: TheBloke's Discord server</a></p> </div> <div style="display: flex; flex-direction: column; align-items: flex-end;"> <p style="margin-top: 0.5em; margin-bottom: 0em;"><a href="https://www.patreon.com/TheBlokeAI">Want to contribute? TheBloke's Patreon page</a></p> </div> </div> <div style="text-align:center; margin-top: 0em; margin-bottom: 0em"><p style="margin-top: 0.25em; margin-bottom: 0em;">TheBloke's LLM work is generously supported by a grant from <a href="https://a16z.com">andreessen horowitz (a16z)</a></p></div> <hr style="margin-top: 1.0em; margin-bottom: 1.0em;"> <!-- header end --> # UNAversal 8X7B v1Beta - GGUF - Model creator: [FBL](https://huggingface.co/fblgit) - Original model: [UNAversal 8X7B v1Beta](https://huggingface.co/fblgit/UNAversal-8x7B-v1beta) <!-- description start --> ## Description This repo contains GGUF format model files for [FBL's UNAversal 8X7B v1Beta](https://huggingface.co/fblgit/UNAversal-8x7B-v1beta). These files were quantised using hardware kindly provided by [Massed Compute](https://massedcompute.com/). <!-- description end --> <!-- README_GGUF.md-about-gguf start --> ### About GGUF GGUF is a new format introduced by the llama.cpp team on August 21st 2023. It is a replacement for GGML, which is no longer supported by llama.cpp. Here is an incomplete list of clients and libraries that are known to support GGUF: * [llama.cpp](https://github.com/ggerganov/llama.cpp). The source project for GGUF. Offers a CLI and a server option. * [text-generation-webui](https://github.com/oobabooga/text-generation-webui), the most widely used web UI, with many features and powerful extensions. Supports GPU acceleration. * [KoboldCpp](https://github.com/LostRuins/koboldcpp), a fully featured web UI, with GPU accel across all platforms and GPU architectures. Especially good for story telling. * [GPT4All](https://gpt4all.io/index.html), a free and open source local running GUI, supporting Windows, Linux and macOS with full GPU accel. * [LM Studio](https://lmstudio.ai/), an easy-to-use and powerful local GUI for Windows and macOS (Silicon), with GPU acceleration. Linux available, in beta as of 27/11/2023. * [LoLLMS Web UI](https://github.com/ParisNeo/lollms-webui), a great web UI with many interesting and unique features, including a full model library for easy model selection. * [Faraday.dev](https://faraday.dev/), an attractive and easy to use character-based chat GUI for Windows and macOS (both Silicon and Intel), with GPU acceleration. * [llama-cpp-python](https://github.com/abetlen/llama-cpp-python), a Python library with GPU accel, LangChain support, and OpenAI-compatible API server. 
* [candle](https://github.com/huggingface/candle), a Rust ML framework with a focus on performance, including GPU support, and ease of use. * [ctransformers](https://github.com/marella/ctransformers), a Python library with GPU accel, LangChain support, and OpenAI-compatible API server. Note: as of the time of writing (November 27th 2023), ctransformers has not been updated in a long time and does not support many recent models. <!-- README_GGUF.md-about-gguf end --> <!-- repositories-available start --> ## Repositories available * [AWQ model(s) for GPU inference.](https://huggingface.co/TheBloke/UNAversal-8x7B-v1beta-AWQ) * [GPTQ models for GPU inference, with multiple quantisation parameter options.](https://huggingface.co/TheBloke/UNAversal-8x7B-v1beta-GPTQ) * [2, 3, 4, 5, 6 and 8-bit GGUF models for CPU+GPU inference](https://huggingface.co/TheBloke/UNAversal-8x7B-v1beta-GGUF) * [FBL's original unquantised fp16 model in pytorch format, for GPU inference and for further conversions](https://huggingface.co/fblgit/UNAversal-8x7B-v1beta) <!-- repositories-available end --> <!-- prompt-template start --> ## Prompt template: Unknown ``` {prompt} ``` <!-- prompt-template end --> <!-- compatibility_gguf start --> ## Compatibility These quantised GGUFv2 files are compatible with llama.cpp from August 27th 2023 onwards, as of commit [d0cee0d](https://github.com/ggerganov/llama.cpp/commit/d0cee0d36d5be95a0d9088b674dbb27354107221). They are also compatible with many third-party UIs and libraries - please see the list at the top of this README. ## Explanation of quantisation methods <details> <summary>Click to see details</summary> The new methods available are: * GGML_TYPE_Q2_K - "type-1" 2-bit quantization in super-blocks containing 16 blocks, each block having 16 weights. Block scales and mins are quantized with 4 bits. This ends up effectively using 2.5625 bits per weight (bpw). * GGML_TYPE_Q3_K - "type-0" 3-bit quantization in super-blocks containing 16 blocks, each block having 16 weights. Scales are quantized with 6 bits. This ends up using 3.4375 bpw. * GGML_TYPE_Q4_K - "type-1" 4-bit quantization in super-blocks containing 8 blocks, each block having 32 weights. Scales and mins are quantized with 6 bits. This ends up using 4.5 bpw. * GGML_TYPE_Q5_K - "type-1" 5-bit quantization. Same super-block structure as GGML_TYPE_Q4_K, resulting in 5.5 bpw. * GGML_TYPE_Q6_K - "type-0" 6-bit quantization. Super-blocks with 16 blocks, each block having 16 weights. Scales are quantized with 8 bits. This ends up using 6.5625 bpw. Refer to the Provided Files table below to see what files use which methods, and how.
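As a quick sanity check on how these bits-per-weight figures translate into the file sizes in the Provided Files table below, you can approximate file size ≈ total parameters × bpw / 8. The sketch below is not from the original README: it assumes roughly 46.7B total parameters for Mixtral 8x7B and treats the mixed "_M" K-quants as if they sat exactly at their nominal bpw, so the results only land near the table's figures.

```python
# Rough, assumption-laden estimate: GGUF file size ≈ params * bits_per_weight / 8 bytes.
# Mixtral 8x7B has ~46.7B total parameters; "_M" K-quant files mix block types,
# so their effective bpw is only approximately the nominal value used here.
PARAMS = 46.7e9

nominal_bpw = {
    "Q2_K": 2.5625,
    "Q3_K_M": 3.4375,
    "Q4_K_M": 4.5,
    "Q5_K_M": 5.5,
    "Q6_K": 6.5625,
}

for name, bpw in nominal_bpw.items():
    size_gb = PARAMS * bpw / 8 / 1e9
    print(f"{name}: ~{size_gb:.1f} GB")  # Q4_K_M comes out near the 26.44 GB listed below
```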
</details> <!-- compatibility_gguf end --> <!-- README_GGUF.md-provided-files start --> ## Provided files | Name | Quant method | Bits | Size | Max RAM required | Use case | | ---- | ---- | ---- | ---- | ---- | ----- | | [unaversal-8x7b-v1beta.Q2_K.gguf](https://huggingface.co/TheBloke/UNAversal-8x7B-v1beta-GGUF/blob/main/unaversal-8x7b-v1beta.Q2_K.gguf) | Q2_K | 2 | 15.64 GB| 18.14 GB | smallest, significant quality loss - not recommended for most purposes | | [unaversal-8x7b-v1beta.Q3_K_M.gguf](https://huggingface.co/TheBloke/UNAversal-8x7B-v1beta-GGUF/blob/main/unaversal-8x7b-v1beta.Q3_K_M.gguf) | Q3_K_M | 3 | 20.36 GB| 22.86 GB | very small, high quality loss | | [unaversal-8x7b-v1beta.Q4_0.gguf](https://huggingface.co/TheBloke/UNAversal-8x7B-v1beta-GGUF/blob/main/unaversal-8x7b-v1beta.Q4_0.gguf) | Q4_0 | 4 | 26.44 GB| 28.94 GB | legacy; small, very high quality loss - prefer using Q3_K_M | | [unaversal-8x7b-v1beta.Q4_K_M.gguf](https://huggingface.co/TheBloke/UNAversal-8x7B-v1beta-GGUF/blob/main/unaversal-8x7b-v1beta.Q4_K_M.gguf) | Q4_K_M | 4 | 26.44 GB| 28.94 GB | medium, balanced quality - recommended | | [unaversal-8x7b-v1beta.Q5_0.gguf](https://huggingface.co/TheBloke/UNAversal-8x7B-v1beta-GGUF/blob/main/unaversal-8x7b-v1beta.Q5_0.gguf) | Q5_0 | 5 | 32.23 GB| 34.73 GB | legacy; medium, balanced quality - prefer using Q4_K_M | | [unaversal-8x7b-v1beta.Q5_K_M.gguf](https://huggingface.co/TheBloke/UNAversal-8x7B-v1beta-GGUF/blob/main/unaversal-8x7b-v1beta.Q5_K_M.gguf) | Q5_K_M | 5 | 32.23 GB| 34.73 GB | large, very low quality loss - recommended | | [unaversal-8x7b-v1beta.Q6_K.gguf](https://huggingface.co/TheBloke/UNAversal-8x7B-v1beta-GGUF/blob/main/unaversal-8x7b-v1beta.Q6_K.gguf) | Q6_K | 6 | 38.38 GB| 40.88 GB | very large, extremely low quality loss | | [unaversal-8x7b-v1beta.Q8_0.gguf](https://huggingface.co/TheBloke/UNAversal-8x7B-v1beta-GGUF/blob/main/unaversal-8x7b-v1beta.Q8_0.gguf) | Q8_0 | 8 | 49.62 GB| 52.12 GB | very large, extremely low quality loss - not recommended | **Note**: the above RAM figures assume no GPU offloading. If layers are offloaded to the GPU, this will reduce RAM usage and use VRAM instead. <!-- README_GGUF.md-provided-files end --> <!-- README_GGUF.md-how-to-download start --> ## How to download GGUF files **Note for manual downloaders:** You almost never want to clone the entire repo! Multiple different quantisation formats are provided, and most users only want to pick and download a single file. The following clients/libraries will automatically download models for you, providing a list of available models to choose from: * LM Studio * LoLLMS Web UI * Faraday.dev ### In `text-generation-webui` Under Download Model, you can enter the model repo: TheBloke/UNAversal-8x7B-v1beta-GGUF and below it, a specific filename to download, such as: unaversal-8x7b-v1beta.Q4_K_M.gguf. Then click Download. ### On the command line, including multiple files at once I recommend using the `huggingface-hub` Python library: ```shell pip3 install huggingface-hub ``` Then you can download any individual model file to the current directory, at high speed, with a command like this: ```shell huggingface-cli download TheBloke/UNAversal-8x7B-v1beta-GGUF unaversal-8x7b-v1beta.Q4_K_M.gguf --local-dir . --local-dir-use-symlinks False ``` <details> <summary>More advanced huggingface-cli download usage (click to read)</summary> You can also download multiple files at once with a pattern: ```shell huggingface-cli download TheBloke/UNAversal-8x7B-v1beta-GGUF --local-dir . 
--local-dir-use-symlinks False --include='*Q4_K*gguf' ``` For more documentation on downloading with `huggingface-cli`, please see: [HF -> Hub Python Library -> Download files -> Download from the CLI](https://huggingface.co/docs/huggingface_hub/guides/download#download-from-the-cli). To accelerate downloads on fast connections (1Gbit/s or higher), install `hf_transfer`: ```shell pip3 install hf_transfer ``` And set the environment variable `HF_HUB_ENABLE_HF_TRANSFER` to `1`: ```shell HF_HUB_ENABLE_HF_TRANSFER=1 huggingface-cli download TheBloke/UNAversal-8x7B-v1beta-GGUF unaversal-8x7b-v1beta.Q4_K_M.gguf --local-dir . --local-dir-use-symlinks False ``` Windows Command Line users: You can set the environment variable by running `set HF_HUB_ENABLE_HF_TRANSFER=1` before the download command. </details> <!-- README_GGUF.md-how-to-download end --> <!-- README_GGUF.md-how-to-run start --> ## Example `llama.cpp` command Make sure you are using `llama.cpp` from commit [d0cee0d](https://github.com/ggerganov/llama.cpp/commit/d0cee0d36d5be95a0d9088b674dbb27354107221) or later. ```shell ./main -ngl 35 -m unaversal-8x7b-v1beta.Q4_K_M.gguf --color -c 32768 --temp 0.7 --repeat_penalty 1.1 -n -1 -p "{prompt}" ``` Change `-ngl 35` to the number of layers to offload to GPU. Remove it if you don't have GPU acceleration. Change `-c 32768` to the desired sequence length. For extended sequence models - e.g. 8K, 16K, 32K - the necessary RoPE scaling parameters are read from the GGUF file and set by llama.cpp automatically. Note that longer sequence lengths require much more resources, so you may need to reduce this value. If you want to have a chat-style conversation, replace the `-p <PROMPT>` argument with `-i -ins`. For other parameters and how to use them, please refer to [the llama.cpp documentation](https://github.com/ggerganov/llama.cpp/blob/master/examples/main/README.md). ## How to run in `text-generation-webui` Further instructions can be found in the text-generation-webui documentation, here: [text-generation-webui/docs/04 ‐ Model Tab.md](https://github.com/oobabooga/text-generation-webui/blob/main/docs/04%20%E2%80%90%20Model%20Tab.md#llamacpp). ## How to run from Python code You can use GGUF models from Python using the [llama-cpp-python](https://github.com/abetlen/llama-cpp-python) or [ctransformers](https://github.com/marella/ctransformers) libraries. Note that at the time of writing (Nov 27th 2023), ctransformers has not been updated for some time and is not compatible with some recent models. Therefore I recommend you use llama-cpp-python. ### How to load this model in Python code, using llama-cpp-python For full documentation, please see: [llama-cpp-python docs](https://abetlen.github.io/llama-cpp-python/).
#### First install the package Run one of the following commands, according to your system: ```shell # Base llama-cpp-python with no GPU acceleration pip install llama-cpp-python # With NVidia CUDA acceleration CMAKE_ARGS="-DLLAMA_CUBLAS=on" pip install llama-cpp-python # Or with OpenBLAS acceleration CMAKE_ARGS="-DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS" pip install llama-cpp-python # Or with CLBLast acceleration CMAKE_ARGS="-DLLAMA_CLBLAST=on" pip install llama-cpp-python # Or with AMD ROCm GPU acceleration (Linux only) CMAKE_ARGS="-DLLAMA_HIPBLAS=on" pip install llama-cpp-python # Or with Metal GPU acceleration for macOS systems only CMAKE_ARGS="-DLLAMA_METAL=on" pip install llama-cpp-python # On Windows, to set the CMAKE_ARGS variable in PowerShell, follow this format; e.g. for NVidia CUDA: $env:CMAKE_ARGS = "-DLLAMA_CUBLAS=on" pip install llama-cpp-python ``` #### Simple llama-cpp-python example code ```python from llama_cpp import Llama # Set gpu_layers to the number of layers to offload to GPU. Set to 0 if no GPU acceleration is available on your system. llm = Llama( model_path="./unaversal-8x7b-v1beta.Q4_K_M.gguf", # Download the model file first n_ctx=32768, # The max sequence length to use - note that longer sequence lengths require much more resources n_threads=8, # The number of CPU threads to use, tailor to your system and the resulting performance n_gpu_layers=35 # The number of layers to offload to GPU, if you have GPU acceleration available ) # Simple inference example output = llm( "{prompt}", # Prompt max_tokens=512, # Generate up to 512 tokens stop=["</s>"], # Example stop token - not necessarily correct for this specific model! Please check before using. echo=True # Whether to echo the prompt ) # Chat Completion API llm = Llama(model_path="./unaversal-8x7b-v1beta.Q4_K_M.gguf", chat_format="llama-2") # Set chat_format according to the model you are using llm.create_chat_completion( messages = [ {"role": "system", "content": "You are a story writing assistant."}, { "role": "user", "content": "Write a story about llamas." } ] ) ``` ## How to use with LangChain Here are guides on using llama-cpp-python and ctransformers with LangChain: * [LangChain + llama-cpp-python](https://python.langchain.com/docs/integrations/llms/llamacpp) * [LangChain + ctransformers](https://python.langchain.com/docs/integrations/providers/ctransformers) <!-- README_GGUF.md-how-to-run end --> <!-- footer start --> <!-- 200823 --> ## Discord For further support, and discussions on these models and AI in general, join us at: [TheBloke AI's Discord server](https://discord.gg/theblokeai) ## Thanks, and how to contribute Thanks to the [chirper.ai](https://chirper.ai) team! Thanks to Clay from [gpus.llm-utils.org](https://gpus.llm-utils.org)! I've had a lot of people ask if they can contribute. I enjoy providing models and helping people, and would love to be able to spend even more time doing it, as well as expanding into new projects like fine-tuning/training. If you're able and willing to contribute it will be most gratefully received and will help me to keep providing more models, and to start work on new AI projects. Donaters will get priority support on any and all AI/LLM/model questions and requests, access to a private Discord room, plus other benefits. * Patreon: https://patreon.com/TheBlokeAI * Ko-Fi: https://ko-fi.com/TheBlokeAI **Special thanks to**: Aemon Algiz.
**Patreon special mentions**: Michael Levine, 阿明, Trailburnt, Nikolai Manek, John Detwiler, Randy H, Will Dee, Sebastain Graf, NimbleBox.ai, Eugene Pentland, Emad Mostaque, Ai Maven, Jim Angel, Jeff Scroggin, Michael Davis, Manuel Alberto Morcote, Stephen Murray, Robert, Justin Joy, Luke @flexchar, Brandon Frisco, Elijah Stavena, S_X, Dan Guido, Undi ., Komninos Chatzipapas, Shadi, theTransient, Lone Striker, Raven Klaugh, jjj, Cap'n Zoog, Michel-Marie MAUDET (LINAGORA), Matthew Berman, David, Fen Risland, Omer Bin Jawed, Luke Pendergrass, Kalila, OG, Erik Bjäreholt, Rooh Singh, Joseph William Delisle, Dan Lewis, TL, John Villwock, AzureBlack, Brad, Pedro Madruga, Caitlyn Gatomon, K, jinyuan sun, Mano Prime, Alex, Jeffrey Morgan, Alicia Loh, Illia Dulskyi, Chadd, transmissions 11, fincy, Rainer Wilmers, ReadyPlayerEmma, knownsqashed, Mandus, biorpg, Deo Leter, Brandon Phillips, SuperWojo, Sean Connelly, Iucharbius, Jack West, Harry Royden McLaughlin, Nicholas, terasurfer, Vitor Caleffi, Duane Dunston, Johann-Peter Hartmann, David Ziegler, Olakabola, Ken Nordquist, Trenton Dambrowitz, Tom X Nguyen, Vadim, Ajan Kanaga, Leonard Tan, Clay Pascal, Alexandros Triantafyllidis, JM33133, Xule, vamX, ya boyyy, subjectnull, Talal Aujan, Alps Aficionado, wassieverse, Ari Malik, James Bentley, Woland, Spencer Kim, Michael Dempsey, Fred von Graf, Elle, zynix, William Richards, Stanislav Ovsiannikov, Edmond Seymore, Jonathan Leane, Martin Kemka, usrbinkat, Enrico Ros Thank you to all my generous patrons and donaters! And thank you again to a16z for their generous grant. <!-- footer end --> <!-- original-model-card start --> # Original model card: FBL's UNAversal 8X7B v1Beta # UNAversal - Uniform Neural Alignment (MoE) This is just a beta, a first release so people can start working on franksteins and so. It does achieve high GSM/Math and TQA, so ideally you can merge it with other mixtrals and see what coming out of it Based on [mistralai/Mixtral-8x7B-Instruct-v0.1](https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1) ## UNA Details For this model we came out with the most obvious, placing UNA on the router_logit. It does work, but we saw a much better performance on SFT by doing so. So this model DOES have UNA-SFT phase, its highly experimental and it was merely using LLaMA-Factory datasets by example alpaca. As the others: - Can be finetuned further, try 2e-5 or **1e-4 (since its MOE)** - Can be merged, here you will have to improvise and please report findings on a discussion thread. **REMINDER**: please.. cite, it does help on the research and the lab itself, seriously. ## NEED YOUR HELP!! I need a multi-turn trainloop for the Mixtral, that can squeeze the juice out of 8xH100's properly. Please feel free to reach @fblgit either discord or twitter. thanks! # Evals Here there are some, but we also submitted it to the HF eval queue.... 
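The tables below are raw outputs from EleutherAI's lm-evaluation-harness. As a rough, hedged sketch of how comparable numbers could be reproduced locally (not the exact invocation used for these runs, which relied on the vllm backend with tensor parallelism), something like the following should work, assuming a recent `lm-eval` install and enough GPU memory for the full 8x7B model:

```python
# Hypothetical reproduction sketch for the GSM8K 5-shot score using lm-evaluation-harness.
# Backend, dtype and task selection here are assumptions; adjust to your hardware.
from lm_eval import simple_evaluate

results = simple_evaluate(
    model="hf",  # plain Hugging Face backend; the runs below used vllm instead
    model_args="pretrained=fblgit/UNAversal-8x7B-v1beta,dtype=float16",
    tasks=["gsm8k"],
    num_fewshot=5,
)
print(results["results"]["gsm8k"])  # expect an exact_match entry comparable to the table below
```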
## GSM8k 5-Shot ``` |Tasks|Version| Filter |n-shot| Metric |Value | |Stderr| |-----|-------|----------|-----:|-----------|-----:|---|-----:| |gsm8k|Yaml |get-answer| 5|exact_match|0.6603|± | 0.013| ``` ## ARC 25-Shot ``` | Tasks |Version|Filter|n-shot| Metric |Value | |Stderr| |-------------|-------|------|-----:|--------|-----:|---|-----:| |arc_challenge|Yaml |none | 25|acc |0.6621|± |0.0138| | | |none | 25|acc_norm|0.6962|± |0.0134| ``` ## TruthfulQA 0-Shot (MC2) ``` | Tasks |Version|Filter|n-shot|Metric|Value | |Stderr| |--------------|-------|------|-----:|------|-----:|---|-----:| |truthfulqa_mc2|Yaml |none | 0|acc |0.7122|± |0.0141| ``` ## 0-Shots Evals ``` | Tasks |Version|Filter|n-shot| Metric |Value | |Stderr| |--------------|-------|------|-----:|----------|-----:|---|-----:| |arc_challenge |Yaml |none | 0|acc |0.6101|± |0.0143| | | |none | 0|acc_norm |0.6425|± |0.0140| |arc_easy |Yaml |none | 0|acc |0.8615|± |0.0071| | | |none | 0|acc_norm |0.8375|± |0.0076| |boolq |Yaml |none | 0|acc |0.8624|± |0.0060| |lambada_openai|Yaml |none | 0|perplexity|2.8318|± |0.0507| | | |none | 0|acc |0.7650|± |0.0059| |mathqa |Yaml |none | 0|acc |0.4472|± |0.0091| | | |none | 0|acc_norm |0.4436|± |0.0091| |piqa |Yaml |none | 0|acc |0.8292|± |0.0088| | | |none | 0|acc_norm |0.8422|± |0.0085| |pubmedqa |Yaml |none | 0|acc |0.7920|± |0.0182| |sciq |Yaml |none | 0|acc |0.9630|± |0.0060| | | |none | 0|acc_norm |0.9370|± |0.0077| ``` ## BBH at 0-Shot ``` vllm (pretrained=fblgit/UNAversal-8x7B-v1beta,tensor_parallel_size=2,data_parallel_size=4,gpu_memory_utilization=0.8,dtype=float16), gen_kwargs: (None), limit: None, num_fewshot: 0, batch_size: auto | Tasks |Version| Filter |n-shot| Metric |Value | |Stderr| |----------------------------------------------------------|-------|----------|-----:|-----------|-----:|---|-----:| |bbh |N/A |get-answer| 0|exact_match|0.6752|± |0.1772| | - bbh_cot_fewshot_boolean_expressions |Yaml |get-answer| 0|exact_match|0.8840|± |0.0203| | - bbh_cot_fewshot_causal_judgement |Yaml |get-answer| 0|exact_match|0.6417|± |0.0352| | - bbh_cot_fewshot_date_understanding |Yaml |get-answer| 0|exact_match|0.7600|± |0.0271| | - bbh_cot_fewshot_disambiguation_qa |Yaml |get-answer| 0|exact_match|0.7160|± |0.0286| | - bbh_cot_fewshot_dyck_languages |Yaml |get-answer| 0|exact_match|0.1800|± |0.0243| | - bbh_cot_fewshot_formal_fallacies |Yaml |get-answer| 0|exact_match|0.6520|± |0.0302| | - bbh_cot_fewshot_geometric_shapes |Yaml |get-answer| 0|exact_match|0.3880|± |0.0309| | - bbh_cot_fewshot_hyperbaton |Yaml |get-answer| 0|exact_match|0.9600|± |0.0124| | - bbh_cot_fewshot_logical_deduction_five_objects |Yaml |get-answer| 0|exact_match|0.5360|± |0.0316| | - bbh_cot_fewshot_logical_deduction_seven_objects |Yaml |get-answer| 0|exact_match|0.5040|± |0.0317| | - bbh_cot_fewshot_logical_deduction_three_objects |Yaml |get-answer| 0|exact_match|0.8600|± |0.0220| | - bbh_cot_fewshot_movie_recommendation |Yaml |get-answer| 0|exact_match|0.7840|± |0.0261| | - bbh_cot_fewshot_multistep_arithmetic_two |Yaml |get-answer| 0|exact_match|0.6600|± |0.0300| | - bbh_cot_fewshot_navigate |Yaml |get-answer| 0|exact_match|0.8160|± |0.0246| | - bbh_cot_fewshot_object_counting |Yaml |get-answer| 0|exact_match|0.8360|± |0.0235| | - bbh_cot_fewshot_penguins_in_a_table |Yaml |get-answer| 0|exact_match|0.7329|± |0.0367| | - bbh_cot_fewshot_reasoning_about_colored_objects |Yaml |get-answer| 0|exact_match|0.8120|± |0.0248| | - bbh_cot_fewshot_ruin_names |Yaml |get-answer| 0|exact_match|0.4440|± |0.0315| | - 
bbh_cot_fewshot_salient_translation_error_detection |Yaml |get-answer| 0|exact_match|0.5200|± |0.0317| | - bbh_cot_fewshot_snarks |Yaml |get-answer| 0|exact_match|0.7135|± |0.0340| | - bbh_cot_fewshot_sports_understanding |Yaml |get-answer| 0|exact_match|0.9400|± |0.0151| | - bbh_cot_fewshot_temporal_sequences |Yaml |get-answer| 0|exact_match|0.7560|± |0.0272| | - bbh_cot_fewshot_tracking_shuffled_objects_five_objects |Yaml |get-answer| 0|exact_match|0.5680|± |0.0314| | - bbh_cot_fewshot_tracking_shuffled_objects_seven_objects|Yaml |get-answer| 0|exact_match|0.6280|± |0.0306| | - bbh_cot_fewshot_tracking_shuffled_objects_three_objects|Yaml |get-answer| 0|exact_match|0.6280|± |0.0306| | - bbh_cot_fewshot_web_of_lies |Yaml |get-answer| 0|exact_match|0.9560|± |0.0130| | - bbh_cot_fewshot_word_sorting |Yaml |get-answer| 0|exact_match|0.3800|± |0.0308| |Groups|Version| Filter |n-shot| Metric |Value | |Stderr| |------|-------|----------|-----:|-----------|-----:|---|-----:| |bbh |N/A |get-answer| 0|exact_match|0.6752|± |0.1772| ``` <!-- original-model-card end -->
[ "TRANSLATION" ]
[ "PUBMEDQA", "SCIQ" ]
arkohut/jina-embeddings-v2-base-en
arkohut
feature-extraction
[ "sentence-transformers", "safetensors", "bert", "feature-extraction", "sentence-similarity", "mteb", "custom_code", "en", "dataset:allenai/c4", "arxiv:2108.12409", "arxiv:2310.19923", "license:apache-2.0", "model-index", "autotrain_compatible", "text-embeddings-inference", "region:us" ]
2024-11-19T17:18:48
2024-11-19T17:20:45
173
0
--- datasets: - allenai/c4 language: en license: apache-2.0 tags: - sentence-transformers - feature-extraction - sentence-similarity - mteb inference: false model-index: - name: jina-embedding-b-en-v2 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 74.73134328358209 - type: ap value: 37.765427081831035 - type: f1 value: 68.79367444339518 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 88.544275 - type: ap value: 84.61328675662887 - type: f1 value: 88.51879035862375 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 45.263999999999996 - type: f1 value: 43.778759656699435 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 21.693 - type: map_at_10 value: 35.487 - type: map_at_100 value: 36.862 - type: map_at_1000 value: 36.872 - type: map_at_3 value: 30.049999999999997 - type: map_at_5 value: 32.966 - type: mrr_at_1 value: 21.977 - type: mrr_at_10 value: 35.565999999999995 - type: mrr_at_100 value: 36.948 - type: mrr_at_1000 value: 36.958 - type: mrr_at_3 value: 30.121 - type: mrr_at_5 value: 33.051 - type: ndcg_at_1 value: 21.693 - type: ndcg_at_10 value: 44.181 - type: ndcg_at_100 value: 49.982 - type: ndcg_at_1000 value: 50.233000000000004 - type: ndcg_at_3 value: 32.830999999999996 - type: ndcg_at_5 value: 38.080000000000005 - type: precision_at_1 value: 21.693 - type: precision_at_10 value: 7.248 - type: precision_at_100 value: 0.9769999999999999 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 13.632 - type: precision_at_5 value: 10.725 - type: recall_at_1 value: 21.693 - type: recall_at_10 value: 72.475 - type: recall_at_100 value: 97.653 - type: recall_at_1000 value: 99.57300000000001 - type: recall_at_3 value: 40.896 - type: recall_at_5 value: 53.627 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 45.39242428696777 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 36.675626784714 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 62.247725694904034 - type: mrr value: 74.91359978894604 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 82.68003802970496 - type: cos_sim_spearman value: 81.23438110096286 - type: euclidean_pearson value: 81.87462986142582 - type: euclidean_spearman value: 81.23438110096286 - type: manhattan_pearson value: 81.61162566600755 - type: manhattan_spearman value: 81.11329400456184 - task: type: Classification dataset: name: 
MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 84.01298701298701 - type: f1 value: 83.31690714969382 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 37.050108150972086 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 30.15731442819715 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 31.391999999999996 - type: map_at_10 value: 42.597 - type: map_at_100 value: 44.07 - type: map_at_1000 value: 44.198 - type: map_at_3 value: 38.957 - type: map_at_5 value: 40.961 - type: mrr_at_1 value: 37.196 - type: mrr_at_10 value: 48.152 - type: mrr_at_100 value: 48.928 - type: mrr_at_1000 value: 48.964999999999996 - type: mrr_at_3 value: 45.446 - type: mrr_at_5 value: 47.205999999999996 - type: ndcg_at_1 value: 37.196 - type: ndcg_at_10 value: 49.089 - type: ndcg_at_100 value: 54.471000000000004 - type: ndcg_at_1000 value: 56.385 - type: ndcg_at_3 value: 43.699 - type: ndcg_at_5 value: 46.22 - type: precision_at_1 value: 37.196 - type: precision_at_10 value: 9.313 - type: precision_at_100 value: 1.478 - type: precision_at_1000 value: 0.198 - type: precision_at_3 value: 20.839 - type: precision_at_5 value: 14.936 - type: recall_at_1 value: 31.391999999999996 - type: recall_at_10 value: 61.876 - type: recall_at_100 value: 84.214 - type: recall_at_1000 value: 95.985 - type: recall_at_3 value: 46.6 - type: recall_at_5 value: 53.588 - type: map_at_1 value: 29.083 - type: map_at_10 value: 38.812999999999995 - type: map_at_100 value: 40.053 - type: map_at_1000 value: 40.188 - type: map_at_3 value: 36.111 - type: map_at_5 value: 37.519000000000005 - type: mrr_at_1 value: 36.497 - type: mrr_at_10 value: 44.85 - type: mrr_at_100 value: 45.546 - type: mrr_at_1000 value: 45.593 - type: mrr_at_3 value: 42.686 - type: mrr_at_5 value: 43.909 - type: ndcg_at_1 value: 36.497 - type: ndcg_at_10 value: 44.443 - type: ndcg_at_100 value: 48.979 - type: ndcg_at_1000 value: 51.154999999999994 - type: ndcg_at_3 value: 40.660000000000004 - type: ndcg_at_5 value: 42.193000000000005 - type: precision_at_1 value: 36.497 - type: precision_at_10 value: 8.433 - type: precision_at_100 value: 1.369 - type: precision_at_1000 value: 0.185 - type: precision_at_3 value: 19.894000000000002 - type: precision_at_5 value: 13.873 - type: recall_at_1 value: 29.083 - type: recall_at_10 value: 54.313 - type: recall_at_100 value: 73.792 - type: recall_at_1000 value: 87.629 - type: recall_at_3 value: 42.257 - type: recall_at_5 value: 47.066 - type: map_at_1 value: 38.556000000000004 - type: map_at_10 value: 50.698 - type: map_at_100 value: 51.705 - type: map_at_1000 value: 51.768 - type: map_at_3 value: 47.848 - type: map_at_5 value: 49.358000000000004 - type: mrr_at_1 value: 43.95 - type: mrr_at_10 value: 54.191 - type: mrr_at_100 value: 54.852999999999994 - type: mrr_at_1000 value: 54.885 - type: mrr_at_3 value: 51.954 - type: mrr_at_5 value: 53.13 - type: ndcg_at_1 value: 43.95 - type: ndcg_at_10 value: 56.516 - type: ndcg_at_100 value: 60.477000000000004 - type: ndcg_at_1000 value: 61.746 - 
type: ndcg_at_3 value: 51.601 - type: ndcg_at_5 value: 53.795 - type: precision_at_1 value: 43.95 - type: precision_at_10 value: 9.009 - type: precision_at_100 value: 1.189 - type: precision_at_1000 value: 0.135 - type: precision_at_3 value: 22.989 - type: precision_at_5 value: 15.473 - type: recall_at_1 value: 38.556000000000004 - type: recall_at_10 value: 70.159 - type: recall_at_100 value: 87.132 - type: recall_at_1000 value: 96.16 - type: recall_at_3 value: 56.906 - type: recall_at_5 value: 62.332 - type: map_at_1 value: 24.238 - type: map_at_10 value: 32.5 - type: map_at_100 value: 33.637 - type: map_at_1000 value: 33.719 - type: map_at_3 value: 30.026999999999997 - type: map_at_5 value: 31.555 - type: mrr_at_1 value: 26.328000000000003 - type: mrr_at_10 value: 34.44 - type: mrr_at_100 value: 35.455999999999996 - type: mrr_at_1000 value: 35.521 - type: mrr_at_3 value: 32.034 - type: mrr_at_5 value: 33.565 - type: ndcg_at_1 value: 26.328000000000003 - type: ndcg_at_10 value: 37.202 - type: ndcg_at_100 value: 42.728 - type: ndcg_at_1000 value: 44.792 - type: ndcg_at_3 value: 32.368 - type: ndcg_at_5 value: 35.008 - type: precision_at_1 value: 26.328000000000003 - type: precision_at_10 value: 5.7059999999999995 - type: precision_at_100 value: 0.8880000000000001 - type: precision_at_1000 value: 0.11100000000000002 - type: precision_at_3 value: 13.672 - type: precision_at_5 value: 9.74 - type: recall_at_1 value: 24.238 - type: recall_at_10 value: 49.829 - type: recall_at_100 value: 75.21 - type: recall_at_1000 value: 90.521 - type: recall_at_3 value: 36.867 - type: recall_at_5 value: 43.241 - type: map_at_1 value: 15.378 - type: map_at_10 value: 22.817999999999998 - type: map_at_100 value: 23.977999999999998 - type: map_at_1000 value: 24.108 - type: map_at_3 value: 20.719 - type: map_at_5 value: 21.889 - type: mrr_at_1 value: 19.03 - type: mrr_at_10 value: 27.022000000000002 - type: mrr_at_100 value: 28.011999999999997 - type: mrr_at_1000 value: 28.096 - type: mrr_at_3 value: 24.855 - type: mrr_at_5 value: 26.029999999999998 - type: ndcg_at_1 value: 19.03 - type: ndcg_at_10 value: 27.526 - type: ndcg_at_100 value: 33.040000000000006 - type: ndcg_at_1000 value: 36.187000000000005 - type: ndcg_at_3 value: 23.497 - type: ndcg_at_5 value: 25.334 - type: precision_at_1 value: 19.03 - type: precision_at_10 value: 4.963 - type: precision_at_100 value: 0.893 - type: precision_at_1000 value: 0.13 - type: precision_at_3 value: 11.360000000000001 - type: precision_at_5 value: 8.134 - type: recall_at_1 value: 15.378 - type: recall_at_10 value: 38.061 - type: recall_at_100 value: 61.754 - type: recall_at_1000 value: 84.259 - type: recall_at_3 value: 26.788 - type: recall_at_5 value: 31.326999999999998 - type: map_at_1 value: 27.511999999999997 - type: map_at_10 value: 37.429 - type: map_at_100 value: 38.818000000000005 - type: map_at_1000 value: 38.924 - type: map_at_3 value: 34.625 - type: map_at_5 value: 36.064 - type: mrr_at_1 value: 33.300999999999995 - type: mrr_at_10 value: 43.036 - type: mrr_at_100 value: 43.894 - type: mrr_at_1000 value: 43.936 - type: mrr_at_3 value: 40.825 - type: mrr_at_5 value: 42.028 - type: ndcg_at_1 value: 33.300999999999995 - type: ndcg_at_10 value: 43.229 - type: ndcg_at_100 value: 48.992000000000004 - type: ndcg_at_1000 value: 51.02100000000001 - type: ndcg_at_3 value: 38.794000000000004 - type: ndcg_at_5 value: 40.65 - type: precision_at_1 value: 33.300999999999995 - type: precision_at_10 value: 7.777000000000001 - type: precision_at_100 value: 1.269 - type: 
precision_at_1000 value: 0.163 - type: precision_at_3 value: 18.351 - type: precision_at_5 value: 12.762 - type: recall_at_1 value: 27.511999999999997 - type: recall_at_10 value: 54.788000000000004 - type: recall_at_100 value: 79.105 - type: recall_at_1000 value: 92.49199999999999 - type: recall_at_3 value: 41.924 - type: recall_at_5 value: 47.026 - type: map_at_1 value: 24.117 - type: map_at_10 value: 33.32 - type: map_at_100 value: 34.677 - type: map_at_1000 value: 34.78 - type: map_at_3 value: 30.233999999999998 - type: map_at_5 value: 31.668000000000003 - type: mrr_at_1 value: 29.566 - type: mrr_at_10 value: 38.244 - type: mrr_at_100 value: 39.245000000000005 - type: mrr_at_1000 value: 39.296 - type: mrr_at_3 value: 35.864000000000004 - type: mrr_at_5 value: 36.919999999999995 - type: ndcg_at_1 value: 29.566 - type: ndcg_at_10 value: 39.127 - type: ndcg_at_100 value: 44.989000000000004 - type: ndcg_at_1000 value: 47.189 - type: ndcg_at_3 value: 34.039 - type: ndcg_at_5 value: 35.744 - type: precision_at_1 value: 29.566 - type: precision_at_10 value: 7.385999999999999 - type: precision_at_100 value: 1.204 - type: precision_at_1000 value: 0.158 - type: precision_at_3 value: 16.286 - type: precision_at_5 value: 11.484 - type: recall_at_1 value: 24.117 - type: recall_at_10 value: 51.559999999999995 - type: recall_at_100 value: 77.104 - type: recall_at_1000 value: 91.79899999999999 - type: recall_at_3 value: 36.82 - type: recall_at_5 value: 41.453 - type: map_at_1 value: 25.17625 - type: map_at_10 value: 34.063916666666664 - type: map_at_100 value: 35.255500000000005 - type: map_at_1000 value: 35.37275 - type: map_at_3 value: 31.351666666666667 - type: map_at_5 value: 32.80608333333333 - type: mrr_at_1 value: 29.59783333333333 - type: mrr_at_10 value: 38.0925 - type: mrr_at_100 value: 38.957249999999995 - type: mrr_at_1000 value: 39.01608333333333 - type: mrr_at_3 value: 35.77625 - type: mrr_at_5 value: 37.04991666666667 - type: ndcg_at_1 value: 29.59783333333333 - type: ndcg_at_10 value: 39.343666666666664 - type: ndcg_at_100 value: 44.488249999999994 - type: ndcg_at_1000 value: 46.83358333333334 - type: ndcg_at_3 value: 34.69708333333333 - type: ndcg_at_5 value: 36.75075 - type: precision_at_1 value: 29.59783333333333 - type: precision_at_10 value: 6.884083333333332 - type: precision_at_100 value: 1.114 - type: precision_at_1000 value: 0.15108333333333332 - type: precision_at_3 value: 15.965250000000003 - type: precision_at_5 value: 11.246500000000001 - type: recall_at_1 value: 25.17625 - type: recall_at_10 value: 51.015999999999984 - type: recall_at_100 value: 73.60174999999998 - type: recall_at_1000 value: 89.849 - type: recall_at_3 value: 37.88399999999999 - type: recall_at_5 value: 43.24541666666666 - type: map_at_1 value: 24.537 - type: map_at_10 value: 31.081999999999997 - type: map_at_100 value: 32.042 - type: map_at_1000 value: 32.141 - type: map_at_3 value: 29.137 - type: map_at_5 value: 30.079 - type: mrr_at_1 value: 27.454 - type: mrr_at_10 value: 33.694 - type: mrr_at_100 value: 34.579 - type: mrr_at_1000 value: 34.649 - type: mrr_at_3 value: 32.004 - type: mrr_at_5 value: 32.794000000000004 - type: ndcg_at_1 value: 27.454 - type: ndcg_at_10 value: 34.915 - type: ndcg_at_100 value: 39.641 - type: ndcg_at_1000 value: 42.105 - type: ndcg_at_3 value: 31.276 - type: ndcg_at_5 value: 32.65 - type: precision_at_1 value: 27.454 - type: precision_at_10 value: 5.337 - type: precision_at_100 value: 0.8250000000000001 - type: precision_at_1000 value: 0.11199999999999999 - type: 
precision_at_3 value: 13.241 - type: precision_at_5 value: 8.895999999999999 - type: recall_at_1 value: 24.537 - type: recall_at_10 value: 44.324999999999996 - type: recall_at_100 value: 65.949 - type: recall_at_1000 value: 84.017 - type: recall_at_3 value: 33.857 - type: recall_at_5 value: 37.316 - type: map_at_1 value: 17.122 - type: map_at_10 value: 24.32 - type: map_at_100 value: 25.338 - type: map_at_1000 value: 25.462 - type: map_at_3 value: 22.064 - type: map_at_5 value: 23.322000000000003 - type: mrr_at_1 value: 20.647 - type: mrr_at_10 value: 27.858 - type: mrr_at_100 value: 28.743999999999996 - type: mrr_at_1000 value: 28.819 - type: mrr_at_3 value: 25.769 - type: mrr_at_5 value: 26.964 - type: ndcg_at_1 value: 20.647 - type: ndcg_at_10 value: 28.849999999999998 - type: ndcg_at_100 value: 33.849000000000004 - type: ndcg_at_1000 value: 36.802 - type: ndcg_at_3 value: 24.799 - type: ndcg_at_5 value: 26.682 - type: precision_at_1 value: 20.647 - type: precision_at_10 value: 5.2170000000000005 - type: precision_at_100 value: 0.906 - type: precision_at_1000 value: 0.134 - type: precision_at_3 value: 11.769 - type: precision_at_5 value: 8.486 - type: recall_at_1 value: 17.122 - type: recall_at_10 value: 38.999 - type: recall_at_100 value: 61.467000000000006 - type: recall_at_1000 value: 82.716 - type: recall_at_3 value: 27.601 - type: recall_at_5 value: 32.471 - type: map_at_1 value: 24.396 - type: map_at_10 value: 33.415 - type: map_at_100 value: 34.521 - type: map_at_1000 value: 34.631 - type: map_at_3 value: 30.703999999999997 - type: map_at_5 value: 32.166 - type: mrr_at_1 value: 28.825 - type: mrr_at_10 value: 37.397000000000006 - type: mrr_at_100 value: 38.286 - type: mrr_at_1000 value: 38.346000000000004 - type: mrr_at_3 value: 35.028 - type: mrr_at_5 value: 36.32 - type: ndcg_at_1 value: 28.825 - type: ndcg_at_10 value: 38.656 - type: ndcg_at_100 value: 43.856 - type: ndcg_at_1000 value: 46.31 - type: ndcg_at_3 value: 33.793 - type: ndcg_at_5 value: 35.909 - type: precision_at_1 value: 28.825 - type: precision_at_10 value: 6.567 - type: precision_at_100 value: 1.0330000000000001 - type: precision_at_1000 value: 0.135 - type: precision_at_3 value: 15.516 - type: precision_at_5 value: 10.914 - type: recall_at_1 value: 24.396 - type: recall_at_10 value: 50.747 - type: recall_at_100 value: 73.477 - type: recall_at_1000 value: 90.801 - type: recall_at_3 value: 37.1 - type: recall_at_5 value: 42.589 - type: map_at_1 value: 25.072 - type: map_at_10 value: 34.307 - type: map_at_100 value: 35.725 - type: map_at_1000 value: 35.943999999999996 - type: map_at_3 value: 30.906 - type: map_at_5 value: 32.818000000000005 - type: mrr_at_1 value: 29.644 - type: mrr_at_10 value: 38.673 - type: mrr_at_100 value: 39.459 - type: mrr_at_1000 value: 39.527 - type: mrr_at_3 value: 35.771 - type: mrr_at_5 value: 37.332 - type: ndcg_at_1 value: 29.644 - type: ndcg_at_10 value: 40.548 - type: ndcg_at_100 value: 45.678999999999995 - type: ndcg_at_1000 value: 48.488 - type: ndcg_at_3 value: 34.887 - type: ndcg_at_5 value: 37.543 - type: precision_at_1 value: 29.644 - type: precision_at_10 value: 7.688000000000001 - type: precision_at_100 value: 1.482 - type: precision_at_1000 value: 0.23600000000000002 - type: precision_at_3 value: 16.206 - type: precision_at_5 value: 12.016 - type: recall_at_1 value: 25.072 - type: recall_at_10 value: 53.478 - type: recall_at_100 value: 76.07300000000001 - type: recall_at_1000 value: 93.884 - type: recall_at_3 value: 37.583 - type: recall_at_5 value: 44.464 - type: 
map_at_1 value: 20.712 - type: map_at_10 value: 27.467999999999996 - type: map_at_100 value: 28.502 - type: map_at_1000 value: 28.610000000000003 - type: map_at_3 value: 24.887999999999998 - type: map_at_5 value: 26.273999999999997 - type: mrr_at_1 value: 22.736 - type: mrr_at_10 value: 29.553 - type: mrr_at_100 value: 30.485 - type: mrr_at_1000 value: 30.56 - type: mrr_at_3 value: 27.078999999999997 - type: mrr_at_5 value: 28.401 - type: ndcg_at_1 value: 22.736 - type: ndcg_at_10 value: 32.023 - type: ndcg_at_100 value: 37.158 - type: ndcg_at_1000 value: 39.823 - type: ndcg_at_3 value: 26.951999999999998 - type: ndcg_at_5 value: 29.281000000000002 - type: precision_at_1 value: 22.736 - type: precision_at_10 value: 5.213 - type: precision_at_100 value: 0.832 - type: precision_at_1000 value: 0.116 - type: precision_at_3 value: 11.459999999999999 - type: precision_at_5 value: 8.244 - type: recall_at_1 value: 20.712 - type: recall_at_10 value: 44.057 - type: recall_at_100 value: 67.944 - type: recall_at_1000 value: 87.925 - type: recall_at_3 value: 30.305 - type: recall_at_5 value: 36.071999999999996 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 10.181999999999999 - type: map_at_10 value: 16.66 - type: map_at_100 value: 18.273 - type: map_at_1000 value: 18.45 - type: map_at_3 value: 14.141 - type: map_at_5 value: 15.455 - type: mrr_at_1 value: 22.15 - type: mrr_at_10 value: 32.062000000000005 - type: mrr_at_100 value: 33.116 - type: mrr_at_1000 value: 33.168 - type: mrr_at_3 value: 28.827 - type: mrr_at_5 value: 30.892999999999997 - type: ndcg_at_1 value: 22.15 - type: ndcg_at_10 value: 23.532 - type: ndcg_at_100 value: 30.358 - type: ndcg_at_1000 value: 33.783 - type: ndcg_at_3 value: 19.222 - type: ndcg_at_5 value: 20.919999999999998 - type: precision_at_1 value: 22.15 - type: precision_at_10 value: 7.185999999999999 - type: precision_at_100 value: 1.433 - type: precision_at_1000 value: 0.207 - type: precision_at_3 value: 13.941 - type: precision_at_5 value: 10.906 - type: recall_at_1 value: 10.181999999999999 - type: recall_at_10 value: 28.104000000000003 - type: recall_at_100 value: 51.998999999999995 - type: recall_at_1000 value: 71.311 - type: recall_at_3 value: 17.698 - type: recall_at_5 value: 22.262999999999998 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 6.669 - type: map_at_10 value: 15.552 - type: map_at_100 value: 21.865000000000002 - type: map_at_1000 value: 23.268 - type: map_at_3 value: 11.309 - type: map_at_5 value: 13.084000000000001 - type: mrr_at_1 value: 55.50000000000001 - type: mrr_at_10 value: 66.46600000000001 - type: mrr_at_100 value: 66.944 - type: mrr_at_1000 value: 66.956 - type: mrr_at_3 value: 64.542 - type: mrr_at_5 value: 65.717 - type: ndcg_at_1 value: 44.75 - type: ndcg_at_10 value: 35.049 - type: ndcg_at_100 value: 39.073 - type: ndcg_at_1000 value: 46.208 - type: ndcg_at_3 value: 39.525 - type: ndcg_at_5 value: 37.156 - type: precision_at_1 value: 55.50000000000001 - type: precision_at_10 value: 27.800000000000004 - type: precision_at_100 value: 9.013 - type: precision_at_1000 value: 1.8800000000000001 - type: precision_at_3 value: 42.667 - type: precision_at_5 value: 36.0 - type: recall_at_1 value: 6.669 - type: recall_at_10 value: 21.811 - type: recall_at_100 value: 45.112 - type: recall_at_1000 value: 67.806 - type: recall_at_3 value: 13.373 - 
type: recall_at_5 value: 16.615 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 48.769999999999996 - type: f1 value: 42.91448356376592 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 54.013 - type: map_at_10 value: 66.239 - type: map_at_100 value: 66.62599999999999 - type: map_at_1000 value: 66.644 - type: map_at_3 value: 63.965 - type: map_at_5 value: 65.45400000000001 - type: mrr_at_1 value: 58.221000000000004 - type: mrr_at_10 value: 70.43700000000001 - type: mrr_at_100 value: 70.744 - type: mrr_at_1000 value: 70.75099999999999 - type: mrr_at_3 value: 68.284 - type: mrr_at_5 value: 69.721 - type: ndcg_at_1 value: 58.221000000000004 - type: ndcg_at_10 value: 72.327 - type: ndcg_at_100 value: 73.953 - type: ndcg_at_1000 value: 74.312 - type: ndcg_at_3 value: 68.062 - type: ndcg_at_5 value: 70.56400000000001 - type: precision_at_1 value: 58.221000000000004 - type: precision_at_10 value: 9.521 - type: precision_at_100 value: 1.045 - type: precision_at_1000 value: 0.109 - type: precision_at_3 value: 27.348 - type: precision_at_5 value: 17.794999999999998 - type: recall_at_1 value: 54.013 - type: recall_at_10 value: 86.957 - type: recall_at_100 value: 93.911 - type: recall_at_1000 value: 96.38 - type: recall_at_3 value: 75.555 - type: recall_at_5 value: 81.671 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 21.254 - type: map_at_10 value: 33.723 - type: map_at_100 value: 35.574 - type: map_at_1000 value: 35.730000000000004 - type: map_at_3 value: 29.473 - type: map_at_5 value: 31.543 - type: mrr_at_1 value: 41.358 - type: mrr_at_10 value: 49.498 - type: mrr_at_100 value: 50.275999999999996 - type: mrr_at_1000 value: 50.308 - type: mrr_at_3 value: 47.016000000000005 - type: mrr_at_5 value: 48.336 - type: ndcg_at_1 value: 41.358 - type: ndcg_at_10 value: 41.579 - type: ndcg_at_100 value: 48.455 - type: ndcg_at_1000 value: 51.165000000000006 - type: ndcg_at_3 value: 37.681 - type: ndcg_at_5 value: 38.49 - type: precision_at_1 value: 41.358 - type: precision_at_10 value: 11.543000000000001 - type: precision_at_100 value: 1.87 - type: precision_at_1000 value: 0.23600000000000002 - type: precision_at_3 value: 24.743000000000002 - type: precision_at_5 value: 17.994 - type: recall_at_1 value: 21.254 - type: recall_at_10 value: 48.698 - type: recall_at_100 value: 74.588 - type: recall_at_1000 value: 91.00200000000001 - type: recall_at_3 value: 33.939 - type: recall_at_5 value: 39.367000000000004 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 35.922 - type: map_at_10 value: 52.32599999999999 - type: map_at_100 value: 53.18000000000001 - type: map_at_1000 value: 53.245 - type: map_at_3 value: 49.294 - type: map_at_5 value: 51.202999999999996 - type: mrr_at_1 value: 71.843 - type: mrr_at_10 value: 78.24600000000001 - type: mrr_at_100 value: 78.515 - type: mrr_at_1000 value: 78.527 - type: mrr_at_3 value: 77.17500000000001 - type: mrr_at_5 value: 77.852 - type: ndcg_at_1 value: 71.843 - type: ndcg_at_10 value: 61.379 - type: ndcg_at_100 value: 64.535 - type: ndcg_at_1000 value: 65.888 - type: ndcg_at_3 value: 56.958 - type: ndcg_at_5 value: 59.434 - type: precision_at_1 value: 
71.843 - type: precision_at_10 value: 12.686 - type: precision_at_100 value: 1.517 - type: precision_at_1000 value: 0.16999999999999998 - type: precision_at_3 value: 35.778 - type: precision_at_5 value: 23.422 - type: recall_at_1 value: 35.922 - type: recall_at_10 value: 63.43 - type: recall_at_100 value: 75.868 - type: recall_at_1000 value: 84.88900000000001 - type: recall_at_3 value: 53.666000000000004 - type: recall_at_5 value: 58.555 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 79.4408 - type: ap value: 73.52820871620366 - type: f1 value: 79.36240238685001 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 21.826999999999998 - type: map_at_10 value: 34.04 - type: map_at_100 value: 35.226 - type: map_at_1000 value: 35.275 - type: map_at_3 value: 30.165999999999997 - type: map_at_5 value: 32.318000000000005 - type: mrr_at_1 value: 22.464000000000002 - type: mrr_at_10 value: 34.631 - type: mrr_at_100 value: 35.752 - type: mrr_at_1000 value: 35.795 - type: mrr_at_3 value: 30.798 - type: mrr_at_5 value: 32.946999999999996 - type: ndcg_at_1 value: 22.464000000000002 - type: ndcg_at_10 value: 40.919 - type: ndcg_at_100 value: 46.632 - type: ndcg_at_1000 value: 47.833 - type: ndcg_at_3 value: 32.992 - type: ndcg_at_5 value: 36.834 - type: precision_at_1 value: 22.464000000000002 - type: precision_at_10 value: 6.494 - type: precision_at_100 value: 0.9369999999999999 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.021 - type: precision_at_5 value: 10.347000000000001 - type: recall_at_1 value: 21.826999999999998 - type: recall_at_10 value: 62.132 - type: recall_at_100 value: 88.55199999999999 - type: recall_at_1000 value: 97.707 - type: recall_at_3 value: 40.541 - type: recall_at_5 value: 49.739 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 95.68399452804377 - type: f1 value: 95.25490609832268 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 83.15321477428182 - type: f1 value: 60.35476439087966 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.92669804976462 - type: f1 value: 69.22815107207565 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.4855413584398 - type: f1 value: 72.92107516103387 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 32.412679360205544 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 28.09211869875204 - task: type: Reranking 
dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 30.540919056982545 - type: mrr value: 31.529904607063536 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.745 - type: map_at_10 value: 12.013 - type: map_at_100 value: 15.040000000000001 - type: map_at_1000 value: 16.427 - type: map_at_3 value: 8.841000000000001 - type: map_at_5 value: 10.289 - type: mrr_at_1 value: 45.201 - type: mrr_at_10 value: 53.483999999999995 - type: mrr_at_100 value: 54.20700000000001 - type: mrr_at_1000 value: 54.252 - type: mrr_at_3 value: 51.29 - type: mrr_at_5 value: 52.73 - type: ndcg_at_1 value: 43.808 - type: ndcg_at_10 value: 32.445 - type: ndcg_at_100 value: 30.031000000000002 - type: ndcg_at_1000 value: 39.007 - type: ndcg_at_3 value: 37.204 - type: ndcg_at_5 value: 35.07 - type: precision_at_1 value: 45.201 - type: precision_at_10 value: 23.684 - type: precision_at_100 value: 7.600999999999999 - type: precision_at_1000 value: 2.043 - type: precision_at_3 value: 33.953 - type: precision_at_5 value: 29.412 - type: recall_at_1 value: 5.745 - type: recall_at_10 value: 16.168 - type: recall_at_100 value: 30.875999999999998 - type: recall_at_1000 value: 62.686 - type: recall_at_3 value: 9.75 - type: recall_at_5 value: 12.413 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 37.828 - type: map_at_10 value: 53.239000000000004 - type: map_at_100 value: 54.035999999999994 - type: map_at_1000 value: 54.067 - type: map_at_3 value: 49.289 - type: map_at_5 value: 51.784 - type: mrr_at_1 value: 42.497 - type: mrr_at_10 value: 55.916999999999994 - type: mrr_at_100 value: 56.495 - type: mrr_at_1000 value: 56.516999999999996 - type: mrr_at_3 value: 52.800000000000004 - type: mrr_at_5 value: 54.722 - type: ndcg_at_1 value: 42.468 - type: ndcg_at_10 value: 60.437 - type: ndcg_at_100 value: 63.731 - type: ndcg_at_1000 value: 64.41799999999999 - type: ndcg_at_3 value: 53.230999999999995 - type: ndcg_at_5 value: 57.26 - type: precision_at_1 value: 42.468 - type: precision_at_10 value: 9.47 - type: precision_at_100 value: 1.1360000000000001 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 23.724999999999998 - type: precision_at_5 value: 16.593 - type: recall_at_1 value: 37.828 - type: recall_at_10 value: 79.538 - type: recall_at_100 value: 93.646 - type: recall_at_1000 value: 98.72999999999999 - type: recall_at_3 value: 61.134 - type: recall_at_5 value: 70.377 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 70.548 - type: map_at_10 value: 84.466 - type: map_at_100 value: 85.10600000000001 - type: map_at_1000 value: 85.123 - type: map_at_3 value: 81.57600000000001 - type: map_at_5 value: 83.399 - type: mrr_at_1 value: 81.24 - type: mrr_at_10 value: 87.457 - type: mrr_at_100 value: 87.574 - type: mrr_at_1000 value: 87.575 - type: mrr_at_3 value: 86.507 - type: mrr_at_5 value: 87.205 - type: ndcg_at_1 value: 81.25 - type: ndcg_at_10 value: 88.203 - type: ndcg_at_100 value: 89.457 - type: ndcg_at_1000 value: 89.563 - type: ndcg_at_3 value: 85.465 - type: ndcg_at_5 value: 87.007 - type: precision_at_1 value: 81.25 - type: precision_at_10 value: 13.373 - type: precision_at_100 value: 1.5270000000000001 - type: precision_at_1000 
value: 0.157 - type: precision_at_3 value: 37.417 - type: precision_at_5 value: 24.556 - type: recall_at_1 value: 70.548 - type: recall_at_10 value: 95.208 - type: recall_at_100 value: 99.514 - type: recall_at_1000 value: 99.988 - type: recall_at_3 value: 87.214 - type: recall_at_5 value: 91.696 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 53.04822095496839 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 60.30778476474675 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.692 - type: map_at_10 value: 11.766 - type: map_at_100 value: 13.904 - type: map_at_1000 value: 14.216999999999999 - type: map_at_3 value: 8.245 - type: map_at_5 value: 9.92 - type: mrr_at_1 value: 23.0 - type: mrr_at_10 value: 33.78 - type: mrr_at_100 value: 34.922 - type: mrr_at_1000 value: 34.973 - type: mrr_at_3 value: 30.2 - type: mrr_at_5 value: 32.565 - type: ndcg_at_1 value: 23.0 - type: ndcg_at_10 value: 19.863 - type: ndcg_at_100 value: 28.141 - type: ndcg_at_1000 value: 33.549 - type: ndcg_at_3 value: 18.434 - type: ndcg_at_5 value: 16.384 - type: precision_at_1 value: 23.0 - type: precision_at_10 value: 10.39 - type: precision_at_100 value: 2.235 - type: precision_at_1000 value: 0.35300000000000004 - type: precision_at_3 value: 17.133000000000003 - type: precision_at_5 value: 14.44 - type: recall_at_1 value: 4.692 - type: recall_at_10 value: 21.025 - type: recall_at_100 value: 45.324999999999996 - type: recall_at_1000 value: 71.675 - type: recall_at_3 value: 10.440000000000001 - type: recall_at_5 value: 14.64 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 84.96178184892842 - type: cos_sim_spearman value: 79.6487740813199 - type: euclidean_pearson value: 82.06661161625023 - type: euclidean_spearman value: 79.64876769031183 - type: manhattan_pearson value: 82.07061164575131 - type: manhattan_spearman value: 79.65197039464537 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 84.15305604100027 - type: cos_sim_spearman value: 74.27447427941591 - type: euclidean_pearson value: 80.52737337565307 - type: euclidean_spearman value: 74.27416077132192 - type: manhattan_pearson value: 80.53728571140387 - type: manhattan_spearman value: 74.28853605753457 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 83.44386080639279 - type: cos_sim_spearman value: 84.17947648159536 - type: euclidean_pearson value: 83.34145388129387 - type: euclidean_spearman value: 84.17947648159536 - type: manhattan_pearson value: 83.30699061927966 - type: manhattan_spearman value: 84.18125737380451 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 81.57392220985612 - type: cos_sim_spearman value: 78.80745014464101 
- type: euclidean_pearson value: 80.01660371487199 - type: euclidean_spearman value: 78.80741240102256 - type: manhattan_pearson value: 79.96810779507953 - type: manhattan_spearman value: 78.75600400119448 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 86.85421063026625 - type: cos_sim_spearman value: 87.55320285299192 - type: euclidean_pearson value: 86.69750143323517 - type: euclidean_spearman value: 87.55320284326378 - type: manhattan_pearson value: 86.63379169960379 - type: manhattan_spearman value: 87.4815029877984 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 84.31314130411842 - type: cos_sim_spearman value: 85.3489588181433 - type: euclidean_pearson value: 84.13240933463535 - type: euclidean_spearman value: 85.34902871403281 - type: manhattan_pearson value: 84.01183086503559 - type: manhattan_spearman value: 85.19316703166102 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 89.09979781689536 - type: cos_sim_spearman value: 88.87813323759015 - type: euclidean_pearson value: 88.65413031123792 - type: euclidean_spearman value: 88.87813323759015 - type: manhattan_pearson value: 88.61818758256024 - type: manhattan_spearman value: 88.81044100494604 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 62.30693258111531 - type: cos_sim_spearman value: 62.195516523251946 - type: euclidean_pearson value: 62.951283701049476 - type: euclidean_spearman value: 62.195516523251946 - type: manhattan_pearson value: 63.068322281439535 - type: manhattan_spearman value: 62.10621171028406 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 84.27092833763909 - type: cos_sim_spearman value: 84.84429717949759 - type: euclidean_pearson value: 84.8516966060792 - type: euclidean_spearman value: 84.84429717949759 - type: manhattan_pearson value: 84.82203139242881 - type: manhattan_spearman value: 84.8358503952945 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 83.10290863981409 - type: mrr value: 95.31168450286097 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 52.161 - type: map_at_10 value: 62.138000000000005 - type: map_at_100 value: 62.769 - type: map_at_1000 value: 62.812 - type: map_at_3 value: 59.111000000000004 - type: map_at_5 value: 60.995999999999995 - type: mrr_at_1 value: 55.333 - type: mrr_at_10 value: 63.504000000000005 - type: mrr_at_100 value: 64.036 - type: mrr_at_1000 value: 64.08 - type: mrr_at_3 value: 61.278 - type: mrr_at_5 value: 62.778 - type: ndcg_at_1 value: 55.333 - type: ndcg_at_10 value: 66.678 - type: ndcg_at_100 value: 69.415 - type: ndcg_at_1000 value: 70.453 - type: ndcg_at_3 value: 61.755 - type: ndcg_at_5 value: 64.546 - type: 
precision_at_1 value: 55.333 - type: precision_at_10 value: 9.033 - type: precision_at_100 value: 1.043 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 24.221999999999998 - type: precision_at_5 value: 16.333000000000002 - type: recall_at_1 value: 52.161 - type: recall_at_10 value: 79.156 - type: recall_at_100 value: 91.333 - type: recall_at_1000 value: 99.333 - type: recall_at_3 value: 66.43299999999999 - type: recall_at_5 value: 73.272 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.81287128712871 - type: cos_sim_ap value: 95.30034785910676 - type: cos_sim_f1 value: 90.28629856850716 - type: cos_sim_precision value: 92.36401673640168 - type: cos_sim_recall value: 88.3 - type: dot_accuracy value: 99.81287128712871 - type: dot_ap value: 95.30034785910676 - type: dot_f1 value: 90.28629856850716 - type: dot_precision value: 92.36401673640168 - type: dot_recall value: 88.3 - type: euclidean_accuracy value: 99.81287128712871 - type: euclidean_ap value: 95.30034785910676 - type: euclidean_f1 value: 90.28629856850716 - type: euclidean_precision value: 92.36401673640168 - type: euclidean_recall value: 88.3 - type: manhattan_accuracy value: 99.80990099009901 - type: manhattan_ap value: 95.26880751950654 - type: manhattan_f1 value: 90.22177419354838 - type: manhattan_precision value: 90.95528455284553 - type: manhattan_recall value: 89.5 - type: max_accuracy value: 99.81287128712871 - type: max_ap value: 95.30034785910676 - type: max_f1 value: 90.28629856850716 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 58.518662504351184 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 34.96168178378587 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 52.04862593471896 - type: mrr value: 52.97238402936932 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.092545236479946 - type: cos_sim_spearman value: 31.599851000175498 - type: dot_pearson value: 30.092542723901676 - type: dot_spearman value: 31.599851000175498 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.189 - type: map_at_10 value: 1.662 - type: map_at_100 value: 9.384 - type: map_at_1000 value: 22.669 - type: map_at_3 value: 0.5559999999999999 - type: map_at_5 value: 0.9039999999999999 - type: mrr_at_1 value: 68.0 - type: mrr_at_10 value: 81.01899999999999 - type: mrr_at_100 value: 81.01899999999999 - type: mrr_at_1000 value: 81.01899999999999 - type: mrr_at_3 value: 79.333 - type: mrr_at_5 value: 80.733 - type: ndcg_at_1 value: 63.0 - type: ndcg_at_10 value: 65.913 - type: ndcg_at_100 value: 51.895 - type: ndcg_at_1000 value: 46.967 - type: ndcg_at_3 value: 
65.49199999999999 - type: ndcg_at_5 value: 66.69699999999999 - type: precision_at_1 value: 68.0 - type: precision_at_10 value: 71.6 - type: precision_at_100 value: 53.66 - type: precision_at_1000 value: 21.124000000000002 - type: precision_at_3 value: 72.667 - type: precision_at_5 value: 74.0 - type: recall_at_1 value: 0.189 - type: recall_at_10 value: 1.913 - type: recall_at_100 value: 12.601999999999999 - type: recall_at_1000 value: 44.296 - type: recall_at_3 value: 0.605 - type: recall_at_5 value: 1.018 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.701 - type: map_at_10 value: 10.445 - type: map_at_100 value: 17.324 - type: map_at_1000 value: 19.161 - type: map_at_3 value: 5.497 - type: map_at_5 value: 7.278 - type: mrr_at_1 value: 30.612000000000002 - type: mrr_at_10 value: 45.534 - type: mrr_at_100 value: 45.792 - type: mrr_at_1000 value: 45.806999999999995 - type: mrr_at_3 value: 37.755 - type: mrr_at_5 value: 43.469 - type: ndcg_at_1 value: 26.531 - type: ndcg_at_10 value: 26.235000000000003 - type: ndcg_at_100 value: 39.17 - type: ndcg_at_1000 value: 51.038 - type: ndcg_at_3 value: 23.625 - type: ndcg_at_5 value: 24.338 - type: precision_at_1 value: 30.612000000000002 - type: precision_at_10 value: 24.285999999999998 - type: precision_at_100 value: 8.224 - type: precision_at_1000 value: 1.6179999999999999 - type: precision_at_3 value: 24.490000000000002 - type: precision_at_5 value: 24.898 - type: recall_at_1 value: 2.701 - type: recall_at_10 value: 17.997 - type: recall_at_100 value: 51.766999999999996 - type: recall_at_1000 value: 87.863 - type: recall_at_3 value: 6.295000000000001 - type: recall_at_5 value: 9.993 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 73.3474 - type: ap value: 15.393431414459924 - type: f1 value: 56.466681887882416 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 62.062818336163 - type: f1 value: 62.11230840463252 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 42.464892820845115 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 86.15962329379508 - type: cos_sim_ap value: 74.73674057919256 - type: cos_sim_f1 value: 68.81245642574947 - type: cos_sim_precision value: 61.48255813953488 - type: cos_sim_recall value: 78.12664907651715 - type: dot_accuracy value: 86.15962329379508 - type: dot_ap value: 74.7367634988281 - type: dot_f1 value: 68.81245642574947 - type: dot_precision value: 61.48255813953488 - type: dot_recall value: 78.12664907651715 - type: euclidean_accuracy value: 86.15962329379508 - type: euclidean_ap value: 74.7367761466634 - type: euclidean_f1 value: 68.81245642574947 - type: euclidean_precision value: 61.48255813953488 - type: euclidean_recall value: 78.12664907651715 - type: manhattan_accuracy value: 
86.21326816474935 - type: manhattan_ap value: 74.64416473733951 - type: manhattan_f1 value: 68.80924855491331 - type: manhattan_precision value: 61.23456790123457 - type: manhattan_recall value: 78.52242744063325 - type: max_accuracy value: 86.21326816474935 - type: max_ap value: 74.7367761466634 - type: max_f1 value: 68.81245642574947 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.97620988085536 - type: cos_sim_ap value: 86.08680845745758 - type: cos_sim_f1 value: 78.02793637114438 - type: cos_sim_precision value: 73.11082699683736 - type: cos_sim_recall value: 83.65414228518632 - type: dot_accuracy value: 88.97620988085536 - type: dot_ap value: 86.08681149437946 - type: dot_f1 value: 78.02793637114438 - type: dot_precision value: 73.11082699683736 - type: dot_recall value: 83.65414228518632 - type: euclidean_accuracy value: 88.97620988085536 - type: euclidean_ap value: 86.08681215460771 - type: euclidean_f1 value: 78.02793637114438 - type: euclidean_precision value: 73.11082699683736 - type: euclidean_recall value: 83.65414228518632 - type: manhattan_accuracy value: 88.88888888888889 - type: manhattan_ap value: 86.02916327562438 - type: manhattan_f1 value: 78.02063045516843 - type: manhattan_precision value: 73.38851947346994 - type: manhattan_recall value: 83.2768709578072 - type: max_accuracy value: 88.97620988085536 - type: max_ap value: 86.08681215460771 - type: max_f1 value: 78.02793637114438 --- <!-- TODO: add evaluation results here --> <br><br> <p align="center"> <img src="https://aeiljuispo.cloudimg.io/v7/https://cdn-uploads.huggingface.co/production/uploads/603763514de52ff951d89793/AFoybzd5lpBQXEBrQHuTt.png?w=200&h=200&f=face" alt="Finetuner logo: Finetuner helps you to create experiments in order to improve embeddings on search tasks. It accompanies you to deliver the last mile of performance-tuning for neural search applications." width="150px"> </p> <p align="center"> <b>The text embedding set trained by <a href="https://jina.ai/"><b>Jina AI</b></a>.</b> </p> ## Quick Start The easiest way to starting using `jina-embeddings-v2-base-en` is to use Jina AI's [Embedding API](https://jina.ai/embeddings/). ## Intended Usage & Model Info `jina-embeddings-v2-base-en` is an English, monolingual **embedding model** supporting **8192 sequence length**. It is based on a BERT architecture (JinaBERT) that supports the symmetric bidirectional variant of [ALiBi](https://arxiv.org/abs/2108.12409) to allow longer sequence length. The backbone `jina-bert-v2-base-en` is pretrained on the C4 dataset. The model is further trained on Jina AI's collection of more than 400 millions of sentence pairs and hard negatives. These pairs were obtained from various domains and were carefully selected through a thorough cleaning process. The embedding model was trained using 512 sequence length, but extrapolates to 8k sequence length (or even longer) thanks to ALiBi. This makes our model useful for a range of use cases, especially when processing long documents is needed, including long document retrieval, semantic textual similarity, text reranking, recommendation, RAG and LLM-based generative search, etc. With a standard size of 137 million parameters, the model enables fast inference while delivering better performance than our small model. It is recommended to use a single GPU for inference. 
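For the hosted [Embedding API](https://jina.ai/embeddings/) route mentioned in the Quick Start above, a minimal sketch with `requests` might look like the following; the endpoint URL and payload shape shown here are assumptions, so please double-check them against the Embedding API page before relying on them:

```python
import os
import requests

# Hypothetical request shape for the hosted Embedding API; verify the endpoint and
# payload against https://jina.ai/embeddings/ before use.
resp = requests.post(
    "https://api.jina.ai/v1/embeddings",  # assumed endpoint
    headers={"Authorization": f"Bearer {os.environ['JINA_API_KEY']}"},  # assumed env var holding your key
    json={
        "model": "jina-embeddings-v2-base-en",
        "input": ["How is the weather today?", "What is the current weather like today?"],
    },
)
resp.raise_for_status()
embeddings = [item["embedding"] for item in resp.json()["data"]]
print(len(embeddings), len(embeddings[0]))
```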
## Intended Usage & Model Info

`jina-embeddings-v2-base-en` is an English, monolingual **embedding model** supporting **8192 sequence length**.
It is based on a BERT architecture (JinaBERT) that supports the symmetric bidirectional variant of [ALiBi](https://arxiv.org/abs/2108.12409) to allow longer sequence length.
The backbone `jina-bert-v2-base-en` is pretrained on the C4 dataset.
The model is further trained on Jina AI's collection of more than 400 million sentence pairs and hard negatives.
These pairs were obtained from various domains and were carefully selected through a thorough cleaning process.

The embedding model was trained using 512 sequence length, but extrapolates to 8k sequence length (or even longer) thanks to ALiBi.
This makes our model useful for a range of use cases, especially when processing long documents is needed, including long document retrieval, semantic textual similarity, text reranking, recommendation, RAG and LLM-based generative search, etc.

With a standard size of 137 million parameters, the model enables fast inference while delivering better performance than our small model. It is recommended to use a single GPU for inference.

Additionally, we provide the following embedding models:

- [`jina-embeddings-v2-small-en`](https://huggingface.co/jinaai/jina-embeddings-v2-small-en): 33 million parameters.
- [`jina-embeddings-v2-base-en`](https://huggingface.co/jinaai/jina-embeddings-v2-base-en): 137 million parameters **(you are here)**.
- [`jina-embeddings-v2-base-zh`](https://huggingface.co/jinaai/jina-embeddings-v2-base-zh): Chinese-English Bilingual embeddings.
- [`jina-embeddings-v2-base-de`](https://huggingface.co/jinaai/jina-embeddings-v2-base-de): German-English Bilingual embeddings.
- [`jina-embeddings-v2-base-es`](https://huggingface.co/jinaai/jina-embeddings-v2-base-es): Spanish-English Bilingual embeddings.

## Data & Parameters

Jina Embeddings V2 [technical report](https://arxiv.org/abs/2310.19923)

## Usage

**<details><summary>Please apply mean pooling when integrating the model.</summary>**
<p>

### Why mean pooling?

`mean pooling` takes all token embeddings from the model output and averages them at sentence/paragraph level.
It has proven to be the most effective way to produce high-quality sentence embeddings.
We offer an `encode` function to deal with this.
However, if you would like to do it without using the default `encode` function:

```python
import torch
import torch.nn.functional as F
from transformers import AutoTokenizer, AutoModel

def mean_pooling(model_output, attention_mask):
    token_embeddings = model_output[0]
    input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
    return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)

sentences = ['How is the weather today?', 'What is the current weather like today?']

tokenizer = AutoTokenizer.from_pretrained('jinaai/jina-embeddings-v2-small-en')
model = AutoModel.from_pretrained('jinaai/jina-embeddings-v2-small-en', trust_remote_code=True)

encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')

with torch.no_grad():
    model_output = model(**encoded_input)

embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
embeddings = F.normalize(embeddings, p=2, dim=1)
```

</p>
</details>

You can use Jina Embedding models directly from the transformers package.

```python
!pip install transformers
from transformers import AutoModel
from numpy.linalg import norm

cos_sim = lambda a, b: (a @ b.T) / (norm(a) * norm(b))

model = AutoModel.from_pretrained('jinaai/jina-embeddings-v2-base-en', trust_remote_code=True)  # trust_remote_code is needed to use the encode method
embeddings = model.encode(['How is the weather today?', 'What is the current weather like today?'])
print(cos_sim(embeddings[0], embeddings[1]))
```

If you only want to handle shorter sequences, such as 2k, pass the `max_length` parameter to the `encode` function:

```python
embeddings = model.encode(
    ['Very long ... document'],
    max_length=2048
)
```

With its latest release (v2.3.0), sentence-transformers also supports Jina embeddings (please make sure that you are logged into Hugging Face as well):

```python
!pip install -U sentence-transformers
from sentence_transformers import SentenceTransformer
from sentence_transformers.util import cos_sim

model = SentenceTransformer(
    "jinaai/jina-embeddings-v2-base-en",  # switch to en/zh for English or Chinese
    trust_remote_code=True
)

# control your input sequence length up to 8192
model.max_seq_length = 1024

embeddings = model.encode([
    'How is the weather today?',
    'What is the current weather like today?'
])
print(cos_sim(embeddings[0], embeddings[1]))
```

## Alternatives to Using Transformers (or SentenceTransformers) Package

1. _Managed SaaS_: Get started with a free key on Jina AI's [Embedding API](https://jina.ai/embeddings/).
2. _Private and high-performance deployment_: Get started by picking from our suite of models and deploy them on [AWS Sagemaker](https://aws.amazon.com/marketplace/seller-profile?id=seller-stch2ludm6vgy).

## Use Jina Embeddings for RAG

According to the latest blog post from [LlamaIndex](https://blog.llamaindex.ai/boosting-rag-picking-the-best-embedding-reranker-models-42d079022e83),

> In summary, to achieve the peak performance in both hit rate and MRR, the combination of OpenAI or JinaAI-Base embeddings with the CohereRerank/bge-reranker-large reranker stands out.

<img src="https://miro.medium.com/v2/resize:fit:4800/format:webp/1*ZP2RVejCZovF3FDCg-Bx3A.png" width="780px">

## Plans

1. Bilingual embedding models supporting more European & Asian languages, including Spanish, French, Italian and Japanese.
2. Multimodal embedding models enabling multimodal RAG applications.
3. High-performance rerankers.

## Troubleshooting

**Loading of Model Code failed**

If you forgot to pass the `trust_remote_code=True` flag when calling `AutoModel.from_pretrained` or initializing the model via the `SentenceTransformer` class, you will receive an error that the model weights could not be initialized.
This is caused by transformers falling back to creating a default BERT model, instead of a jina-embedding model:

```bash
Some weights of the model checkpoint at jinaai/jina-embeddings-v2-base-en were not used when initializing BertModel: ['encoder.layer.2.mlp.layernorm.weight', 'encoder.layer.3.mlp.layernorm.weight', 'encoder.layer.10.mlp.wo.bias', 'encoder.layer.5.mlp.wo.bias', 'encoder.layer.2.mlp.layernorm.bias', 'encoder.layer.1.mlp.gated_layers.weight', 'encoder.layer.5.mlp.gated_layers.weight', 'encoder.layer.8.mlp.layernorm.bias', ...
```

**User is not logged into Huggingface**

The model is only available under [gated access](https://huggingface.co/docs/hub/models-gated).
This means you need to be logged into Hugging Face to load it.
If you receive the following error, you need to provide an access token, either by using the huggingface-cli or by providing the token via an environment variable (see the sketch below the error message):

```bash
OSError: jinaai/jina-embeddings-v2-base-en is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models'
If this is a private repository, make sure to pass a token having permission to this repo with `use_auth_token` or log in with `huggingface-cli login` and pass `use_auth_token=True`.
```
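A minimal authentication sketch covering both options is shown below. The `HF_TOKEN` environment-variable name is a placeholder, and whether the `token` keyword or the older `use_auth_token` keyword applies depends on your `transformers` and `huggingface_hub` versions.

```python
# Hypothetical authentication sketch; the environment-variable name is a placeholder
# and the exact keyword (`token` vs. the older `use_auth_token`) depends on your
# transformers / huggingface_hub versions.
import os
from huggingface_hub import login
from transformers import AutoModel

# Option 1: log in programmatically (the equivalent of `huggingface-cli login`).
login(token=os.environ["HF_TOKEN"])  # assumes the token is exported as HF_TOKEN

# Option 2: pass the token directly when loading the gated model.
model = AutoModel.from_pretrained(
    "jinaai/jina-embeddings-v2-base-en",
    trust_remote_code=True,
    token=os.environ["HF_TOKEN"],
)
```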
## Contact

Join our [Discord community](https://discord.jina.ai) and chat with other community members about ideas.

## Citation

If you find Jina Embeddings useful in your research, please cite the following paper:

```
@misc{günther2023jina,
      title={Jina Embeddings 2: 8192-Token General-Purpose Text Embeddings for Long Documents},
      author={Michael Günther and Jackmin Ong and Isabelle Mohr and Alaeddine Abdessalem and Tanguy Abel and Mohammad Kalim Akram and Susana Guzman and Georgios Mastrapas and Saba Sturua and Bo Wang and Maximilian Werk and Nan Wang and Han Xiao},
      year={2023},
      eprint={2310.19923},
      archivePrefix={arXiv},
      primaryClass={cs.CL}
}
```
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
koble-ai/jina-embeddings-v3
koble-ai
feature-extraction
[ "transformers", "pytorch", "onnx", "safetensors", "xlm-roberta", "feature-extraction", "sentence-similarity", "mteb", "sentence-transformers", "custom_code", "multilingual", "af", "am", "ar", "as", "az", "be", "bg", "bn", "br", "bs", "ca", "cs", "cy", "da", "de", "el", "en", "eo", "es", "et", "eu", "fa", "fi", "fr", "fy", "ga", "gd", "gl", "gu", "ha", "he", "hi", "hr", "hu", "hy", "id", "is", "it", "ja", "jv", "ka", "kk", "km", "kn", "ko", "ku", "ky", "la", "lo", "lt", "lv", "mg", "mk", "ml", "mn", "mr", "ms", "my", "ne", "nl", "no", "om", "or", "pa", "pl", "ps", "pt", "ro", "ru", "sa", "sd", "si", "sk", "sl", "so", "sq", "sr", "su", "sv", "sw", "ta", "te", "th", "tl", "tr", "ug", "uk", "ur", "uz", "vi", "xh", "yi", "zh", "arxiv:2409.10173", "license:cc-by-nc-4.0", "model-index", "text-embeddings-inference", "region:us" ]
2025-01-17T12:40:58
2025-01-17T13:20:16
172
3
--- language: - multilingual - af - am - ar - as - az - be - bg - bn - br - bs - ca - cs - cy - da - de - el - en - eo - es - et - eu - fa - fi - fr - fy - ga - gd - gl - gu - ha - he - hi - hr - hu - hy - id - is - it - ja - jv - ka - kk - km - kn - ko - ku - ky - la - lo - lt - lv - mg - mk - ml - mn - mr - ms - my - ne - nl - false - om - or - pa - pl - ps - pt - ro - ru - sa - sd - si - sk - sl - so - sq - sr - su - sv - sw - ta - te - th - tl - tr - ug - uk - ur - uz - vi - xh - yi - zh library_name: transformers license: cc-by-nc-4.0 tags: - feature-extraction - sentence-similarity - mteb - sentence-transformers inference: false model-index: - name: jina-embeddings-v3 results: - task: type: STS dataset: name: MTEB AFQMC (default) type: C-MTEB/AFQMC config: default split: validation revision: b44c3b011063adb25877c13823db83bb193913c4 metrics: - type: cosine_pearson value: 41.74237700998808 - type: cosine_spearman value: 43.4726782647566 - type: euclidean_pearson value: 42.244585459479964 - type: euclidean_spearman value: 43.525070045169606 - type: main_score value: 43.4726782647566 - type: manhattan_pearson value: 42.04616728224863 - type: manhattan_spearman value: 43.308828270754645 - type: pearson value: 41.74237700998808 - type: spearman value: 43.4726782647566 - task: type: Retrieval dataset: name: MTEB ArguAna-PL (default) type: clarin-knext/arguana-pl config: default split: test revision: 63fc86750af76253e8c760fc9e534bbf24d260a2 metrics: - type: main_score value: 50.117999999999995 - type: map_at_1 value: 24.253 - type: map_at_10 value: 40.725 - type: map_at_100 value: 41.699999999999996 - type: map_at_1000 value: 41.707 - type: map_at_20 value: 41.467999999999996 - type: map_at_3 value: 35.467 - type: map_at_5 value: 38.291 - type: mrr_at_1 value: 24.751066856330013 - type: mrr_at_10 value: 40.91063808169072 - type: mrr_at_100 value: 41.885497923928675 - type: mrr_at_1000 value: 41.89301098419842 - type: mrr_at_20 value: 41.653552355442514 - type: mrr_at_3 value: 35.656709340919775 - type: mrr_at_5 value: 38.466097676623946 - type: nauc_map_at_1000_diff1 value: 7.503000359807567 - type: nauc_map_at_1000_max value: -11.030405164830546 - type: nauc_map_at_1000_std value: -8.902792782585117 - type: nauc_map_at_100_diff1 value: 7.509899249593199 - type: nauc_map_at_100_max value: -11.023581259404406 - type: nauc_map_at_100_std value: -8.892241185067272 - type: nauc_map_at_10_diff1 value: 7.24369711881512 - type: nauc_map_at_10_max value: -10.810000200433278 - type: nauc_map_at_10_std value: -8.987230542165776 - type: nauc_map_at_1_diff1 value: 11.37175831832417 - type: nauc_map_at_1_max value: -13.315221903223055 - type: nauc_map_at_1_std value: -9.398199605510275 - type: nauc_map_at_20_diff1 value: 7.477364530860648 - type: nauc_map_at_20_max value: -10.901251218105566 - type: nauc_map_at_20_std value: -8.868148116405925 - type: nauc_map_at_3_diff1 value: 6.555548802174882 - type: nauc_map_at_3_max value: -12.247274800542934 - type: nauc_map_at_3_std value: -9.879475250984811 - type: nauc_map_at_5_diff1 value: 7.426588563355882 - type: nauc_map_at_5_max value: -11.347695686001805 - type: nauc_map_at_5_std value: -9.34441892203972 - type: nauc_mrr_at_1000_diff1 value: 5.99737552143614 - type: nauc_mrr_at_1000_max value: -11.327205136505727 - type: nauc_mrr_at_1000_std value: -8.791079115519503 - type: nauc_mrr_at_100_diff1 value: 6.004622525255784 - type: nauc_mrr_at_100_max value: -11.320336759899723 - type: nauc_mrr_at_100_std value: -8.780602249831777 - type: nauc_mrr_at_10_diff1 
value: 5.783623516930227 - type: nauc_mrr_at_10_max value: -11.095971693467078 - type: nauc_mrr_at_10_std value: -8.877242032013582 - type: nauc_mrr_at_1_diff1 value: 9.694937537703797 - type: nauc_mrr_at_1_max value: -12.531905083727912 - type: nauc_mrr_at_1_std value: -8.903992940100146 - type: nauc_mrr_at_20_diff1 value: 5.984841206233873 - type: nauc_mrr_at_20_max value: -11.195236951048969 - type: nauc_mrr_at_20_std value: -8.757266039186018 - type: nauc_mrr_at_3_diff1 value: 5.114333824261379 - type: nauc_mrr_at_3_max value: -12.64809799843464 - type: nauc_mrr_at_3_std value: -9.791146138025184 - type: nauc_mrr_at_5_diff1 value: 5.88941606224512 - type: nauc_mrr_at_5_max value: -11.763903418071918 - type: nauc_mrr_at_5_std value: -9.279175712709446 - type: nauc_ndcg_at_1000_diff1 value: 7.076950652226086 - type: nauc_ndcg_at_1000_max value: -10.386482092087371 - type: nauc_ndcg_at_1000_std value: -8.309190917074046 - type: nauc_ndcg_at_100_diff1 value: 7.2329220284865245 - type: nauc_ndcg_at_100_max value: -10.208048403220337 - type: nauc_ndcg_at_100_std value: -7.997975874274613 - type: nauc_ndcg_at_10_diff1 value: 6.065391100006953 - type: nauc_ndcg_at_10_max value: -9.046164377601153 - type: nauc_ndcg_at_10_std value: -8.34724889697153 - type: nauc_ndcg_at_1_diff1 value: 11.37175831832417 - type: nauc_ndcg_at_1_max value: -13.315221903223055 - type: nauc_ndcg_at_1_std value: -9.398199605510275 - type: nauc_ndcg_at_20_diff1 value: 6.949389989202601 - type: nauc_ndcg_at_20_max value: -9.35740451760307 - type: nauc_ndcg_at_20_std value: -7.761295171828212 - type: nauc_ndcg_at_3_diff1 value: 5.051471796151364 - type: nauc_ndcg_at_3_max value: -12.158763333711653 - type: nauc_ndcg_at_3_std value: -10.078902544421926 - type: nauc_ndcg_at_5_diff1 value: 6.527454512611454 - type: nauc_ndcg_at_5_max value: -10.525118233848586 - type: nauc_ndcg_at_5_std value: -9.120055125584031 - type: nauc_precision_at_1000_diff1 value: -10.6495668199151 - type: nauc_precision_at_1000_max value: 12.070656425217841 - type: nauc_precision_at_1000_std value: 55.844551709649004 - type: nauc_precision_at_100_diff1 value: 19.206967129266285 - type: nauc_precision_at_100_max value: 16.296851020813456 - type: nauc_precision_at_100_std value: 45.60378984257811 - type: nauc_precision_at_10_diff1 value: 0.6490335354304879 - type: nauc_precision_at_10_max value: 0.5757198255366447 - type: nauc_precision_at_10_std value: -4.875847131691451 - type: nauc_precision_at_1_diff1 value: 11.37175831832417 - type: nauc_precision_at_1_max value: -13.315221903223055 - type: nauc_precision_at_1_std value: -9.398199605510275 - type: nauc_precision_at_20_diff1 value: 4.899369866929203 - type: nauc_precision_at_20_max value: 5.988537297189552 - type: nauc_precision_at_20_std value: 4.830900387582837 - type: nauc_precision_at_3_diff1 value: 0.8791156910997744 - type: nauc_precision_at_3_max value: -11.983373635905993 - type: nauc_precision_at_3_std value: -10.646185111581257 - type: nauc_precision_at_5_diff1 value: 3.9314486166548432 - type: nauc_precision_at_5_max value: -7.798591396895839 - type: nauc_precision_at_5_std value: -8.293043407234125 - type: nauc_recall_at_1000_diff1 value: -10.649566819918673 - type: nauc_recall_at_1000_max value: 12.070656425214647 - type: nauc_recall_at_1000_std value: 55.84455170965023 - type: nauc_recall_at_100_diff1 value: 19.206967129265127 - type: nauc_recall_at_100_max value: 16.296851020813722 - type: nauc_recall_at_100_std value: 45.60378984257728 - type: nauc_recall_at_10_diff1 value: 
0.6490335354304176 - type: nauc_recall_at_10_max value: 0.5757198255366095 - type: nauc_recall_at_10_std value: -4.875847131691468 - type: nauc_recall_at_1_diff1 value: 11.37175831832417 - type: nauc_recall_at_1_max value: -13.315221903223055 - type: nauc_recall_at_1_std value: -9.398199605510275 - type: nauc_recall_at_20_diff1 value: 4.899369866929402 - type: nauc_recall_at_20_max value: 5.98853729718968 - type: nauc_recall_at_20_std value: 4.830900387582967 - type: nauc_recall_at_3_diff1 value: 0.8791156910997652 - type: nauc_recall_at_3_max value: -11.983373635905997 - type: nauc_recall_at_3_std value: -10.64618511158124 - type: nauc_recall_at_5_diff1 value: 3.9314486166548472 - type: nauc_recall_at_5_max value: -7.7985913968958585 - type: nauc_recall_at_5_std value: -8.293043407234132 - type: ndcg_at_1 value: 24.253 - type: ndcg_at_10 value: 50.117999999999995 - type: ndcg_at_100 value: 54.291999999999994 - type: ndcg_at_1000 value: 54.44799999999999 - type: ndcg_at_20 value: 52.771 - type: ndcg_at_3 value: 39.296 - type: ndcg_at_5 value: 44.373000000000005 - type: precision_at_1 value: 24.253 - type: precision_at_10 value: 8.016 - type: precision_at_100 value: 0.984 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.527 - type: precision_at_3 value: 16.808999999999997 - type: precision_at_5 value: 12.546 - type: recall_at_1 value: 24.253 - type: recall_at_10 value: 80.156 - type: recall_at_100 value: 98.43499999999999 - type: recall_at_1000 value: 99.57300000000001 - type: recall_at_20 value: 90.54100000000001 - type: recall_at_3 value: 50.427 - type: recall_at_5 value: 62.731 - task: type: Retrieval dataset: name: MTEB DBPedia-PL (default) type: clarin-knext/dbpedia-pl config: default split: test revision: 76afe41d9af165cc40999fcaa92312b8b012064a metrics: - type: main_score value: 34.827000000000005 - type: map_at_1 value: 7.049999999999999 - type: map_at_10 value: 14.982999999999999 - type: map_at_100 value: 20.816000000000003 - type: map_at_1000 value: 22.33 - type: map_at_20 value: 17.272000000000002 - type: map_at_3 value: 10.661 - type: map_at_5 value: 12.498 - type: mrr_at_1 value: 57.25 - type: mrr_at_10 value: 65.81934523809524 - type: mrr_at_100 value: 66.2564203928212 - type: mrr_at_1000 value: 66.27993662923856 - type: mrr_at_20 value: 66.0732139130649 - type: mrr_at_3 value: 64.08333333333333 - type: mrr_at_5 value: 65.27083333333333 - type: nauc_map_at_1000_diff1 value: 16.41780871174038 - type: nauc_map_at_1000_max value: 30.193946325654654 - type: nauc_map_at_1000_std value: 31.46095497039037 - type: nauc_map_at_100_diff1 value: 18.57903165498531 - type: nauc_map_at_100_max value: 29.541476938623262 - type: nauc_map_at_100_std value: 28.228604103301052 - type: nauc_map_at_10_diff1 value: 24.109434489748946 - type: nauc_map_at_10_max value: 21.475954208048968 - type: nauc_map_at_10_std value: 9.964464537806988 - type: nauc_map_at_1_diff1 value: 38.67437644802124 - type: nauc_map_at_1_max value: 14.52136658726491 - type: nauc_map_at_1_std value: -2.8981666782088755 - type: nauc_map_at_20_diff1 value: 21.42547228801935 - type: nauc_map_at_20_max value: 25.04510402960458 - type: nauc_map_at_20_std value: 16.533079346431155 - type: nauc_map_at_3_diff1 value: 26.63648858245477 - type: nauc_map_at_3_max value: 13.632235789780415 - type: nauc_map_at_3_std value: -0.40129174577700716 - type: nauc_map_at_5_diff1 value: 24.513861031197933 - type: nauc_map_at_5_max value: 16.599888813946688 - type: nauc_map_at_5_std value: 3.4448514739556346 - type: 
nauc_mrr_at_1000_diff1 value: 36.57353464537154 - type: nauc_mrr_at_1000_max value: 55.34763483979515 - type: nauc_mrr_at_1000_std value: 40.3722796438533 - type: nauc_mrr_at_100_diff1 value: 36.555989566513134 - type: nauc_mrr_at_100_max value: 55.347805216808396 - type: nauc_mrr_at_100_std value: 40.38465945075711 - type: nauc_mrr_at_10_diff1 value: 36.771572999261984 - type: nauc_mrr_at_10_max value: 55.41239897909165 - type: nauc_mrr_at_10_std value: 40.52058934624793 - type: nauc_mrr_at_1_diff1 value: 38.2472828531032 - type: nauc_mrr_at_1_max value: 51.528473828685705 - type: nauc_mrr_at_1_std value: 33.03676467942882 - type: nauc_mrr_at_20_diff1 value: 36.642602571889036 - type: nauc_mrr_at_20_max value: 55.3763342076553 - type: nauc_mrr_at_20_std value: 40.41520090500838 - type: nauc_mrr_at_3_diff1 value: 36.79451847426628 - type: nauc_mrr_at_3_max value: 54.59778581826193 - type: nauc_mrr_at_3_std value: 39.48392075873095 - type: nauc_mrr_at_5_diff1 value: 36.92150807529304 - type: nauc_mrr_at_5_max value: 55.03553978718272 - type: nauc_mrr_at_5_std value: 40.20147745489917 - type: nauc_ndcg_at_1000_diff1 value: 21.843092744321268 - type: nauc_ndcg_at_1000_max value: 44.93275990394279 - type: nauc_ndcg_at_1000_std value: 47.09186225236347 - type: nauc_ndcg_at_100_diff1 value: 25.180282568979095 - type: nauc_ndcg_at_100_max value: 41.737709709508394 - type: nauc_ndcg_at_100_std value: 38.80950644139446 - type: nauc_ndcg_at_10_diff1 value: 24.108368037214046 - type: nauc_ndcg_at_10_max value: 41.29298370689967 - type: nauc_ndcg_at_10_std value: 35.06450769738732 - type: nauc_ndcg_at_1_diff1 value: 35.51010679525079 - type: nauc_ndcg_at_1_max value: 42.40790024212412 - type: nauc_ndcg_at_1_std value: 26.696412036243157 - type: nauc_ndcg_at_20_diff1 value: 23.909989673256195 - type: nauc_ndcg_at_20_max value: 39.78444647091927 - type: nauc_ndcg_at_20_std value: 33.39544470364529 - type: nauc_ndcg_at_3_diff1 value: 22.50484297956035 - type: nauc_ndcg_at_3_max value: 39.14551926034168 - type: nauc_ndcg_at_3_std value: 30.330135925392014 - type: nauc_ndcg_at_5_diff1 value: 21.7798872028265 - type: nauc_ndcg_at_5_max value: 40.23856975248015 - type: nauc_ndcg_at_5_std value: 32.438381067440396 - type: nauc_precision_at_1000_diff1 value: -21.62692442272279 - type: nauc_precision_at_1000_max value: 0.9689046974430882 - type: nauc_precision_at_1000_std value: 18.54001058230465 - type: nauc_precision_at_100_diff1 value: -10.132258779856192 - type: nauc_precision_at_100_max value: 23.74516110444681 - type: nauc_precision_at_100_std value: 47.03416663319965 - type: nauc_precision_at_10_diff1 value: 1.543656509571949 - type: nauc_precision_at_10_max value: 36.98864812757555 - type: nauc_precision_at_10_std value: 46.56427199077426 - type: nauc_precision_at_1_diff1 value: 38.2472828531032 - type: nauc_precision_at_1_max value: 51.528473828685705 - type: nauc_precision_at_1_std value: 33.03676467942882 - type: nauc_precision_at_20_diff1 value: -4.612864872734335 - type: nauc_precision_at_20_max value: 34.03565449182125 - type: nauc_precision_at_20_std value: 48.880727648349534 - type: nauc_precision_at_3_diff1 value: 6.360850444467829 - type: nauc_precision_at_3_max value: 36.25816942368427 - type: nauc_precision_at_3_std value: 34.48882647419187 - type: nauc_precision_at_5_diff1 value: 2.6445596936740037 - type: nauc_precision_at_5_max value: 37.174463388899056 - type: nauc_precision_at_5_std value: 40.25254370626113 - type: nauc_recall_at_1000_diff1 value: 13.041227176748077 - type: 
nauc_recall_at_1000_max value: 39.722336427072094 - type: nauc_recall_at_1000_std value: 52.04032890059214 - type: nauc_recall_at_100_diff1 value: 18.286096899139153 - type: nauc_recall_at_100_max value: 34.072389201930314 - type: nauc_recall_at_100_std value: 37.73637623416653 - type: nauc_recall_at_10_diff1 value: 22.35560419280504 - type: nauc_recall_at_10_max value: 19.727247199595197 - type: nauc_recall_at_10_std value: 8.58498575109203 - type: nauc_recall_at_1_diff1 value: 38.67437644802124 - type: nauc_recall_at_1_max value: 14.52136658726491 - type: nauc_recall_at_1_std value: -2.8981666782088755 - type: nauc_recall_at_20_diff1 value: 19.026320886902916 - type: nauc_recall_at_20_max value: 22.753562309469867 - type: nauc_recall_at_20_std value: 14.89994263882445 - type: nauc_recall_at_3_diff1 value: 23.428129702129684 - type: nauc_recall_at_3_max value: 10.549153954790542 - type: nauc_recall_at_3_std value: -1.7590608997055206 - type: nauc_recall_at_5_diff1 value: 21.27448645803921 - type: nauc_recall_at_5_max value: 13.620279707461677 - type: nauc_recall_at_5_std value: 2.0577962208292675 - type: ndcg_at_1 value: 46.75 - type: ndcg_at_10 value: 34.827000000000005 - type: ndcg_at_100 value: 38.157999999999994 - type: ndcg_at_1000 value: 44.816 - type: ndcg_at_20 value: 34.152 - type: ndcg_at_3 value: 39.009 - type: ndcg_at_5 value: 36.826 - type: precision_at_1 value: 57.25 - type: precision_at_10 value: 27.575 - type: precision_at_100 value: 8.84 - type: precision_at_1000 value: 1.949 - type: precision_at_20 value: 20.724999999999998 - type: precision_at_3 value: 41.167 - type: precision_at_5 value: 35.199999999999996 - type: recall_at_1 value: 7.049999999999999 - type: recall_at_10 value: 19.817999999999998 - type: recall_at_100 value: 42.559999999999995 - type: recall_at_1000 value: 63.744 - type: recall_at_20 value: 25.968000000000004 - type: recall_at_3 value: 11.959 - type: recall_at_5 value: 14.939 - task: type: Retrieval dataset: name: MTEB FiQA-PL (default) type: clarin-knext/fiqa-pl config: default split: test revision: 2e535829717f8bf9dc829b7f911cc5bbd4e6608e metrics: - type: main_score value: 38.828 - type: map_at_1 value: 19.126 - type: map_at_10 value: 31.002000000000002 - type: map_at_100 value: 32.736 - type: map_at_1000 value: 32.933 - type: map_at_20 value: 31.894 - type: map_at_3 value: 26.583000000000002 - type: map_at_5 value: 28.904000000000003 - type: mrr_at_1 value: 37.808641975308646 - type: mrr_at_10 value: 46.36745541838134 - type: mrr_at_100 value: 47.14140915794908 - type: mrr_at_1000 value: 47.190701435388846 - type: mrr_at_20 value: 46.81387776440309 - type: mrr_at_3 value: 43.750000000000014 - type: mrr_at_5 value: 45.23919753086418 - type: nauc_map_at_1000_diff1 value: 38.5532285881503 - type: nauc_map_at_1000_max value: 34.44383884813453 - type: nauc_map_at_1000_std value: -1.3963497949476722 - type: nauc_map_at_100_diff1 value: 38.49292464176943 - type: nauc_map_at_100_max value: 34.33752755618645 - type: nauc_map_at_100_std value: -1.4794032905848582 - type: nauc_map_at_10_diff1 value: 38.26061536370962 - type: nauc_map_at_10_max value: 33.16977912721411 - type: nauc_map_at_10_std value: -2.3853370604730393 - type: nauc_map_at_1_diff1 value: 46.288767289528344 - type: nauc_map_at_1_max value: 25.67706785013364 - type: nauc_map_at_1_std value: -6.989769609924645 - type: nauc_map_at_20_diff1 value: 38.507270129330685 - type: nauc_map_at_20_max value: 33.70963328055982 - type: nauc_map_at_20_std value: -1.9835510011554272 - type: nauc_map_at_3_diff1 
value: 39.81061518646884 - type: nauc_map_at_3_max value: 30.101186374147748 - type: nauc_map_at_3_std value: -4.027120247237715 - type: nauc_map_at_5_diff1 value: 38.55602589746512 - type: nauc_map_at_5_max value: 31.515174267015983 - type: nauc_map_at_5_std value: -3.4064239358570303 - type: nauc_mrr_at_1000_diff1 value: 45.030514454725726 - type: nauc_mrr_at_1000_max value: 43.878919881666164 - type: nauc_mrr_at_1000_std value: 2.517594250297626 - type: nauc_mrr_at_100_diff1 value: 45.00868212878687 - type: nauc_mrr_at_100_max value: 43.87437011120001 - type: nauc_mrr_at_100_std value: 2.5257874265014966 - type: nauc_mrr_at_10_diff1 value: 44.855044606754056 - type: nauc_mrr_at_10_max value: 43.946617058785186 - type: nauc_mrr_at_10_std value: 2.5173751662794044 - type: nauc_mrr_at_1_diff1 value: 49.441510997817346 - type: nauc_mrr_at_1_max value: 43.08547383044357 - type: nauc_mrr_at_1_std value: -1.8747770703324347 - type: nauc_mrr_at_20_diff1 value: 45.019880416584215 - type: nauc_mrr_at_20_max value: 43.85691473662242 - type: nauc_mrr_at_20_std value: 2.4625487605091303 - type: nauc_mrr_at_3_diff1 value: 45.322041658604036 - type: nauc_mrr_at_3_max value: 43.95079293074395 - type: nauc_mrr_at_3_std value: 2.4644274393435737 - type: nauc_mrr_at_5_diff1 value: 44.99461837803437 - type: nauc_mrr_at_5_max value: 43.97934275090601 - type: nauc_mrr_at_5_std value: 2.5353091695125096 - type: nauc_ndcg_at_1000_diff1 value: 39.38449023275524 - type: nauc_ndcg_at_1000_max value: 39.48382767312788 - type: nauc_ndcg_at_1000_std value: 3.414789408343409 - type: nauc_ndcg_at_100_diff1 value: 38.29675861135578 - type: nauc_ndcg_at_100_max value: 38.2674786507297 - type: nauc_ndcg_at_100_std value: 2.7094055381218207 - type: nauc_ndcg_at_10_diff1 value: 38.09514955708717 - type: nauc_ndcg_at_10_max value: 36.664923238906525 - type: nauc_ndcg_at_10_std value: 0.6901410544967921 - type: nauc_ndcg_at_1_diff1 value: 49.441510997817346 - type: nauc_ndcg_at_1_max value: 43.08547383044357 - type: nauc_ndcg_at_1_std value: -1.8747770703324347 - type: nauc_ndcg_at_20_diff1 value: 38.44967736231759 - type: nauc_ndcg_at_20_max value: 36.871179313622584 - type: nauc_ndcg_at_20_std value: 1.157560360065234 - type: nauc_ndcg_at_3_diff1 value: 39.02419271805571 - type: nauc_ndcg_at_3_max value: 37.447669442586324 - type: nauc_ndcg_at_3_std value: 0.41502589779297794 - type: nauc_ndcg_at_5_diff1 value: 38.10233452742001 - type: nauc_ndcg_at_5_max value: 35.816381905465676 - type: nauc_ndcg_at_5_std value: -0.3704499913387088 - type: nauc_precision_at_1000_diff1 value: 2.451267097838658 - type: nauc_precision_at_1000_max value: 29.116394969085306 - type: nauc_precision_at_1000_std value: 14.85900786538363 - type: nauc_precision_at_100_diff1 value: 8.10919082251277 - type: nauc_precision_at_100_max value: 36.28388256191417 - type: nauc_precision_at_100_std value: 14.830039904317657 - type: nauc_precision_at_10_diff1 value: 15.02446609920477 - type: nauc_precision_at_10_max value: 41.008463775454054 - type: nauc_precision_at_10_std value: 10.431403152334486 - type: nauc_precision_at_1_diff1 value: 49.441510997817346 - type: nauc_precision_at_1_max value: 43.08547383044357 - type: nauc_precision_at_1_std value: -1.8747770703324347 - type: nauc_precision_at_20_diff1 value: 14.222022201169926 - type: nauc_precision_at_20_max value: 40.10189643835305 - type: nauc_precision_at_20_std value: 12.204443815975527 - type: nauc_precision_at_3_diff1 value: 25.41905395341234 - type: nauc_precision_at_3_max value: 
41.56133905339819 - type: nauc_precision_at_3_std value: 5.575516915590082 - type: nauc_precision_at_5_diff1 value: 20.20081221089351 - type: nauc_precision_at_5_max value: 40.95218555916681 - type: nauc_precision_at_5_std value: 7.2040745500708745 - type: nauc_recall_at_1000_diff1 value: 28.021198234033395 - type: nauc_recall_at_1000_max value: 36.165148684597504 - type: nauc_recall_at_1000_std value: 28.28852356008973 - type: nauc_recall_at_100_diff1 value: 21.882447802741897 - type: nauc_recall_at_100_max value: 26.979684607567222 - type: nauc_recall_at_100_std value: 9.783658817010082 - type: nauc_recall_at_10_diff1 value: 28.493097951178818 - type: nauc_recall_at_10_max value: 29.40937476550134 - type: nauc_recall_at_10_std value: 2.7593763576979353 - type: nauc_recall_at_1_diff1 value: 46.288767289528344 - type: nauc_recall_at_1_max value: 25.67706785013364 - type: nauc_recall_at_1_std value: -6.989769609924645 - type: nauc_recall_at_20_diff1 value: 27.638381299425234 - type: nauc_recall_at_20_max value: 27.942035836106328 - type: nauc_recall_at_20_std value: 3.489835161380808 - type: nauc_recall_at_3_diff1 value: 33.90054781392646 - type: nauc_recall_at_3_max value: 27.778812533030322 - type: nauc_recall_at_3_std value: -0.03054068020022706 - type: nauc_recall_at_5_diff1 value: 30.279060732221346 - type: nauc_recall_at_5_max value: 27.49854749597931 - type: nauc_recall_at_5_std value: 0.5434664581939099 - type: ndcg_at_1 value: 37.809 - type: ndcg_at_10 value: 38.828 - type: ndcg_at_100 value: 45.218 - type: ndcg_at_1000 value: 48.510999999999996 - type: ndcg_at_20 value: 41.11 - type: ndcg_at_3 value: 34.466 - type: ndcg_at_5 value: 35.843 - type: precision_at_1 value: 37.809 - type: precision_at_10 value: 11.157 - type: precision_at_100 value: 1.762 - type: precision_at_1000 value: 0.233 - type: precision_at_20 value: 6.497 - type: precision_at_3 value: 23.044999999999998 - type: precision_at_5 value: 17.284 - type: recall_at_1 value: 19.126 - type: recall_at_10 value: 46.062 - type: recall_at_100 value: 70.22800000000001 - type: recall_at_1000 value: 89.803 - type: recall_at_20 value: 53.217999999999996 - type: recall_at_3 value: 30.847 - type: recall_at_5 value: 37.11 - task: type: Retrieval dataset: name: MTEB HotpotQA-PL (default) type: clarin-knext/hotpotqa-pl config: default split: test revision: a0bd479ac97b4ccb5bd6ce320c415d0bb4beb907 metrics: - type: main_score value: 60.27 - type: map_at_1 value: 35.199000000000005 - type: map_at_10 value: 51.369 - type: map_at_100 value: 52.212 - type: map_at_1000 value: 52.28 - type: map_at_20 value: 51.864 - type: map_at_3 value: 48.446 - type: map_at_5 value: 50.302 - type: mrr_at_1 value: 70.39837947332883 - type: mrr_at_10 value: 76.8346141067273 - type: mrr_at_100 value: 77.10724392048137 - type: mrr_at_1000 value: 77.12037412892865 - type: mrr_at_20 value: 77.01061532947222 - type: mrr_at_3 value: 75.5908170155299 - type: mrr_at_5 value: 76.39095205941899 - type: nauc_map_at_1000_diff1 value: 24.701387884989117 - type: nauc_map_at_1000_max value: 23.25553235642178 - type: nauc_map_at_1000_std value: 7.1803506915661774 - type: nauc_map_at_100_diff1 value: 24.674498622483103 - type: nauc_map_at_100_max value: 23.234948525052175 - type: nauc_map_at_100_std value: 7.168677997105447 - type: nauc_map_at_10_diff1 value: 24.676025039755626 - type: nauc_map_at_10_max value: 23.171971872726964 - type: nauc_map_at_10_std value: 6.485610909852058 - type: nauc_map_at_1_diff1 value: 68.90178464319715 - type: nauc_map_at_1_max value: 
46.05537868917558 - type: nauc_map_at_1_std value: 1.7658552480698708 - type: nauc_map_at_20_diff1 value: 24.69297151842494 - type: nauc_map_at_20_max value: 23.213064691673637 - type: nauc_map_at_20_std value: 6.9357946556849 - type: nauc_map_at_3_diff1 value: 26.279128947950507 - type: nauc_map_at_3_max value: 23.929537354117922 - type: nauc_map_at_3_std value: 4.625061565714759 - type: nauc_map_at_5_diff1 value: 25.04448959482816 - type: nauc_map_at_5_max value: 23.432012857899338 - type: nauc_map_at_5_std value: 5.845744681998008 - type: nauc_mrr_at_1000_diff1 value: 66.7503918108276 - type: nauc_mrr_at_1000_max value: 48.42897342336844 - type: nauc_mrr_at_1000_std value: 5.3097517971144415 - type: nauc_mrr_at_100_diff1 value: 66.74645215862695 - type: nauc_mrr_at_100_max value: 48.4368663009989 - type: nauc_mrr_at_100_std value: 5.322297898555188 - type: nauc_mrr_at_10_diff1 value: 66.69310166180729 - type: nauc_mrr_at_10_max value: 48.475437698330225 - type: nauc_mrr_at_10_std value: 5.258183461631702 - type: nauc_mrr_at_1_diff1 value: 68.90178464319715 - type: nauc_mrr_at_1_max value: 46.05537868917558 - type: nauc_mrr_at_1_std value: 1.7658552480698708 - type: nauc_mrr_at_20_diff1 value: 66.72000262431975 - type: nauc_mrr_at_20_max value: 48.45593642981319 - type: nauc_mrr_at_20_std value: 5.353665929072101 - type: nauc_mrr_at_3_diff1 value: 66.84936676396276 - type: nauc_mrr_at_3_max value: 48.466611276778295 - type: nauc_mrr_at_3_std value: 4.485810398557475 - type: nauc_mrr_at_5_diff1 value: 66.62362565394174 - type: nauc_mrr_at_5_max value: 48.456431835482014 - type: nauc_mrr_at_5_std value: 5.08482458391903 - type: nauc_ndcg_at_1000_diff1 value: 29.984825173719443 - type: nauc_ndcg_at_1000_max value: 27.289179238639893 - type: nauc_ndcg_at_1000_std value: 10.661480455527526 - type: nauc_ndcg_at_100_diff1 value: 29.322074257047877 - type: nauc_ndcg_at_100_max value: 26.850650276220605 - type: nauc_ndcg_at_100_std value: 10.599247982501902 - type: nauc_ndcg_at_10_diff1 value: 29.659909113886094 - type: nauc_ndcg_at_10_max value: 26.836139599331005 - type: nauc_ndcg_at_10_std value: 8.12844399452719 - type: nauc_ndcg_at_1_diff1 value: 68.90178464319715 - type: nauc_ndcg_at_1_max value: 46.05537868917558 - type: nauc_ndcg_at_1_std value: 1.7658552480698708 - type: nauc_ndcg_at_20_diff1 value: 29.510802214854294 - type: nauc_ndcg_at_20_max value: 26.775562637730722 - type: nauc_ndcg_at_20_std value: 9.341342661702363 - type: nauc_ndcg_at_3_diff1 value: 32.741885846292966 - type: nauc_ndcg_at_3_max value: 28.44225108761343 - type: nauc_ndcg_at_3_std value: 5.204440768465042 - type: nauc_ndcg_at_5_diff1 value: 30.57856348635919 - type: nauc_ndcg_at_5_max value: 27.475007474301698 - type: nauc_ndcg_at_5_std value: 6.961546044312487 - type: nauc_precision_at_1000_diff1 value: 0.002113156309413332 - type: nauc_precision_at_1000_max value: 11.198242419541286 - type: nauc_precision_at_1000_std value: 28.69676419166541 - type: nauc_precision_at_100_diff1 value: 3.6049575557782627 - type: nauc_precision_at_100_max value: 12.499173524574791 - type: nauc_precision_at_100_std value: 23.3755281004721 - type: nauc_precision_at_10_diff1 value: 10.922574784853193 - type: nauc_precision_at_10_max value: 16.23221529562036 - type: nauc_precision_at_10_std value: 12.45014808813857 - type: nauc_precision_at_1_diff1 value: 68.90178464319715 - type: nauc_precision_at_1_max value: 46.05537868917558 - type: nauc_precision_at_1_std value: 1.7658552480698708 - type: nauc_precision_at_20_diff1 value: 
8.840710781302827 - type: nauc_precision_at_20_max value: 14.804644554205524 - type: nauc_precision_at_20_std value: 16.245009770815237 - type: nauc_precision_at_3_diff1 value: 19.447291487137573 - type: nauc_precision_at_3_max value: 21.47123471597057 - type: nauc_precision_at_3_std value: 6.441862800128802 - type: nauc_precision_at_5_diff1 value: 14.078545719721108 - type: nauc_precision_at_5_max value: 18.468288046016387 - type: nauc_precision_at_5_std value: 9.58650641691393 - type: nauc_recall_at_1000_diff1 value: 0.0021131563095336584 - type: nauc_recall_at_1000_max value: 11.198242419541558 - type: nauc_recall_at_1000_std value: 28.6967641916655 - type: nauc_recall_at_100_diff1 value: 3.6049575557781393 - type: nauc_recall_at_100_max value: 12.499173524574765 - type: nauc_recall_at_100_std value: 23.375528100472074 - type: nauc_recall_at_10_diff1 value: 10.922574784853168 - type: nauc_recall_at_10_max value: 16.2322152956203 - type: nauc_recall_at_10_std value: 12.450148088138535 - type: nauc_recall_at_1_diff1 value: 68.90178464319715 - type: nauc_recall_at_1_max value: 46.05537868917558 - type: nauc_recall_at_1_std value: 1.7658552480698708 - type: nauc_recall_at_20_diff1 value: 8.840710781302905 - type: nauc_recall_at_20_max value: 14.804644554205515 - type: nauc_recall_at_20_std value: 16.245009770815273 - type: nauc_recall_at_3_diff1 value: 19.447291487137498 - type: nauc_recall_at_3_max value: 21.47123471597054 - type: nauc_recall_at_3_std value: 6.441862800128763 - type: nauc_recall_at_5_diff1 value: 14.07854571972115 - type: nauc_recall_at_5_max value: 18.468288046016337 - type: nauc_recall_at_5_std value: 9.586506416913904 - type: ndcg_at_1 value: 70.39800000000001 - type: ndcg_at_10 value: 60.27 - type: ndcg_at_100 value: 63.400999999999996 - type: ndcg_at_1000 value: 64.847 - type: ndcg_at_20 value: 61.571 - type: ndcg_at_3 value: 55.875 - type: ndcg_at_5 value: 58.36599999999999 - type: precision_at_1 value: 70.39800000000001 - type: precision_at_10 value: 12.46 - type: precision_at_100 value: 1.493 - type: precision_at_1000 value: 0.169 - type: precision_at_20 value: 6.65 - type: precision_at_3 value: 35.062 - type: precision_at_5 value: 23.009 - type: recall_at_1 value: 35.199000000000005 - type: recall_at_10 value: 62.302 - type: recall_at_100 value: 74.666 - type: recall_at_1000 value: 84.355 - type: recall_at_20 value: 66.496 - type: recall_at_3 value: 52.593 - type: recall_at_5 value: 57.522 - task: type: Retrieval dataset: name: MTEB MSMARCO-PL (default) type: clarin-knext/msmarco-pl config: default split: test revision: 8634c07806d5cce3a6138e260e59b81760a0a640 metrics: - type: main_score value: 64.886 - type: map_at_1 value: 1.644 - type: map_at_10 value: 12.24 - type: map_at_100 value: 28.248 - type: map_at_1000 value: 33.506 - type: map_at_20 value: 17.497 - type: map_at_3 value: 4.9399999999999995 - type: map_at_5 value: 8.272 - type: mrr_at_1 value: 83.72093023255815 - type: mrr_at_10 value: 91.08527131782945 - type: mrr_at_100 value: 91.08527131782945 - type: mrr_at_1000 value: 91.08527131782945 - type: mrr_at_20 value: 91.08527131782945 - type: mrr_at_3 value: 91.08527131782945 - type: mrr_at_5 value: 91.08527131782945 - type: nauc_map_at_1000_diff1 value: -36.428271627303424 - type: nauc_map_at_1000_max value: 44.87615127218638 - type: nauc_map_at_1000_std value: 67.92696808824724 - type: nauc_map_at_100_diff1 value: -28.11674206786188 - type: nauc_map_at_100_max value: 36.422779766334955 - type: nauc_map_at_100_std value: 49.99876313755116 - type: 
nauc_map_at_10_diff1 value: -5.838593619806058 - type: nauc_map_at_10_max value: 11.026519190509742 - type: nauc_map_at_10_std value: 2.5268752263522045 - type: nauc_map_at_1_diff1 value: 17.897907271073016 - type: nauc_map_at_1_max value: 12.229062762540844 - type: nauc_map_at_1_std value: -4.088830895573149 - type: nauc_map_at_20_diff1 value: -13.871097716255626 - type: nauc_map_at_20_max value: 19.291271635609533 - type: nauc_map_at_20_std value: 16.745335606507826 - type: nauc_map_at_3_diff1 value: 4.425238457033843 - type: nauc_map_at_3_max value: 4.611864744680824 - type: nauc_map_at_3_std value: -8.986916608582863 - type: nauc_map_at_5_diff1 value: -6.254849256920095 - type: nauc_map_at_5_max value: 2.729437079919823 - type: nauc_map_at_5_std value: -7.235906279913092 - type: nauc_mrr_at_1000_diff1 value: 52.18669104947672 - type: nauc_mrr_at_1000_max value: 68.26259125411818 - type: nauc_mrr_at_1000_std value: 56.345086428353575 - type: nauc_mrr_at_100_diff1 value: 52.18669104947672 - type: nauc_mrr_at_100_max value: 68.26259125411818 - type: nauc_mrr_at_100_std value: 56.345086428353575 - type: nauc_mrr_at_10_diff1 value: 52.18669104947672 - type: nauc_mrr_at_10_max value: 68.26259125411818 - type: nauc_mrr_at_10_std value: 56.345086428353575 - type: nauc_mrr_at_1_diff1 value: 56.55126663944154 - type: nauc_mrr_at_1_max value: 66.37014285522565 - type: nauc_mrr_at_1_std value: 53.2508271389779 - type: nauc_mrr_at_20_diff1 value: 52.18669104947672 - type: nauc_mrr_at_20_max value: 68.26259125411818 - type: nauc_mrr_at_20_std value: 56.345086428353575 - type: nauc_mrr_at_3_diff1 value: 52.18669104947672 - type: nauc_mrr_at_3_max value: 68.26259125411818 - type: nauc_mrr_at_3_std value: 56.345086428353575 - type: nauc_mrr_at_5_diff1 value: 52.18669104947672 - type: nauc_mrr_at_5_max value: 68.26259125411818 - type: nauc_mrr_at_5_std value: 56.345086428353575 - type: nauc_ndcg_at_1000_diff1 value: -19.06422926483731 - type: nauc_ndcg_at_1000_max value: 56.30853514590265 - type: nauc_ndcg_at_1000_std value: 70.30810947505557 - type: nauc_ndcg_at_100_diff1 value: -25.72587586459692 - type: nauc_ndcg_at_100_max value: 51.433781241604194 - type: nauc_ndcg_at_100_std value: 68.37678512652792 - type: nauc_ndcg_at_10_diff1 value: -23.21198108212602 - type: nauc_ndcg_at_10_max value: 43.5450720846516 - type: nauc_ndcg_at_10_std value: 48.78307907005605 - type: nauc_ndcg_at_1_diff1 value: 44.00179301267447 - type: nauc_ndcg_at_1_max value: 48.202370455680395 - type: nauc_ndcg_at_1_std value: 25.69655992704088 - type: nauc_ndcg_at_20_diff1 value: -33.88168753446507 - type: nauc_ndcg_at_20_max value: 45.16199742613164 - type: nauc_ndcg_at_20_std value: 61.87098383164902 - type: nauc_ndcg_at_3_diff1 value: 11.19174449544048 - type: nauc_ndcg_at_3_max value: 44.34069860560555 - type: nauc_ndcg_at_3_std value: 27.451258369798115 - type: nauc_ndcg_at_5_diff1 value: -7.186520929432436 - type: nauc_ndcg_at_5_max value: 43.41869981139378 - type: nauc_ndcg_at_5_std value: 34.89898115995178 - type: nauc_precision_at_1000_diff1 value: -34.43998154563451 - type: nauc_precision_at_1000_max value: 29.172655907480372 - type: nauc_precision_at_1000_std value: 65.15824469614837 - type: nauc_precision_at_100_diff1 value: -37.82409643259692 - type: nauc_precision_at_100_max value: 38.24986991317909 - type: nauc_precision_at_100_std value: 72.74768183105327 - type: nauc_precision_at_10_diff1 value: -32.21556182780535 - type: nauc_precision_at_10_max value: 34.27170432382651 - type: nauc_precision_at_10_std value: 
58.358255004394664 - type: nauc_precision_at_1_diff1 value: 56.55126663944154 - type: nauc_precision_at_1_max value: 66.37014285522565 - type: nauc_precision_at_1_std value: 53.2508271389779 - type: nauc_precision_at_20_diff1 value: -40.18751579026395 - type: nauc_precision_at_20_max value: 33.960783153758896 - type: nauc_precision_at_20_std value: 65.42918390184195 - type: nauc_precision_at_3_diff1 value: -7.073870209006578 - type: nauc_precision_at_3_max value: 50.81535269862325 - type: nauc_precision_at_3_std value: 59.248681565955685 - type: nauc_precision_at_5_diff1 value: -31.136580596983876 - type: nauc_precision_at_5_max value: 45.88147792380426 - type: nauc_precision_at_5_std value: 67.46814230928243 - type: nauc_recall_at_1000_diff1 value: -23.15699999594577 - type: nauc_recall_at_1000_max value: 39.77277799761876 - type: nauc_recall_at_1000_std value: 60.326168012901114 - type: nauc_recall_at_100_diff1 value: -21.636664823598498 - type: nauc_recall_at_100_max value: 31.104969346131583 - type: nauc_recall_at_100_std value: 38.811686891592096 - type: nauc_recall_at_10_diff1 value: -10.542765625053569 - type: nauc_recall_at_10_max value: 2.043876058107446 - type: nauc_recall_at_10_std value: -5.578449908984766 - type: nauc_recall_at_1_diff1 value: 17.897907271073016 - type: nauc_recall_at_1_max value: 12.229062762540844 - type: nauc_recall_at_1_std value: -4.088830895573149 - type: nauc_recall_at_20_diff1 value: -15.132909355710103 - type: nauc_recall_at_20_max value: 12.659765287241065 - type: nauc_recall_at_20_std value: 8.277887800815819 - type: nauc_recall_at_3_diff1 value: -3.1975017812715016 - type: nauc_recall_at_3_max value: -3.5539857085038538 - type: nauc_recall_at_3_std value: -14.712102851318118 - type: nauc_recall_at_5_diff1 value: -14.040507717380743 - type: nauc_recall_at_5_max value: -6.126912150131701 - type: nauc_recall_at_5_std value: -13.821624015640355 - type: ndcg_at_1 value: 71.318 - type: ndcg_at_10 value: 64.886 - type: ndcg_at_100 value: 53.187 - type: ndcg_at_1000 value: 59.897999999999996 - type: ndcg_at_20 value: 58.96 - type: ndcg_at_3 value: 69.736 - type: ndcg_at_5 value: 70.14099999999999 - type: precision_at_1 value: 83.721 - type: precision_at_10 value: 71.163 - type: precision_at_100 value: 29.465000000000003 - type: precision_at_1000 value: 5.665 - type: precision_at_20 value: 57.791000000000004 - type: precision_at_3 value: 82.171 - type: precision_at_5 value: 81.86 - type: recall_at_1 value: 1.644 - type: recall_at_10 value: 14.238000000000001 - type: recall_at_100 value: 39.831 - type: recall_at_1000 value: 64.057 - type: recall_at_20 value: 21.021 - type: recall_at_3 value: 5.53 - type: recall_at_5 value: 9.623 - task: type: Retrieval dataset: name: MTEB NFCorpus-PL (default) type: clarin-knext/nfcorpus-pl config: default split: test revision: 9a6f9567fda928260afed2de480d79c98bf0bec0 metrics: - type: main_score value: 31.391000000000002 - type: map_at_1 value: 4.163 - type: map_at_10 value: 10.744 - type: map_at_100 value: 14.038999999999998 - type: map_at_1000 value: 15.434999999999999 - type: map_at_20 value: 12.16 - type: map_at_3 value: 7.614999999999999 - type: map_at_5 value: 9.027000000000001 - type: mrr_at_1 value: 39.0092879256966 - type: mrr_at_10 value: 48.69809327239668 - type: mrr_at_100 value: 49.20788148442068 - type: mrr_at_1000 value: 49.25509336494706 - type: mrr_at_20 value: 48.99606551850896 - type: mrr_at_3 value: 46.284829721362236 - type: mrr_at_5 value: 47.77089783281735 - type: nauc_map_at_1000_diff1 value: 
22.75421477116417 - type: nauc_map_at_1000_max value: 49.242283787799046 - type: nauc_map_at_1000_std value: 29.056888272331832 - type: nauc_map_at_100_diff1 value: 23.585977398585594 - type: nauc_map_at_100_max value: 48.25845199409498 - type: nauc_map_at_100_std value: 24.944264511223693 - type: nauc_map_at_10_diff1 value: 27.386613094780255 - type: nauc_map_at_10_max value: 41.52415346691586 - type: nauc_map_at_10_std value: 12.93872448563755 - type: nauc_map_at_1_diff1 value: 46.78688143865053 - type: nauc_map_at_1_max value: 37.20408843995871 - type: nauc_map_at_1_std value: 4.383444959401098 - type: nauc_map_at_20_diff1 value: 25.590969047740288 - type: nauc_map_at_20_max value: 44.57109307999418 - type: nauc_map_at_20_std value: 16.45855141821407 - type: nauc_map_at_3_diff1 value: 36.30017108362863 - type: nauc_map_at_3_max value: 34.66149613991648 - type: nauc_map_at_3_std value: 5.67985905078467 - type: nauc_map_at_5_diff1 value: 31.157644795417223 - type: nauc_map_at_5_max value: 37.274738661636825 - type: nauc_map_at_5_std value: 8.70088872394168 - type: nauc_mrr_at_1000_diff1 value: 25.638564218157384 - type: nauc_mrr_at_1000_max value: 57.77788270285353 - type: nauc_mrr_at_1000_std value: 43.507586592911274 - type: nauc_mrr_at_100_diff1 value: 25.662002580561584 - type: nauc_mrr_at_100_max value: 57.80578394278584 - type: nauc_mrr_at_100_std value: 43.543905743986635 - type: nauc_mrr_at_10_diff1 value: 25.426034796339835 - type: nauc_mrr_at_10_max value: 57.68443186258669 - type: nauc_mrr_at_10_std value: 43.438009108331215 - type: nauc_mrr_at_1_diff1 value: 26.073028156311075 - type: nauc_mrr_at_1_max value: 52.11817916720053 - type: nauc_mrr_at_1_std value: 37.41073893153695 - type: nauc_mrr_at_20_diff1 value: 25.548645553336147 - type: nauc_mrr_at_20_max value: 57.78552760401915 - type: nauc_mrr_at_20_std value: 43.521687428822325 - type: nauc_mrr_at_3_diff1 value: 25.72662577397805 - type: nauc_mrr_at_3_max value: 56.891263536265605 - type: nauc_mrr_at_3_std value: 41.384872305390104 - type: nauc_mrr_at_5_diff1 value: 25.552211551655386 - type: nauc_mrr_at_5_max value: 57.976813828353926 - type: nauc_mrr_at_5_std value: 43.504564461855544 - type: nauc_ndcg_at_1000_diff1 value: 23.456158044182757 - type: nauc_ndcg_at_1000_max value: 60.05411773552709 - type: nauc_ndcg_at_1000_std value: 47.857510017262584 - type: nauc_ndcg_at_100_diff1 value: 19.711635700390772 - type: nauc_ndcg_at_100_max value: 56.178746740470665 - type: nauc_ndcg_at_100_std value: 42.36829180286942 - type: nauc_ndcg_at_10_diff1 value: 18.364428967788413 - type: nauc_ndcg_at_10_max value: 54.38372506578223 - type: nauc_ndcg_at_10_std value: 41.75765411340369 - type: nauc_ndcg_at_1_diff1 value: 26.571093272640773 - type: nauc_ndcg_at_1_max value: 51.061788341958284 - type: nauc_ndcg_at_1_std value: 36.514987974075986 - type: nauc_ndcg_at_20_diff1 value: 18.345487193027697 - type: nauc_ndcg_at_20_max value: 54.62621882656994 - type: nauc_ndcg_at_20_std value: 41.42835554714241 - type: nauc_ndcg_at_3_diff1 value: 23.260105658139025 - type: nauc_ndcg_at_3_max value: 52.07747385334546 - type: nauc_ndcg_at_3_std value: 36.91985577837284 - type: nauc_ndcg_at_5_diff1 value: 20.40428109665566 - type: nauc_ndcg_at_5_max value: 53.52015347884604 - type: nauc_ndcg_at_5_std value: 39.46008849580017 - type: nauc_precision_at_1000_diff1 value: -7.3487344916380035 - type: nauc_precision_at_1000_max value: 16.58045221394852 - type: nauc_precision_at_1000_std value: 38.94030932397075 - type: nauc_precision_at_100_diff1 
value: -5.257743986683922 - type: nauc_precision_at_100_max value: 34.43071687475306 - type: nauc_precision_at_100_std value: 53.499519170670474 - type: nauc_precision_at_10_diff1 value: 2.385136433119139 - type: nauc_precision_at_10_max value: 47.210743878631064 - type: nauc_precision_at_10_std value: 47.22767704186548 - type: nauc_precision_at_1_diff1 value: 26.073028156311075 - type: nauc_precision_at_1_max value: 52.11817916720053 - type: nauc_precision_at_1_std value: 37.41073893153695 - type: nauc_precision_at_20_diff1 value: -0.3531531127238474 - type: nauc_precision_at_20_max value: 44.78044604856974 - type: nauc_precision_at_20_std value: 49.532804150743615 - type: nauc_precision_at_3_diff1 value: 15.350050569991447 - type: nauc_precision_at_3_max value: 51.01572315596549 - type: nauc_precision_at_3_std value: 38.801125728413155 - type: nauc_precision_at_5_diff1 value: 9.109003666144694 - type: nauc_precision_at_5_max value: 50.935269774898494 - type: nauc_precision_at_5_std value: 43.323548180559676 - type: nauc_recall_at_1000_diff1 value: 16.64743647648886 - type: nauc_recall_at_1000_max value: 38.46012283772285 - type: nauc_recall_at_1000_std value: 36.02016164796441 - type: nauc_recall_at_100_diff1 value: 14.005834785186744 - type: nauc_recall_at_100_max value: 37.70026105513647 - type: nauc_recall_at_100_std value: 27.085222642129697 - type: nauc_recall_at_10_diff1 value: 21.204106627422632 - type: nauc_recall_at_10_max value: 36.737624881893424 - type: nauc_recall_at_10_std value: 13.755054514272702 - type: nauc_recall_at_1_diff1 value: 46.78688143865053 - type: nauc_recall_at_1_max value: 37.20408843995871 - type: nauc_recall_at_1_std value: 4.383444959401098 - type: nauc_recall_at_20_diff1 value: 19.740977611421933 - type: nauc_recall_at_20_max value: 39.21908969539783 - type: nauc_recall_at_20_std value: 16.560269670318494 - type: nauc_recall_at_3_diff1 value: 32.189359545367815 - type: nauc_recall_at_3_max value: 31.693634445562758 - type: nauc_recall_at_3_std value: 6.246326281543587 - type: nauc_recall_at_5_diff1 value: 25.51586860499901 - type: nauc_recall_at_5_max value: 33.15934725342885 - type: nauc_recall_at_5_std value: 9.677778511696705 - type: ndcg_at_1 value: 37.307 - type: ndcg_at_10 value: 31.391000000000002 - type: ndcg_at_100 value: 28.877999999999997 - type: ndcg_at_1000 value: 37.16 - type: ndcg_at_20 value: 29.314 - type: ndcg_at_3 value: 35.405 - type: ndcg_at_5 value: 33.922999999999995 - type: precision_at_1 value: 39.009 - type: precision_at_10 value: 24.52 - type: precision_at_100 value: 7.703 - type: precision_at_1000 value: 2.04 - type: precision_at_20 value: 18.08 - type: precision_at_3 value: 34.469 - type: precision_at_5 value: 30.712 - type: recall_at_1 value: 4.163 - type: recall_at_10 value: 15.015999999999998 - type: recall_at_100 value: 30.606 - type: recall_at_1000 value: 59.606 - type: recall_at_20 value: 19.09 - type: recall_at_3 value: 9.139 - type: recall_at_5 value: 11.477 - task: type: Retrieval dataset: name: MTEB NQ-PL (default) type: clarin-knext/nq-pl config: default split: test revision: f171245712cf85dd4700b06bef18001578d0ca8d metrics: - type: main_score value: 54.017 - type: map_at_1 value: 34.193 - type: map_at_10 value: 47.497 - type: map_at_100 value: 48.441 - type: map_at_1000 value: 48.481 - type: map_at_20 value: 48.093 - type: map_at_3 value: 44.017 - type: map_at_5 value: 46.111000000000004 - type: mrr_at_1 value: 37.949015063731174 - type: mrr_at_10 value: 49.915772315105954 - type: mrr_at_100 value: 
50.62841255829997 - type: mrr_at_1000 value: 50.656773027666745 - type: mrr_at_20 value: 50.37785276657083 - type: mrr_at_3 value: 46.98725376593267 - type: mrr_at_5 value: 48.763035921205066 - type: nauc_map_at_1000_diff1 value: 39.5632191792873 - type: nauc_map_at_1000_max value: 37.4728247053629 - type: nauc_map_at_1000_std value: 5.742498414663762 - type: nauc_map_at_100_diff1 value: 39.555570352061906 - type: nauc_map_at_100_max value: 37.497880976847334 - type: nauc_map_at_100_std value: 5.7798021019465375 - type: nauc_map_at_10_diff1 value: 39.5423723444454 - type: nauc_map_at_10_max value: 37.41661971723365 - type: nauc_map_at_10_std value: 5.2378002164144695 - type: nauc_map_at_1_diff1 value: 41.52697034146981 - type: nauc_map_at_1_max value: 28.558995576942863 - type: nauc_map_at_1_std value: 0.13094542859192052 - type: nauc_map_at_20_diff1 value: 39.55484628943701 - type: nauc_map_at_20_max value: 37.5247794933719 - type: nauc_map_at_20_std value: 5.702881342279231 - type: nauc_map_at_3_diff1 value: 39.949323925425325 - type: nauc_map_at_3_max value: 35.770298168901924 - type: nauc_map_at_3_std value: 2.9127112432479874 - type: nauc_map_at_5_diff1 value: 39.768310617004545 - type: nauc_map_at_5_max value: 37.1549191664796 - type: nauc_map_at_5_std value: 4.4681285748269515 - type: nauc_mrr_at_1000_diff1 value: 39.14001746706457 - type: nauc_mrr_at_1000_max value: 37.477376518267775 - type: nauc_mrr_at_1000_std value: 6.8088891531621565 - type: nauc_mrr_at_100_diff1 value: 39.13054707413684 - type: nauc_mrr_at_100_max value: 37.498126443766274 - type: nauc_mrr_at_100_std value: 6.839411380129971 - type: nauc_mrr_at_10_diff1 value: 39.09764730048156 - type: nauc_mrr_at_10_max value: 37.58593798217306 - type: nauc_mrr_at_10_std value: 6.713795164982413 - type: nauc_mrr_at_1_diff1 value: 41.581599918664075 - type: nauc_mrr_at_1_max value: 31.500589231378722 - type: nauc_mrr_at_1_std value: 2.059116370339438 - type: nauc_mrr_at_20_diff1 value: 39.09011023988447 - type: nauc_mrr_at_20_max value: 37.55856008791344 - type: nauc_mrr_at_20_std value: 6.847165397615844 - type: nauc_mrr_at_3_diff1 value: 39.382542043738 - type: nauc_mrr_at_3_max value: 36.49265363659468 - type: nauc_mrr_at_3_std value: 4.759157976438336 - type: nauc_mrr_at_5_diff1 value: 39.304826333759976 - type: nauc_mrr_at_5_max value: 37.46326016736024 - type: nauc_mrr_at_5_std value: 6.122608305766621 - type: nauc_ndcg_at_1000_diff1 value: 38.568500038453266 - type: nauc_ndcg_at_1000_max value: 39.799710882413166 - type: nauc_ndcg_at_1000_std value: 9.357010223096639 - type: nauc_ndcg_at_100_diff1 value: 38.38026091343228 - type: nauc_ndcg_at_100_max value: 40.48398173542486 - type: nauc_ndcg_at_100_std value: 10.373054013302214 - type: nauc_ndcg_at_10_diff1 value: 38.27340980909964 - type: nauc_ndcg_at_10_max value: 40.35241649744093 - type: nauc_ndcg_at_10_std value: 8.579139930345168 - type: nauc_ndcg_at_1_diff1 value: 41.581599918664075 - type: nauc_ndcg_at_1_max value: 31.500589231378722 - type: nauc_ndcg_at_1_std value: 2.059116370339438 - type: nauc_ndcg_at_20_diff1 value: 38.26453028884807 - type: nauc_ndcg_at_20_max value: 40.70517858426641 - type: nauc_ndcg_at_20_std value: 9.987693876137905 - type: nauc_ndcg_at_3_diff1 value: 39.2078971733273 - type: nauc_ndcg_at_3_max value: 37.48672195565316 - type: nauc_ndcg_at_3_std value: 4.051464994659221 - type: nauc_ndcg_at_5_diff1 value: 38.883693595665285 - type: nauc_ndcg_at_5_max value: 39.763115634437135 - type: nauc_ndcg_at_5_std value: 6.738980451582073 - 
type: nauc_precision_at_1000_diff1 value: -7.223215910619012 - type: nauc_precision_at_1000_max value: 13.075844604892161 - type: nauc_precision_at_1000_std value: 19.864336920890107 - type: nauc_precision_at_100_diff1 value: 1.3305994810812418 - type: nauc_precision_at_100_max value: 25.9219108557104 - type: nauc_precision_at_100_std value: 27.5076605928207 - type: nauc_precision_at_10_diff1 value: 18.441551484970326 - type: nauc_precision_at_10_max value: 39.85995330437054 - type: nauc_precision_at_10_std value: 20.561269077428914 - type: nauc_precision_at_1_diff1 value: 41.581599918664075 - type: nauc_precision_at_1_max value: 31.500589231378722 - type: nauc_precision_at_1_std value: 2.059116370339438 - type: nauc_precision_at_20_diff1 value: 12.579593891480531 - type: nauc_precision_at_20_max value: 36.620221830588775 - type: nauc_precision_at_20_std value: 26.40364876775059 - type: nauc_precision_at_3_diff1 value: 30.158859294487073 - type: nauc_precision_at_3_max value: 41.168215766389174 - type: nauc_precision_at_3_std value: 9.44345004450809 - type: nauc_precision_at_5_diff1 value: 25.438624678672785 - type: nauc_precision_at_5_max value: 42.72802023518524 - type: nauc_precision_at_5_std value: 15.357657388511099 - type: nauc_recall_at_1000_diff1 value: 24.987564782718003 - type: nauc_recall_at_1000_max value: 70.508416373353 - type: nauc_recall_at_1000_std value: 69.75092280398808 - type: nauc_recall_at_100_diff1 value: 29.504202856421397 - type: nauc_recall_at_100_max value: 63.41356585545318 - type: nauc_recall_at_100_std value: 50.09250954437847 - type: nauc_recall_at_10_diff1 value: 32.355776022971774 - type: nauc_recall_at_10_max value: 49.47121901667283 - type: nauc_recall_at_10_std value: 19.418439406631244 - type: nauc_recall_at_1_diff1 value: 41.52697034146981 - type: nauc_recall_at_1_max value: 28.558995576942863 - type: nauc_recall_at_1_std value: 0.13094542859192052 - type: nauc_recall_at_20_diff1 value: 31.57334731023589 - type: nauc_recall_at_20_max value: 54.06567225197383 - type: nauc_recall_at_20_std value: 29.222029720570468 - type: nauc_recall_at_3_diff1 value: 36.45033533275773 - type: nauc_recall_at_3_max value: 40.39529713780803 - type: nauc_recall_at_3_std value: 5.21893897772794 - type: nauc_recall_at_5_diff1 value: 35.18471678478859 - type: nauc_recall_at_5_max value: 46.20100816867823 - type: nauc_recall_at_5_std value: 11.94481894633221 - type: ndcg_at_1 value: 37.949 - type: ndcg_at_10 value: 54.017 - type: ndcg_at_100 value: 58.126 - type: ndcg_at_1000 value: 59.073 - type: ndcg_at_20 value: 55.928 - type: ndcg_at_3 value: 47.494 - type: ndcg_at_5 value: 50.975 - type: precision_at_1 value: 37.949 - type: precision_at_10 value: 8.450000000000001 - type: precision_at_100 value: 1.083 - type: precision_at_1000 value: 0.117 - type: precision_at_20 value: 4.689 - type: precision_at_3 value: 21.051000000000002 - type: precision_at_5 value: 14.664 - type: recall_at_1 value: 34.193 - type: recall_at_10 value: 71.357 - type: recall_at_100 value: 89.434 - type: recall_at_1000 value: 96.536 - type: recall_at_20 value: 78.363 - type: recall_at_3 value: 54.551 - type: recall_at_5 value: 62.543000000000006 - task: type: Retrieval dataset: name: MTEB Quora-PL (default) type: clarin-knext/quora-pl config: default split: test revision: 0be27e93455051e531182b85e85e425aba12e9d4 metrics: - type: main_score value: 84.114 - type: map_at_1 value: 65.848 - type: map_at_10 value: 79.85900000000001 - type: map_at_100 value: 80.582 - type: map_at_1000 value: 80.60300000000001 - 
type: map_at_20 value: 80.321 - type: map_at_3 value: 76.741 - type: map_at_5 value: 78.72200000000001 - type: mrr_at_1 value: 75.97 - type: mrr_at_10 value: 83.04630158730119 - type: mrr_at_100 value: 83.22785731032968 - type: mrr_at_1000 value: 83.23123717623899 - type: mrr_at_20 value: 83.17412021320565 - type: mrr_at_3 value: 81.83333333333287 - type: mrr_at_5 value: 82.61933333333275 - type: nauc_map_at_1000_diff1 value: 73.26316553371083 - type: nauc_map_at_1000_max value: 27.92567859085245 - type: nauc_map_at_1000_std value: -47.477909533360446 - type: nauc_map_at_100_diff1 value: 73.2690602807223 - type: nauc_map_at_100_max value: 27.915868327849996 - type: nauc_map_at_100_std value: -47.525777766107595 - type: nauc_map_at_10_diff1 value: 73.45464428464894 - type: nauc_map_at_10_max value: 27.451611487246296 - type: nauc_map_at_10_std value: -49.35818715843809 - type: nauc_map_at_1_diff1 value: 77.29690208952982 - type: nauc_map_at_1_max value: 19.839875762282293 - type: nauc_map_at_1_std value: -45.355684654708284 - type: nauc_map_at_20_diff1 value: 73.35102731979796 - type: nauc_map_at_20_max value: 27.741506490134583 - type: nauc_map_at_20_std value: -48.22006207310331 - type: nauc_map_at_3_diff1 value: 73.94878241064137 - type: nauc_map_at_3_max value: 24.761321386766728 - type: nauc_map_at_3_std value: -51.20638883618126 - type: nauc_map_at_5_diff1 value: 73.66143558047698 - type: nauc_map_at_5_max value: 26.53483405013543 - type: nauc_map_at_5_std value: -50.697541279640056 - type: nauc_mrr_at_1000_diff1 value: 73.84632320009759 - type: nauc_mrr_at_1000_max value: 30.50182733610048 - type: nauc_mrr_at_1000_std value: -44.3021647995251 - type: nauc_mrr_at_100_diff1 value: 73.84480792662302 - type: nauc_mrr_at_100_max value: 30.50749424571614 - type: nauc_mrr_at_100_std value: -44.29615086388113 - type: nauc_mrr_at_10_diff1 value: 73.79442772949346 - type: nauc_mrr_at_10_max value: 30.55724252219984 - type: nauc_mrr_at_10_std value: -44.50997069462057 - type: nauc_mrr_at_1_diff1 value: 75.23369827945945 - type: nauc_mrr_at_1_max value: 29.20073967447664 - type: nauc_mrr_at_1_std value: -43.1920147658285 - type: nauc_mrr_at_20_diff1 value: 73.82731678072307 - type: nauc_mrr_at_20_max value: 30.566328605497667 - type: nauc_mrr_at_20_std value: -44.24683607643705 - type: nauc_mrr_at_3_diff1 value: 73.61997576749954 - type: nauc_mrr_at_3_max value: 30.150393853381917 - type: nauc_mrr_at_3_std value: -44.96847297506626 - type: nauc_mrr_at_5_diff1 value: 73.69084310616132 - type: nauc_mrr_at_5_max value: 30.578033703441125 - type: nauc_mrr_at_5_std value: -44.74920746066566 - type: nauc_ndcg_at_1000_diff1 value: 72.89349862557452 - type: nauc_ndcg_at_1000_max value: 29.824725190462086 - type: nauc_ndcg_at_1000_std value: -44.96284395063211 - type: nauc_ndcg_at_100_diff1 value: 72.85212753715273 - type: nauc_ndcg_at_100_max value: 29.933114207845605 - type: nauc_ndcg_at_100_std value: -44.944225570663754 - type: nauc_ndcg_at_10_diff1 value: 72.80576740454528 - type: nauc_ndcg_at_10_max value: 29.16829118320828 - type: nauc_ndcg_at_10_std value: -48.149473740079614 - type: nauc_ndcg_at_1_diff1 value: 75.00032534968587 - type: nauc_ndcg_at_1_max value: 29.61849062038547 - type: nauc_ndcg_at_1_std value: -42.560207043864054 - type: nauc_ndcg_at_20_diff1 value: 72.88440406302502 - type: nauc_ndcg_at_20_max value: 29.65496676092656 - type: nauc_ndcg_at_20_std value: -46.21238462167732 - type: nauc_ndcg_at_3_diff1 value: 72.37916962766987 - type: nauc_ndcg_at_3_max value: 27.125094834547586 
- type: nauc_ndcg_at_3_std value: -48.62942991399391 - type: nauc_ndcg_at_5_diff1 value: 72.57017330527658 - type: nauc_ndcg_at_5_max value: 28.470485561757254 - type: nauc_ndcg_at_5_std value: -49.07593345591059 - type: nauc_precision_at_1000_diff1 value: -41.67915575853946 - type: nauc_precision_at_1000_max value: 1.2012264478568844 - type: nauc_precision_at_1000_std value: 44.723834559400466 - type: nauc_precision_at_100_diff1 value: -40.45196679236971 - type: nauc_precision_at_100_max value: 2.3525450401714894 - type: nauc_precision_at_100_std value: 43.7092529413952 - type: nauc_precision_at_10_diff1 value: -30.256026923068767 - type: nauc_precision_at_10_max value: 8.313422052132559 - type: nauc_precision_at_10_std value: 25.929372356449694 - type: nauc_precision_at_1_diff1 value: 75.00032534968587 - type: nauc_precision_at_1_max value: 29.61849062038547 - type: nauc_precision_at_1_std value: -42.560207043864054 - type: nauc_precision_at_20_diff1 value: -35.61971069986584 - type: nauc_precision_at_20_max value: 5.4664303079116765 - type: nauc_precision_at_20_std value: 34.992352471692826 - type: nauc_precision_at_3_diff1 value: -5.691231842471157 - type: nauc_precision_at_3_max value: 14.797949087742444 - type: nauc_precision_at_3_std value: -0.1930317395644928 - type: nauc_precision_at_5_diff1 value: -20.03913781462645 - type: nauc_precision_at_5_max value: 11.956771408712749 - type: nauc_precision_at_5_std value: 13.179251389859731 - type: nauc_recall_at_1000_diff1 value: 64.03509042729674 - type: nauc_recall_at_1000_max value: 40.91691485428493 - type: nauc_recall_at_1000_std value: 16.12968625875372 - type: nauc_recall_at_100_diff1 value: 63.83116179628575 - type: nauc_recall_at_100_max value: 43.72908117676382 - type: nauc_recall_at_100_std value: -20.50966716852155 - type: nauc_recall_at_10_diff1 value: 66.42071960186394 - type: nauc_recall_at_10_max value: 28.983207818687205 - type: nauc_recall_at_10_std value: -56.61417798753744 - type: nauc_recall_at_1_diff1 value: 77.29690208952982 - type: nauc_recall_at_1_max value: 19.839875762282293 - type: nauc_recall_at_1_std value: -45.355684654708284 - type: nauc_recall_at_20_diff1 value: 66.32360705219874 - type: nauc_recall_at_20_max value: 33.30698111822631 - type: nauc_recall_at_20_std value: -43.89233781737452 - type: nauc_recall_at_3_diff1 value: 69.67029394927077 - type: nauc_recall_at_3_max value: 22.67803039327696 - type: nauc_recall_at_3_std value: -56.43327209861502 - type: nauc_recall_at_5_diff1 value: 68.05622143936131 - type: nauc_recall_at_5_max value: 26.67795559040675 - type: nauc_recall_at_5_std value: -58.158231198510954 - type: ndcg_at_1 value: 76.08 - type: ndcg_at_10 value: 84.114 - type: ndcg_at_100 value: 85.784 - type: ndcg_at_1000 value: 85.992 - type: ndcg_at_20 value: 84.976 - type: ndcg_at_3 value: 80.74799999999999 - type: ndcg_at_5 value: 82.626 - type: precision_at_1 value: 76.08 - type: precision_at_10 value: 12.926000000000002 - type: precision_at_100 value: 1.509 - type: precision_at_1000 value: 0.156 - type: precision_at_20 value: 6.912999999999999 - type: precision_at_3 value: 35.5 - type: precision_at_5 value: 23.541999999999998 - type: recall_at_1 value: 65.848 - type: recall_at_10 value: 92.611 - type: recall_at_100 value: 98.69 - type: recall_at_1000 value: 99.83999999999999 - type: recall_at_20 value: 95.47200000000001 - type: recall_at_3 value: 83.122 - type: recall_at_5 value: 88.23 - task: type: Retrieval dataset: name: MTEB SCIDOCS-PL (default) type: clarin-knext/scidocs-pl config: 
default split: test revision: 45452b03f05560207ef19149545f168e596c9337 metrics: - type: main_score value: 15.379999999999999 - type: map_at_1 value: 3.6029999999999998 - type: map_at_10 value: 8.843 - type: map_at_100 value: 10.433 - type: map_at_1000 value: 10.689 - type: map_at_20 value: 9.597 - type: map_at_3 value: 6.363 - type: map_at_5 value: 7.603 - type: mrr_at_1 value: 17.7 - type: mrr_at_10 value: 26.58900793650793 - type: mrr_at_100 value: 27.699652322890987 - type: mrr_at_1000 value: 27.78065313118353 - type: mrr_at_20 value: 27.215020950411816 - type: mrr_at_3 value: 23.36666666666668 - type: mrr_at_5 value: 25.211666666666666 - type: nauc_map_at_1000_diff1 value: 21.92235143827129 - type: nauc_map_at_1000_max value: 37.50300940750989 - type: nauc_map_at_1000_std value: 20.872586122198552 - type: nauc_map_at_100_diff1 value: 21.917408170465833 - type: nauc_map_at_100_max value: 37.4654466815513 - type: nauc_map_at_100_std value: 20.621643878648534 - type: nauc_map_at_10_diff1 value: 22.914388723621183 - type: nauc_map_at_10_max value: 36.468131213468794 - type: nauc_map_at_10_std value: 16.760980140791492 - type: nauc_map_at_1_diff1 value: 29.00799502838457 - type: nauc_map_at_1_max value: 26.64926291797503 - type: nauc_map_at_1_std value: 8.167291261637361 - type: nauc_map_at_20_diff1 value: 22.46580947804047 - type: nauc_map_at_20_max value: 36.656294842562275 - type: nauc_map_at_20_std value: 18.099232417722078 - type: nauc_map_at_3_diff1 value: 23.436009032045934 - type: nauc_map_at_3_max value: 31.325807212280914 - type: nauc_map_at_3_std value: 9.780905232048852 - type: nauc_map_at_5_diff1 value: 22.891704394665528 - type: nauc_map_at_5_max value: 35.40584466642894 - type: nauc_map_at_5_std value: 13.476986099394656 - type: nauc_mrr_at_1000_diff1 value: 25.052937655397866 - type: nauc_mrr_at_1000_max value: 29.64431912670108 - type: nauc_mrr_at_1000_std value: 14.549744963988044 - type: nauc_mrr_at_100_diff1 value: 25.070871266969224 - type: nauc_mrr_at_100_max value: 29.68743604652336 - type: nauc_mrr_at_100_std value: 14.582010154574432 - type: nauc_mrr_at_10_diff1 value: 24.88881466938897 - type: nauc_mrr_at_10_max value: 29.488430770768144 - type: nauc_mrr_at_10_std value: 14.269241073852266 - type: nauc_mrr_at_1_diff1 value: 29.220540327267503 - type: nauc_mrr_at_1_max value: 26.81908580507911 - type: nauc_mrr_at_1_std value: 8.00840295809718 - type: nauc_mrr_at_20_diff1 value: 25.067912695721944 - type: nauc_mrr_at_20_max value: 29.759227563849628 - type: nauc_mrr_at_20_std value: 14.685076859257357 - type: nauc_mrr_at_3_diff1 value: 24.645848739182696 - type: nauc_mrr_at_3_max value: 27.73368549660351 - type: nauc_mrr_at_3_std value: 11.475742805586943 - type: nauc_mrr_at_5_diff1 value: 24.895295760909946 - type: nauc_mrr_at_5_max value: 29.130755033240423 - type: nauc_mrr_at_5_std value: 12.955802929145404 - type: nauc_ndcg_at_1000_diff1 value: 20.68434434777729 - type: nauc_ndcg_at_1000_max value: 37.67055146424174 - type: nauc_ndcg_at_1000_std value: 29.57493715069776 - type: nauc_ndcg_at_100_diff1 value: 20.396834816492383 - type: nauc_ndcg_at_100_max value: 37.460575228670514 - type: nauc_ndcg_at_100_std value: 27.826534756761944 - type: nauc_ndcg_at_10_diff1 value: 22.640844106236027 - type: nauc_ndcg_at_10_max value: 35.21291764462327 - type: nauc_ndcg_at_10_std value: 19.53289455984506 - type: nauc_ndcg_at_1_diff1 value: 29.220540327267503 - type: nauc_ndcg_at_1_max value: 26.81908580507911 - type: nauc_ndcg_at_1_std value: 8.00840295809718 - type: 
nauc_ndcg_at_20_diff1 value: 22.117126657768623 - type: nauc_ndcg_at_20_max value: 35.79395781940806 - type: nauc_ndcg_at_20_std value: 22.242748346260786 - type: nauc_ndcg_at_3_diff1 value: 23.00596063212187 - type: nauc_ndcg_at_3_max value: 30.149013627580523 - type: nauc_ndcg_at_3_std value: 11.07904064662722 - type: nauc_ndcg_at_5_diff1 value: 22.81875419630523 - type: nauc_ndcg_at_5_max value: 34.24267468356626 - type: nauc_ndcg_at_5_std value: 15.307780280752088 - type: nauc_precision_at_1000_diff1 value: 9.606677689029972 - type: nauc_precision_at_1000_max value: 32.74855550489271 - type: nauc_precision_at_1000_std value: 42.65372585937895 - type: nauc_precision_at_100_diff1 value: 11.528981313529545 - type: nauc_precision_at_100_max value: 35.642529490132404 - type: nauc_precision_at_100_std value: 38.146151426052306 - type: nauc_precision_at_10_diff1 value: 18.783957183811836 - type: nauc_precision_at_10_max value: 36.1982008334257 - type: nauc_precision_at_10_std value: 25.09349473195891 - type: nauc_precision_at_1_diff1 value: 29.220540327267503 - type: nauc_precision_at_1_max value: 26.81908580507911 - type: nauc_precision_at_1_std value: 8.00840295809718 - type: nauc_precision_at_20_diff1 value: 17.458766320828214 - type: nauc_precision_at_20_max value: 36.000404903025235 - type: nauc_precision_at_20_std value: 29.1608044138323 - type: nauc_precision_at_3_diff1 value: 20.213669462067166 - type: nauc_precision_at_3_max value: 31.120650847205912 - type: nauc_precision_at_3_std value: 12.390972418818118 - type: nauc_precision_at_5_diff1 value: 20.114245715785678 - type: nauc_precision_at_5_max value: 37.30360111495823 - type: nauc_precision_at_5_std value: 19.053109037822853 - type: nauc_recall_at_1000_diff1 value: 9.85800049032612 - type: nauc_recall_at_1000_max value: 32.48319160802687 - type: nauc_recall_at_1000_std value: 43.79941601741161 - type: nauc_recall_at_100_diff1 value: 11.375255270968337 - type: nauc_recall_at_100_max value: 35.1868784124497 - type: nauc_recall_at_100_std value: 38.422680583482666 - type: nauc_recall_at_10_diff1 value: 18.445783123521938 - type: nauc_recall_at_10_max value: 35.633267936276766 - type: nauc_recall_at_10_std value: 24.94469506254716 - type: nauc_recall_at_1_diff1 value: 29.00799502838457 - type: nauc_recall_at_1_max value: 26.64926291797503 - type: nauc_recall_at_1_std value: 8.167291261637361 - type: nauc_recall_at_20_diff1 value: 17.314906604151936 - type: nauc_recall_at_20_max value: 35.66067699203996 - type: nauc_recall_at_20_std value: 29.400137012506082 - type: nauc_recall_at_3_diff1 value: 19.873710875648698 - type: nauc_recall_at_3_max value: 30.92404718742849 - type: nauc_recall_at_3_std value: 12.400871018075199 - type: nauc_recall_at_5_diff1 value: 19.869948324233192 - type: nauc_recall_at_5_max value: 37.06832511687574 - type: nauc_recall_at_5_std value: 19.0798814966156 - type: ndcg_at_1 value: 17.7 - type: ndcg_at_10 value: 15.379999999999999 - type: ndcg_at_100 value: 22.09 - type: ndcg_at_1000 value: 27.151999999999997 - type: ndcg_at_20 value: 17.576 - type: ndcg_at_3 value: 14.219999999999999 - type: ndcg_at_5 value: 12.579 - type: precision_at_1 value: 17.7 - type: precision_at_10 value: 8.08 - type: precision_at_100 value: 1.7840000000000003 - type: precision_at_1000 value: 0.3 - type: precision_at_20 value: 5.305 - type: precision_at_3 value: 13.167000000000002 - type: precision_at_5 value: 11.06 - type: recall_at_1 value: 3.6029999999999998 - type: recall_at_10 value: 16.413 - type: recall_at_100 value: 36.263 - 
type: recall_at_1000 value: 61.016999999999996 - type: recall_at_20 value: 21.587999999999997 - type: recall_at_3 value: 8.013 - type: recall_at_5 value: 11.198 - task: type: Retrieval dataset: name: MTEB SciFact-PL (default) type: clarin-knext/scifact-pl config: default split: test revision: 47932a35f045ef8ed01ba82bf9ff67f6e109207e metrics: - type: main_score value: 64.764 - type: map_at_1 value: 49.778 - type: map_at_10 value: 59.88 - type: map_at_100 value: 60.707 - type: map_at_1000 value: 60.729 - type: map_at_20 value: 60.419999999999995 - type: map_at_3 value: 57.45400000000001 - type: map_at_5 value: 58.729 - type: mrr_at_1 value: 52.33333333333333 - type: mrr_at_10 value: 61.29193121693122 - type: mrr_at_100 value: 61.95817765126313 - type: mrr_at_1000 value: 61.97583284368782 - type: mrr_at_20 value: 61.72469949641003 - type: mrr_at_3 value: 59.44444444444444 - type: mrr_at_5 value: 60.494444444444454 - type: nauc_map_at_1000_diff1 value: 62.21235294015774 - type: nauc_map_at_1000_max value: 48.83996609100249 - type: nauc_map_at_1000_std value: 5.23892781043174 - type: nauc_map_at_100_diff1 value: 62.20170226789429 - type: nauc_map_at_100_max value: 48.8391766453537 - type: nauc_map_at_100_std value: 5.2664077457917715 - type: nauc_map_at_10_diff1 value: 61.961975488329024 - type: nauc_map_at_10_max value: 48.397109987625186 - type: nauc_map_at_10_std value: 4.314859710827481 - type: nauc_map_at_1_diff1 value: 65.0865197011516 - type: nauc_map_at_1_max value: 41.38862781954889 - type: nauc_map_at_1_std value: -0.9182122632530586 - type: nauc_map_at_20_diff1 value: 61.99173935851292 - type: nauc_map_at_20_max value: 48.79961814179307 - type: nauc_map_at_20_std value: 5.262181845825118 - type: nauc_map_at_3_diff1 value: 62.37910539880477 - type: nauc_map_at_3_max value: 47.13627890977091 - type: nauc_map_at_3_std value: 2.327897198087264 - type: nauc_map_at_5_diff1 value: 61.60080757149592 - type: nauc_map_at_5_max value: 47.60052458345962 - type: nauc_map_at_5_std value: 3.1770196981231047 - type: nauc_mrr_at_1000_diff1 value: 62.86810952814966 - type: nauc_mrr_at_1000_max value: 52.13248094447774 - type: nauc_mrr_at_1000_std value: 10.100485746570733 - type: nauc_mrr_at_100_diff1 value: 62.85364829491874 - type: nauc_mrr_at_100_max value: 52.134528010631854 - type: nauc_mrr_at_100_std value: 10.120945685447369 - type: nauc_mrr_at_10_diff1 value: 62.65679301829915 - type: nauc_mrr_at_10_max value: 52.09270719182349 - type: nauc_mrr_at_10_std value: 9.913834434725441 - type: nauc_mrr_at_1_diff1 value: 66.84108271415636 - type: nauc_mrr_at_1_max value: 46.67646429855176 - type: nauc_mrr_at_1_std value: 5.5505252956352304 - type: nauc_mrr_at_20_diff1 value: 62.72473227039611 - type: nauc_mrr_at_20_max value: 52.13479097802757 - type: nauc_mrr_at_20_std value: 10.188278833464084 - type: nauc_mrr_at_3_diff1 value: 63.797429185518496 - type: nauc_mrr_at_3_max value: 52.16486999573481 - type: nauc_mrr_at_3_std value: 9.094360767062762 - type: nauc_mrr_at_5_diff1 value: 62.592917975475494 - type: nauc_mrr_at_5_max value: 52.330741486107414 - type: nauc_mrr_at_5_std value: 9.742175534421389 - type: nauc_ndcg_at_1000_diff1 value: 61.38859337672476 - type: nauc_ndcg_at_1000_max value: 51.48380058339184 - type: nauc_ndcg_at_1000_std value: 9.670547660897673 - type: nauc_ndcg_at_100_diff1 value: 61.02438489641434 - type: nauc_ndcg_at_100_max value: 51.781246646780865 - type: nauc_ndcg_at_100_std value: 10.592961553245187 - type: nauc_ndcg_at_10_diff1 value: 60.03678353308358 - type: 
nauc_ndcg_at_10_max value: 50.70725688848762 - type: nauc_ndcg_at_10_std value: 7.9472446491016315 - type: nauc_ndcg_at_1_diff1 value: 66.84108271415636 - type: nauc_ndcg_at_1_max value: 46.67646429855176 - type: nauc_ndcg_at_1_std value: 5.5505252956352304 - type: nauc_ndcg_at_20_diff1 value: 59.828482718480224 - type: nauc_ndcg_at_20_max value: 51.45831789601284 - type: nauc_ndcg_at_20_std value: 10.722673683272049 - type: nauc_ndcg_at_3_diff1 value: 61.68982937524109 - type: nauc_ndcg_at_3_max value: 49.745326748604775 - type: nauc_ndcg_at_3_std value: 4.948298621202247 - type: nauc_ndcg_at_5_diff1 value: 59.67396171973207 - type: nauc_ndcg_at_5_max value: 49.87855139298281 - type: nauc_ndcg_at_5_std value: 6.08990428055584 - type: nauc_precision_at_1000_diff1 value: -1.594227972036865 - type: nauc_precision_at_1000_max value: 32.48431723086185 - type: nauc_precision_at_1000_std value: 53.84748466965268 - type: nauc_precision_at_100_diff1 value: 8.06411455192293 - type: nauc_precision_at_100_max value: 39.91003601878948 - type: nauc_precision_at_100_std value: 55.52979711075091 - type: nauc_precision_at_10_diff1 value: 26.610514456014066 - type: nauc_precision_at_10_max value: 47.09062494321172 - type: nauc_precision_at_10_std value: 33.91984226498748 - type: nauc_precision_at_1_diff1 value: 66.84108271415636 - type: nauc_precision_at_1_max value: 46.67646429855176 - type: nauc_precision_at_1_std value: 5.5505252956352304 - type: nauc_precision_at_20_diff1 value: 16.947688843085583 - type: nauc_precision_at_20_max value: 45.40488186572008 - type: nauc_precision_at_20_std value: 48.354421924500905 - type: nauc_precision_at_3_diff1 value: 49.11263981720622 - type: nauc_precision_at_3_max value: 52.7084625111683 - type: nauc_precision_at_3_std value: 16.734612173556453 - type: nauc_precision_at_5_diff1 value: 39.06503705015792 - type: nauc_precision_at_5_max value: 52.21710506893391 - type: nauc_precision_at_5_std value: 23.350948149460233 - type: nauc_recall_at_1000_diff1 value: 43.1559290382817 - type: nauc_recall_at_1000_max value: 83.66013071895456 - type: nauc_recall_at_1000_std value: 86.27450980392177 - type: nauc_recall_at_100_diff1 value: 46.016860850620375 - type: nauc_recall_at_100_max value: 69.3944888744547 - type: nauc_recall_at_100_std value: 55.286945696152735 - type: nauc_recall_at_10_diff1 value: 49.65877895350921 - type: nauc_recall_at_10_max value: 53.02636695700889 - type: nauc_recall_at_10_std value: 13.967608945823828 - type: nauc_recall_at_1_diff1 value: 65.0865197011516 - type: nauc_recall_at_1_max value: 41.38862781954889 - type: nauc_recall_at_1_std value: -0.9182122632530586 - type: nauc_recall_at_20_diff1 value: 43.355308229973524 - type: nauc_recall_at_20_max value: 57.04187909533764 - type: nauc_recall_at_20_std value: 33.578720846660524 - type: nauc_recall_at_3_diff1 value: 56.922996057428165 - type: nauc_recall_at_3_max value: 50.74417041895424 - type: nauc_recall_at_3_std value: 5.623890124328387 - type: nauc_recall_at_5_diff1 value: 50.55620076865238 - type: nauc_recall_at_5_max value: 51.3316854622085 - type: nauc_recall_at_5_std value: 8.995457887269255 - type: ndcg_at_1 value: 52.333 - type: ndcg_at_10 value: 64.764 - type: ndcg_at_100 value: 68.167 - type: ndcg_at_1000 value: 68.816 - type: ndcg_at_20 value: 66.457 - type: ndcg_at_3 value: 60.346 - type: ndcg_at_5 value: 62.365 - type: precision_at_1 value: 52.333 - type: precision_at_10 value: 8.799999999999999 - type: precision_at_100 value: 1.057 - type: precision_at_1000 value: 0.11100000000000002 
- type: precision_at_20 value: 4.8 - type: precision_at_3 value: 23.889 - type: precision_at_5 value: 15.6 - type: recall_at_1 value: 49.778 - type: recall_at_10 value: 78.206 - type: recall_at_100 value: 93.10000000000001 - type: recall_at_1000 value: 98.333 - type: recall_at_20 value: 84.467 - type: recall_at_3 value: 66.367 - type: recall_at_5 value: 71.35000000000001 - task: type: Retrieval dataset: name: MTEB TRECCOVID-PL (default) type: clarin-knext/trec-covid-pl config: default split: test revision: 81bcb408f33366c2a20ac54adafad1ae7e877fdd metrics: - type: main_score value: 72.18900000000001 - type: map_at_1 value: 0.214 - type: map_at_10 value: 1.755 - type: map_at_100 value: 9.944 - type: map_at_1000 value: 24.205 - type: map_at_20 value: 3.1510000000000002 - type: map_at_3 value: 0.6 - type: map_at_5 value: 0.9560000000000001 - type: mrr_at_1 value: 82.0 - type: mrr_at_10 value: 89.06666666666666 - type: mrr_at_100 value: 89.06666666666666 - type: mrr_at_1000 value: 89.06666666666666 - type: mrr_at_20 value: 89.06666666666666 - type: mrr_at_3 value: 87.66666666666666 - type: mrr_at_5 value: 89.06666666666666 - type: nauc_map_at_1000_diff1 value: -9.342037623635543 - type: nauc_map_at_1000_max value: 45.71499810252398 - type: nauc_map_at_1000_std value: 76.86482845196852 - type: nauc_map_at_100_diff1 value: -6.932395299866198 - type: nauc_map_at_100_max value: 36.097801891181604 - type: nauc_map_at_100_std value: 65.6085215411685 - type: nauc_map_at_10_diff1 value: -6.3654843824342775 - type: nauc_map_at_10_max value: 9.564437521432714 - type: nauc_map_at_10_std value: 21.8377319336476 - type: nauc_map_at_1_diff1 value: 8.269590874255034 - type: nauc_map_at_1_max value: 3.482498491294516 - type: nauc_map_at_1_std value: 8.985226819412189 - type: nauc_map_at_20_diff1 value: -4.971435767877232 - type: nauc_map_at_20_max value: 22.88801858567121 - type: nauc_map_at_20_std value: 32.38492618534027 - type: nauc_map_at_3_diff1 value: 1.1615973694623123 - type: nauc_map_at_3_max value: 1.935417800315643 - type: nauc_map_at_3_std value: 10.289328305818698 - type: nauc_map_at_5_diff1 value: -2.4675967231444105 - type: nauc_map_at_5_max value: 2.4611483736622373 - type: nauc_map_at_5_std value: 15.082324305750811 - type: nauc_mrr_at_1000_diff1 value: 13.098526703499063 - type: nauc_mrr_at_1000_max value: 56.37362177417431 - type: nauc_mrr_at_1000_std value: 73.2456769749587 - type: nauc_mrr_at_100_diff1 value: 13.098526703499063 - type: nauc_mrr_at_100_max value: 56.37362177417431 - type: nauc_mrr_at_100_std value: 73.2456769749587 - type: nauc_mrr_at_10_diff1 value: 13.098526703499063 - type: nauc_mrr_at_10_max value: 56.37362177417431 - type: nauc_mrr_at_10_std value: 73.2456769749587 - type: nauc_mrr_at_1_diff1 value: 12.099350148694809 - type: nauc_mrr_at_1_max value: 53.75041304108387 - type: nauc_mrr_at_1_std value: 68.84018063663402 - type: nauc_mrr_at_20_diff1 value: 13.098526703499063 - type: nauc_mrr_at_20_max value: 56.37362177417431 - type: nauc_mrr_at_20_std value: 73.2456769749587 - type: nauc_mrr_at_3_diff1 value: 12.173557857011161 - type: nauc_mrr_at_3_max value: 57.540780562363395 - type: nauc_mrr_at_3_std value: 75.42098189580211 - type: nauc_mrr_at_5_diff1 value: 13.098526703499063 - type: nauc_mrr_at_5_max value: 56.37362177417431 - type: nauc_mrr_at_5_std value: 73.2456769749587 - type: nauc_ndcg_at_1000_diff1 value: -8.951471847310401 - type: nauc_ndcg_at_1000_max value: 43.86942237288822 - type: nauc_ndcg_at_1000_std value: 74.61077735148591 - type: 
nauc_ndcg_at_100_diff1 value: -17.754559361083817 - type: nauc_ndcg_at_100_max value: 53.97187119773482 - type: nauc_ndcg_at_100_std value: 80.7944136146514 - type: nauc_ndcg_at_10_diff1 value: -26.637734697836414 - type: nauc_ndcg_at_10_max value: 47.70102699133149 - type: nauc_ndcg_at_10_std value: 70.26909560828646 - type: nauc_ndcg_at_1_diff1 value: -1.2250530785563207 - type: nauc_ndcg_at_1_max value: 46.60509554140131 - type: nauc_ndcg_at_1_std value: 62.63906581740976 - type: nauc_ndcg_at_20_diff1 value: -22.44286466550908 - type: nauc_ndcg_at_20_max value: 55.40492058090103 - type: nauc_ndcg_at_20_std value: 72.11813912145738 - type: nauc_ndcg_at_3_diff1 value: -14.8152721896563 - type: nauc_ndcg_at_3_max value: 38.952259383027595 - type: nauc_ndcg_at_3_std value: 59.819750166537766 - type: nauc_ndcg_at_5_diff1 value: -19.150105688904375 - type: nauc_ndcg_at_5_max value: 42.311180547775315 - type: nauc_ndcg_at_5_std value: 66.6632229321094 - type: nauc_precision_at_1000_diff1 value: -11.555591477978941 - type: nauc_precision_at_1000_max value: 43.7311644834851 - type: nauc_precision_at_1000_std value: 52.10644767999648 - type: nauc_precision_at_100_diff1 value: -16.94803099801117 - type: nauc_precision_at_100_max value: 54.08281631067633 - type: nauc_precision_at_100_std value: 82.77237347891331 - type: nauc_precision_at_10_diff1 value: -27.351332814863355 - type: nauc_precision_at_10_max value: 48.08237549065846 - type: nauc_precision_at_10_std value: 69.37250843534329 - type: nauc_precision_at_1_diff1 value: 12.099350148694809 - type: nauc_precision_at_1_max value: 53.75041304108387 - type: nauc_precision_at_1_std value: 68.84018063663402 - type: nauc_precision_at_20_diff1 value: -18.2422222283388 - type: nauc_precision_at_20_max value: 59.517328129343696 - type: nauc_precision_at_20_std value: 72.05149307342747 - type: nauc_precision_at_3_diff1 value: -10.226547543075897 - type: nauc_precision_at_3_max value: 43.14684818832875 - type: nauc_precision_at_3_std value: 57.31936467418288 - type: nauc_precision_at_5_diff1 value: -14.28521589468673 - type: nauc_precision_at_5_max value: 41.633426753962596 - type: nauc_precision_at_5_std value: 64.94400576804541 - type: nauc_recall_at_1000_diff1 value: -0.9648831207497152 - type: nauc_recall_at_1000_max value: 31.70832946085005 - type: nauc_recall_at_1000_std value: 63.21471613968869 - type: nauc_recall_at_100_diff1 value: -1.360254380933586 - type: nauc_recall_at_100_max value: 25.960597782099605 - type: nauc_recall_at_100_std value: 51.52757589609674 - type: nauc_recall_at_10_diff1 value: -0.3899439424189566 - type: nauc_recall_at_10_max value: 5.094341897886072 - type: nauc_recall_at_10_std value: 11.266045616925698 - type: nauc_recall_at_1_diff1 value: 8.269590874255034 - type: nauc_recall_at_1_max value: 3.482498491294516 - type: nauc_recall_at_1_std value: 8.985226819412189 - type: nauc_recall_at_20_diff1 value: 6.4797098359254175 - type: nauc_recall_at_20_max value: 15.663700985336124 - type: nauc_recall_at_20_std value: 17.154099587904913 - type: nauc_recall_at_3_diff1 value: 3.7245972450393507 - type: nauc_recall_at_3_max value: 0.4063857187240345 - type: nauc_recall_at_3_std value: 6.641948062821941 - type: nauc_recall_at_5_diff1 value: 4.013879477591466 - type: nauc_recall_at_5_max value: -1.4266586618013566 - type: nauc_recall_at_5_std value: 7.311601874411205 - type: ndcg_at_1 value: 75.0 - type: ndcg_at_10 value: 72.18900000000001 - type: ndcg_at_100 value: 54.022999999999996 - type: ndcg_at_1000 value: 49.492000000000004 
- type: ndcg_at_20 value: 68.51 - type: ndcg_at_3 value: 73.184 - type: ndcg_at_5 value: 72.811 - type: precision_at_1 value: 82.0 - type: precision_at_10 value: 77.4 - type: precision_at_100 value: 55.24 - type: precision_at_1000 value: 21.822 - type: precision_at_20 value: 73.0 - type: precision_at_3 value: 79.333 - type: precision_at_5 value: 79.2 - type: recall_at_1 value: 0.214 - type: recall_at_10 value: 1.9980000000000002 - type: recall_at_100 value: 13.328999999999999 - type: recall_at_1000 value: 47.204 - type: recall_at_20 value: 3.7310000000000003 - type: recall_at_3 value: 0.628 - type: recall_at_5 value: 1.049 - task: type: MultilabelClassification dataset: name: MTEB CEDRClassification (default) type: ai-forever/cedr-classification config: default split: test revision: c0ba03d058e3e1b2f3fd20518875a4563dd12db4 metrics: - type: accuracy value: 47.30605738575983 - type: f1 value: 41.26091043925065 - type: lrap value: 72.89452709883206 - type: main_score value: 47.30605738575983 - task: type: Reranking dataset: name: MTEB MIRACLReranking (ru) type: miracl/mmteb-miracl-reranking config: ru split: dev revision: 6d1962c527217f8927fca80f890f14f36b2802af metrics: - type: MAP@1(MIRACL) value: 20.721999999999998 - type: MAP@10(MIRACL) value: 33.900999999999996 - type: MAP@100(MIRACL) value: 36.813 - type: MAP@1000(MIRACL) value: 36.813 - type: MAP@20(MIRACL) value: 35.684 - type: MAP@3(MIRACL) value: 28.141 - type: MAP@5(MIRACL) value: 31.075000000000003 - type: NDCG@1(MIRACL) value: 32.799 - type: NDCG@10(MIRACL) value: 42.065000000000005 - type: NDCG@100(MIRACL) value: 49.730999999999995 - type: NDCG@1000(MIRACL) value: 49.730999999999995 - type: NDCG@20(MIRACL) value: 46.0 - type: NDCG@3(MIRACL) value: 34.481 - type: NDCG@5(MIRACL) value: 37.452999999999996 - type: P@1(MIRACL) value: 32.799 - type: P@10(MIRACL) value: 11.668000000000001 - type: P@100(MIRACL) value: 1.9529999999999998 - type: P@1000(MIRACL) value: 0.19499999999999998 - type: P@20(MIRACL) value: 7.51 - type: P@3(MIRACL) value: 20.823 - type: P@5(MIRACL) value: 16.728 - type: Recall@1(MIRACL) value: 20.721999999999998 - type: Recall@10(MIRACL) value: 54.762 - type: Recall@100(MIRACL) value: 79.952 - type: Recall@1000(MIRACL) value: 79.952 - type: Recall@20(MIRACL) value: 66.26100000000001 - type: Recall@3(MIRACL) value: 34.410000000000004 - type: Recall@5(MIRACL) value: 42.659000000000006 - type: main_score value: 42.065000000000005 - type: nAUC_MAP@1000_diff1(MIRACL) value: 14.33534992502818 - type: nAUC_MAP@1000_max(MIRACL) value: 12.367998764646115 - type: nAUC_MAP@1000_std(MIRACL) value: 4.569686002935006 - type: nAUC_MAP@100_diff1(MIRACL) value: 14.33534992502818 - type: nAUC_MAP@100_max(MIRACL) value: 12.367998764646115 - type: nAUC_MAP@100_std(MIRACL) value: 4.569686002935006 - type: nAUC_MAP@10_diff1(MIRACL) value: 16.920323975680027 - type: nAUC_MAP@10_max(MIRACL) value: 9.327171297204082 - type: nAUC_MAP@10_std(MIRACL) value: 3.2039133783079015 - type: nAUC_MAP@1_diff1(MIRACL) value: 28.698973487482206 - type: nAUC_MAP@1_max(MIRACL) value: 2.9217687660885034 - type: nAUC_MAP@1_std(MIRACL) value: -1.1247408800976524 - type: nAUC_MAP@20_diff1(MIRACL) value: 15.359083081640476 - type: nAUC_MAP@20_max(MIRACL) value: 11.310494233946345 - type: nAUC_MAP@20_std(MIRACL) value: 4.4171898386022885 - type: nAUC_MAP@3_diff1(MIRACL) value: 22.27430591851617 - type: nAUC_MAP@3_max(MIRACL) value: 6.407438291284658 - type: nAUC_MAP@3_std(MIRACL) value: 0.9799184530397409 - type: nAUC_MAP@5_diff1(MIRACL) value: 
19.20571689941054 - type: nAUC_MAP@5_max(MIRACL) value: 7.987468654026893 - type: nAUC_MAP@5_std(MIRACL) value: 1.8324246565938962 - type: nAUC_NDCG@1000_diff1(MIRACL) value: 3.7537669018914768 - type: nAUC_NDCG@1000_max(MIRACL) value: 20.7944707840533 - type: nAUC_NDCG@1000_std(MIRACL) value: 8.444837055303063 - type: nAUC_NDCG@100_diff1(MIRACL) value: 3.7537669018914768 - type: nAUC_NDCG@100_max(MIRACL) value: 20.7944707840533 - type: nAUC_NDCG@100_std(MIRACL) value: 8.444837055303063 - type: nAUC_NDCG@10_diff1(MIRACL) value: 10.829575656103888 - type: nAUC_NDCG@10_max(MIRACL) value: 13.0445496498929 - type: nAUC_NDCG@10_std(MIRACL) value: 6.050412212625362 - type: nAUC_NDCG@1_diff1(MIRACL) value: 19.1388712233292 - type: nAUC_NDCG@1_max(MIRACL) value: 10.871900994781642 - type: nAUC_NDCG@1_std(MIRACL) value: 3.218568248751811 - type: nAUC_NDCG@20_diff1(MIRACL) value: 7.093172181746442 - type: nAUC_NDCG@20_max(MIRACL) value: 16.955238078958836 - type: nAUC_NDCG@20_std(MIRACL) value: 8.325656379573035 - type: nAUC_NDCG@3_diff1(MIRACL) value: 17.134437303330802 - type: nAUC_NDCG@3_max(MIRACL) value: 10.235328822955793 - type: nAUC_NDCG@3_std(MIRACL) value: 3.2341358691084814 - type: nAUC_NDCG@5_diff1(MIRACL) value: 14.733664618337636 - type: nAUC_NDCG@5_max(MIRACL) value: 11.181897412035282 - type: nAUC_NDCG@5_std(MIRACL) value: 3.642277088791985 - type: nAUC_P@1000_diff1(MIRACL) value: -26.330038284867573 - type: nAUC_P@1000_max(MIRACL) value: 28.450694137240458 - type: nAUC_P@1000_std(MIRACL) value: 9.892993775474912 - type: nAUC_P@100_diff1(MIRACL) value: -26.330038284867552 - type: nAUC_P@100_max(MIRACL) value: 28.45069413724051 - type: nAUC_P@100_std(MIRACL) value: 9.892993775474928 - type: nAUC_P@10_diff1(MIRACL) value: -17.436937353231112 - type: nAUC_P@10_max(MIRACL) value: 24.327018012947857 - type: nAUC_P@10_std(MIRACL) value: 11.78803527706634 - type: nAUC_P@1_diff1(MIRACL) value: 19.1388712233292 - type: nAUC_P@1_max(MIRACL) value: 10.871900994781642 - type: nAUC_P@1_std(MIRACL) value: 3.218568248751811 - type: nAUC_P@20_diff1(MIRACL) value: -22.947528755272426 - type: nAUC_P@20_max(MIRACL) value: 27.773093471902538 - type: nAUC_P@20_std(MIRACL) value: 14.898619107087221 - type: nAUC_P@3_diff1(MIRACL) value: 1.4100426412400944 - type: nAUC_P@3_max(MIRACL) value: 17.397472872058845 - type: nAUC_P@3_std(MIRACL) value: 8.240008229861875 - type: nAUC_P@5_diff1(MIRACL) value: -7.971349332207021 - type: nAUC_P@5_max(MIRACL) value: 22.198441167940963 - type: nAUC_P@5_std(MIRACL) value: 9.00265164460082 - type: nAUC_Recall@1000_diff1(MIRACL) value: -38.69835271863148 - type: nAUC_Recall@1000_max(MIRACL) value: 50.9545152809108 - type: nAUC_Recall@1000_std(MIRACL) value: 20.44270887092116 - type: nAUC_Recall@100_diff1(MIRACL) value: -38.69835271863148 - type: nAUC_Recall@100_max(MIRACL) value: 50.9545152809108 - type: nAUC_Recall@100_std(MIRACL) value: 20.44270887092116 - type: nAUC_Recall@10_diff1(MIRACL) value: -0.08109036309433801 - type: nAUC_Recall@10_max(MIRACL) value: 12.696619907773568 - type: nAUC_Recall@10_std(MIRACL) value: 8.791982704261589 - type: nAUC_Recall@1_diff1(MIRACL) value: 28.698973487482206 - type: nAUC_Recall@1_max(MIRACL) value: 2.9217687660885034 - type: nAUC_Recall@1_std(MIRACL) value: -1.1247408800976524 - type: nAUC_Recall@20_diff1(MIRACL) value: -13.312171017942623 - type: nAUC_Recall@20_max(MIRACL) value: 24.19847346821666 - type: nAUC_Recall@20_std(MIRACL) value: 15.8157702609797 - type: nAUC_Recall@3_diff1(MIRACL) value: 16.909128321353343 - type: 
nAUC_Recall@3_max(MIRACL) value: 6.552122731902991 - type: nAUC_Recall@3_std(MIRACL) value: 1.9963898223457228 - type: nAUC_Recall@5_diff1(MIRACL) value: 9.990292655247721 - type: nAUC_Recall@5_max(MIRACL) value: 9.361722273507574 - type: nAUC_Recall@5_std(MIRACL) value: 3.270918827854495 - task: type: MultilabelClassification dataset: name: MTEB SensitiveTopicsClassification (default) type: ai-forever/sensitive-topics-classification config: default split: test revision: 416b34a802308eac30e4192afc0ff99bb8dcc7f2 metrics: - type: accuracy value: 30.634765625 - type: f1 value: 32.647559808678665 - type: lrap value: 45.94319661458259 - type: main_score value: 30.634765625 - task: type: STS dataset: name: MTEB ATEC (default) type: C-MTEB/ATEC config: default split: test revision: 0f319b1142f28d00e055a6770f3f726ae9b7d865 metrics: - type: cosine_pearson value: 47.541497334563296 - type: cosine_spearman value: 49.06268944206629 - type: euclidean_pearson value: 51.838926748581635 - type: euclidean_spearman value: 48.930697157135356 - type: main_score value: 49.06268944206629 - type: manhattan_pearson value: 51.835306769406365 - type: manhattan_spearman value: 48.86135493444834 - type: pearson value: 47.541497334563296 - type: spearman value: 49.06268944206629 - task: type: Classification dataset: name: MTEB AllegroReviews (default) type: PL-MTEB/allegro-reviews config: default split: test revision: b89853e6de927b0e3bfa8ecc0e56fe4e02ceafc6 metrics: - type: accuracy value: 49.51292246520874 - type: f1 value: 44.14350234332397 - type: f1_weighted value: 51.65508998354552 - type: main_score value: 49.51292246520874 - task: type: Clustering dataset: name: MTEB AlloProfClusteringP2P (default) type: lyon-nlp/alloprof config: default split: test revision: 392ba3f5bcc8c51f578786c1fc3dae648662cb9b metrics: - type: main_score value: 63.883383458621665 - type: v_measure value: 63.883383458621665 - type: v_measure_std value: 2.693666879958465 - type: main_score value: 46.85924588755251 - type: v_measure value: 46.85924588755251 - type: v_measure_std value: 2.1918258880872377 - task: type: Clustering dataset: name: MTEB 8TagsClustering type: PL-MTEB/8tags-clustering config: default split: test revision: None metrics: - type: v_measure value: 43.65721212452554 - task: type: Reranking dataset: name: MTEB AlloprofReranking (default) type: lyon-nlp/mteb-fr-reranking-alloprof-s2p config: default split: test revision: e40c8a63ce02da43200eccb5b0846fcaa888f562 metrics: - type: map value: 66.39013753839347 - type: mrr value: 67.68045617786551 - type: main_score value: 66.39013753839347 - task: type: Retrieval dataset: name: MTEB AlloprofRetrieval (default) type: lyon-nlp/alloprof config: default split: test revision: fcf295ea64c750f41fadbaa37b9b861558e1bfbd metrics: - type: main_score value: 54.284 - type: map_at_1 value: 37.047000000000004 - type: map_at_10 value: 48.53 - type: map_at_100 value: 49.357 - type: map_at_1000 value: 49.39 - type: map_at_20 value: 49.064 - type: map_at_3 value: 45.675 - type: map_at_5 value: 47.441 - type: mrr_at_1 value: 37.04663212435233 - type: mrr_at_10 value: 48.5300326232969 - type: mrr_at_100 value: 49.35708199037581 - type: mrr_at_1000 value: 49.39005824603193 - type: mrr_at_20 value: 49.06417416464799 - type: mrr_at_3 value: 45.67501439263105 - type: mrr_at_5 value: 47.44099021301103 - type: nauc_map_at_1000_diff1 value: 43.32474221868009 - type: nauc_map_at_1000_max value: 39.407334029058575 - type: nauc_map_at_1000_std value: -2.3728154448932606 - type: nauc_map_at_100_diff1 
value: 43.32336300929909 - type: nauc_map_at_100_max value: 39.432174777554835 - type: nauc_map_at_100_std value: -2.356396922384349 - type: nauc_map_at_10_diff1 value: 43.1606520154482 - type: nauc_map_at_10_max value: 39.33734650558226 - type: nauc_map_at_10_std value: -2.5156222475075256 - type: nauc_map_at_1_diff1 value: 46.2178975214499 - type: nauc_map_at_1_max value: 36.26173199049361 - type: nauc_map_at_1_std value: -3.0897555582816443 - type: nauc_map_at_20_diff1 value: 43.272980702916456 - type: nauc_map_at_20_max value: 39.4896977052276 - type: nauc_map_at_20_std value: -2.3305501742917043 - type: nauc_map_at_3_diff1 value: 43.49525042967079 - type: nauc_map_at_3_max value: 38.66352501824728 - type: nauc_map_at_3_std value: -3.202794391620473 - type: nauc_map_at_5_diff1 value: 43.2266692546611 - type: nauc_map_at_5_max value: 38.77368661115743 - type: nauc_map_at_5_std value: -3.0897532130127954 - type: nauc_mrr_at_1000_diff1 value: 43.32474221868009 - type: nauc_mrr_at_1000_max value: 39.407334029058575 - type: nauc_mrr_at_1000_std value: -2.3728154448932606 - type: nauc_mrr_at_100_diff1 value: 43.32336300929909 - type: nauc_mrr_at_100_max value: 39.432174777554835 - type: nauc_mrr_at_100_std value: -2.356396922384349 - type: nauc_mrr_at_10_diff1 value: 43.1606520154482 - type: nauc_mrr_at_10_max value: 39.33734650558226 - type: nauc_mrr_at_10_std value: -2.5156222475075256 - type: nauc_mrr_at_1_diff1 value: 46.2178975214499 - type: nauc_mrr_at_1_max value: 36.26173199049361 - type: nauc_mrr_at_1_std value: -3.0897555582816443 - type: nauc_mrr_at_20_diff1 value: 43.272980702916456 - type: nauc_mrr_at_20_max value: 39.4896977052276 - type: nauc_mrr_at_20_std value: -2.3305501742917043 - type: nauc_mrr_at_3_diff1 value: 43.49525042967079 - type: nauc_mrr_at_3_max value: 38.66352501824728 - type: nauc_mrr_at_3_std value: -3.202794391620473 - type: nauc_mrr_at_5_diff1 value: 43.2266692546611 - type: nauc_mrr_at_5_max value: 38.77368661115743 - type: nauc_mrr_at_5_std value: -3.0897532130127954 - type: nauc_ndcg_at_1000_diff1 value: 43.01903168202974 - type: nauc_ndcg_at_1000_max value: 40.75496622942232 - type: nauc_ndcg_at_1000_std value: -1.3150412981845496 - type: nauc_ndcg_at_100_diff1 value: 42.98016493758145 - type: nauc_ndcg_at_100_max value: 41.55869635162325 - type: nauc_ndcg_at_100_std value: -0.5355252976886055 - type: nauc_ndcg_at_10_diff1 value: 42.218755211347506 - type: nauc_ndcg_at_10_max value: 41.305042275175765 - type: nauc_ndcg_at_10_std value: -1.4034484444573714 - type: nauc_ndcg_at_1_diff1 value: 46.2178975214499 - type: nauc_ndcg_at_1_max value: 36.26173199049361 - type: nauc_ndcg_at_1_std value: -3.0897555582816443 - type: nauc_ndcg_at_20_diff1 value: 42.66574440095576 - type: nauc_ndcg_at_20_max value: 42.014620115124515 - type: nauc_ndcg_at_20_std value: -0.5176162553751498 - type: nauc_ndcg_at_3_diff1 value: 42.837450505106055 - type: nauc_ndcg_at_3_max value: 39.525369733082414 - type: nauc_ndcg_at_3_std value: -3.1605948245795155 - type: nauc_ndcg_at_5_diff1 value: 42.37951815451173 - type: nauc_ndcg_at_5_max value: 39.78840132935179 - type: nauc_ndcg_at_5_std value: -2.936898430768135 - type: nauc_precision_at_1000_diff1 value: 49.69224988612385 - type: nauc_precision_at_1000_max value: 79.57897547128005 - type: nauc_precision_at_1000_std value: 45.040371354764645 - type: nauc_precision_at_100_diff1 value: 42.70597486048422 - type: nauc_precision_at_100_max value: 65.74628759606188 - type: nauc_precision_at_100_std value: 25.49157745244855 - type: 
nauc_precision_at_10_diff1 value: 38.565609931689345 - type: nauc_precision_at_10_max value: 50.0239696180852 - type: nauc_precision_at_10_std value: 3.976354829503967 - type: nauc_precision_at_1_diff1 value: 46.2178975214499 - type: nauc_precision_at_1_max value: 36.26173199049361 - type: nauc_precision_at_1_std value: -3.0897555582816443 - type: nauc_precision_at_20_diff1 value: 40.4134718566864 - type: nauc_precision_at_20_max value: 57.121778108665374 - type: nauc_precision_at_20_std value: 11.46021975428544 - type: nauc_precision_at_3_diff1 value: 40.90538379461529 - type: nauc_precision_at_3_max value: 42.18393248057992 - type: nauc_precision_at_3_std value: -3.005249943837297 - type: nauc_precision_at_5_diff1 value: 39.60162965860782 - type: nauc_precision_at_5_max value: 43.28317158174058 - type: nauc_precision_at_5_std value: -2.3469094487738054 - type: nauc_recall_at_1000_diff1 value: 49.69224988612252 - type: nauc_recall_at_1000_max value: 79.57897547127862 - type: nauc_recall_at_1000_std value: 45.04037135476256 - type: nauc_recall_at_100_diff1 value: 42.70597486048432 - type: nauc_recall_at_100_max value: 65.74628759606213 - type: nauc_recall_at_100_std value: 25.491577452448727 - type: nauc_recall_at_10_diff1 value: 38.56560993168935 - type: nauc_recall_at_10_max value: 50.02396961808522 - type: nauc_recall_at_10_std value: 3.9763548295040314 - type: nauc_recall_at_1_diff1 value: 46.2178975214499 - type: nauc_recall_at_1_max value: 36.26173199049361 - type: nauc_recall_at_1_std value: -3.0897555582816443 - type: nauc_recall_at_20_diff1 value: 40.41347185668637 - type: nauc_recall_at_20_max value: 57.12177810866533 - type: nauc_recall_at_20_std value: 11.460219754285431 - type: nauc_recall_at_3_diff1 value: 40.90538379461527 - type: nauc_recall_at_3_max value: 42.18393248057989 - type: nauc_recall_at_3_std value: -3.005249943837297 - type: nauc_recall_at_5_diff1 value: 39.601629658607784 - type: nauc_recall_at_5_max value: 43.28317158174053 - type: nauc_recall_at_5_std value: -2.3469094487738054 - type: ndcg_at_1 value: 37.047000000000004 - type: ndcg_at_10 value: 54.284 - type: ndcg_at_100 value: 58.34 - type: ndcg_at_1000 value: 59.303 - type: ndcg_at_20 value: 56.235 - type: ndcg_at_3 value: 48.503 - type: ndcg_at_5 value: 51.686 - type: precision_at_1 value: 37.047000000000004 - type: precision_at_10 value: 7.237 - type: precision_at_100 value: 0.914 - type: precision_at_1000 value: 0.099 - type: precision_at_20 value: 4.005 - type: precision_at_3 value: 18.898 - type: precision_at_5 value: 12.884 - type: recall_at_1 value: 37.047000000000004 - type: recall_at_10 value: 72.366 - type: recall_at_100 value: 91.408 - type: recall_at_1000 value: 99.136 - type: recall_at_20 value: 80.095 - type: recall_at_3 value: 56.693000000000005 - type: recall_at_5 value: 64.42099999999999 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 89.49253731343283 - type: ap value: 61.88098616359918 - type: ap_weighted value: 61.88098616359918 - type: f1 value: 84.76516623679144 - type: f1_weighted value: 89.92745276292968 - type: main_score value: 89.49253731343283 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (de) type: mteb/amazon_counterfactual config: de split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 89.61456102783727 - type: ap 
value: 93.11816566733742 - type: ap_weighted value: 93.11816566733742 - type: f1 value: 88.27635757733722 - type: f1_weighted value: 89.82581568285453 - type: main_score value: 89.61456102783727 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification (default) type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 95.3825 - type: ap value: 93.393033869502 - type: ap_weighted value: 93.393033869502 - type: f1 value: 95.38109007966307 - type: f1_weighted value: 95.38109007966305 - type: main_score value: 95.3825 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 49.768 - type: f1 value: 48.95084821944411 - type: f1_weighted value: 48.9508482194441 - type: main_score value: 49.768 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (de) type: mteb/amazon_reviews_multi config: de split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.071999999999996 - type: f1 value: 47.24171107487612 - type: f1_weighted value: 47.24171107487612 - type: main_score value: 48.071999999999996 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (es) type: mteb/amazon_reviews_multi config: es split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.102000000000004 - type: f1 value: 47.27193805278696 - type: f1_weighted value: 47.27193805278696 - type: main_score value: 48.102000000000004 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 47.30800000000001 - type: f1 value: 46.41683358017851 - type: f1_weighted value: 46.41683358017851 - type: main_score value: 47.30800000000001 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 44.944 - type: f1 value: 44.223824487744395 - type: f1_weighted value: 44.22382448774439 - type: main_score value: 44.944 - task: type: Retrieval dataset: name: MTEB ArguAna (default) type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: map_at_1 value: 29.232000000000003 - type: map_at_10 value: 45.117000000000004 - type: map_at_100 value: 45.977000000000004 - type: map_at_1000 value: 45.98 - type: map_at_20 value: 45.815 - type: map_at_3 value: 39.912 - type: map_at_5 value: 42.693 - type: mrr_at_1 value: 29.659000000000002 - type: mrr_at_10 value: 45.253 - type: mrr_at_100 value: 46.125 - type: mrr_at_1000 value: 46.129 - type: mrr_at_20 value: 45.964 - type: mrr_at_3 value: 40.043 - type: mrr_at_5 value: 42.870000000000005 - type: ndcg_at_1 value: 29.232000000000003 - type: ndcg_at_10 value: 54.327999999999996 - type: ndcg_at_100 value: 57.86 - type: ndcg_at_1000 value: 57.935 - type: ndcg_at_20 value: 56.794 - type: ndcg_at_3 value: 43.516 - type: ndcg_at_5 value: 48.512 - type: precision_at_1 value: 29.232000000000003 - type: precision_at_10 value: 8.393 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.676 - type: precision_at_3 
value: 17.994 - type: precision_at_5 value: 13.215 - type: recall_at_1 value: 29.232000000000003 - type: recall_at_10 value: 83.926 - type: recall_at_100 value: 99.075 - type: recall_at_1000 value: 99.644 - type: recall_at_20 value: 93.528 - type: recall_at_3 value: 53.983000000000004 - type: recall_at_5 value: 66.074 - type: main_score value: 54.327999999999996 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P (default) type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: main_score value: 46.6636824632419 - type: v_measure value: 46.6636824632419 - type: v_measure_std value: 13.817129140714963 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S (default) type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: main_score value: 39.271141892800024 - type: v_measure value: 39.271141892800024 - type: v_measure_std value: 14.276782483454827 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions (default) type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 65.04363277324629 - type: mrr value: 78.2372598162072 - type: main_score value: 65.04363277324629 - task: type: Reranking dataset: name: MTEB MindSmallReranking (default) type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 30.83 - type: main_score value: 30.83 - task: type: STS dataset: name: MTEB BIOSSES (default) type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cosine_pearson value: 88.80382082011027 - type: cosine_spearman value: 88.68876782169106 - type: euclidean_pearson value: 87.00802890147176 - type: euclidean_spearman value: 87.43211268192712 - type: main_score value: 88.68876782169106 - type: manhattan_pearson value: 87.14062537179474 - type: manhattan_spearman value: 87.59115245033443 - type: pearson value: 88.80382082011027 - type: spearman value: 88.68876782169106 - task: type: STS dataset: name: MTEB BQ (default) type: C-MTEB/BQ config: default split: test revision: e3dda5e115e487b39ec7e618c0c6a29137052a55 metrics: - type: cosine_pearson value: 61.588006604878196 - type: cosine_spearman value: 63.20615427154465 - type: euclidean_pearson value: 61.818547092516496 - type: euclidean_spearman value: 63.21558009151778 - type: main_score value: 63.20615427154465 - type: manhattan_pearson value: 61.665588158487616 - type: manhattan_spearman value: 63.051544488238584 - type: pearson value: 61.588006604878196 - type: spearman value: 63.20615427154465 - task: type: Retrieval dataset: name: MTEB BSARDRetrieval (default) type: maastrichtlawtech/bsard config: default split: test revision: 5effa1b9b5fa3b0f9e12523e6e43e5f86a6e6d59 metrics: - type: main_score value: 64.414 - type: map_at_1 value: 14.865 - type: map_at_10 value: 21.605 - type: map_at_100 value: 22.762 - type: map_at_1000 value: 22.854 - type: map_at_20 value: 22.259999999999998 - type: map_at_3 value: 20.119999999999997 - type: map_at_5 value: 20.931 - type: mrr_at_1 value: 14.864864864864865 - type: mrr_at_10 value: 21.605176605176606 - type: mrr_at_100 value: 22.7622306460065 - type: mrr_at_1000 value: 22.85383406410312 - type: mrr_at_20 value: 22.259528463088845 - type: mrr_at_3 value: 20.12012012012012 - type: mrr_at_5 value: 20.930930930930934 - type: 
nauc_map_at_1000_diff1 value: 17.486265968689338 - type: nauc_map_at_1000_max value: 22.736799291688836 - type: nauc_map_at_1000_std value: 9.831687441977147 - type: nauc_map_at_100_diff1 value: 17.50754492049086 - type: nauc_map_at_100_max value: 22.77693662806787 - type: nauc_map_at_100_std value: 9.853899509675395 - type: nauc_map_at_10_diff1 value: 17.42133968580952 - type: nauc_map_at_10_max value: 22.45861793882279 - type: nauc_map_at_10_std value: 8.964888472915938 - type: nauc_map_at_1_diff1 value: 19.433947086968093 - type: nauc_map_at_1_max value: 24.75657047550517 - type: nauc_map_at_1_std value: 15.122329157218505 - type: nauc_map_at_20_diff1 value: 17.429856756008785 - type: nauc_map_at_20_max value: 22.438850987431017 - type: nauc_map_at_20_std value: 9.172746012213558 - type: nauc_map_at_3_diff1 value: 18.218182689678475 - type: nauc_map_at_3_max value: 23.57169444088667 - type: nauc_map_at_3_std value: 10.464473559366356 - type: nauc_map_at_5_diff1 value: 18.6075342519133 - type: nauc_map_at_5_max value: 23.308845973576673 - type: nauc_map_at_5_std value: 9.364009996445652 - type: nauc_mrr_at_1000_diff1 value: 17.486265968689338 - type: nauc_mrr_at_1000_max value: 22.736799291688836 - type: nauc_mrr_at_1000_std value: 9.831687441977147 - type: nauc_mrr_at_100_diff1 value: 17.50754492049086 - type: nauc_mrr_at_100_max value: 22.77693662806787 - type: nauc_mrr_at_100_std value: 9.853899509675395 - type: nauc_mrr_at_10_diff1 value: 17.42133968580952 - type: nauc_mrr_at_10_max value: 22.45861793882279 - type: nauc_mrr_at_10_std value: 8.964888472915938 - type: nauc_mrr_at_1_diff1 value: 19.433947086968093 - type: nauc_mrr_at_1_max value: 24.75657047550517 - type: nauc_mrr_at_1_std value: 15.122329157218505 - type: nauc_mrr_at_20_diff1 value: 17.429856756008785 - type: nauc_mrr_at_20_max value: 22.438850987431017 - type: nauc_mrr_at_20_std value: 9.172746012213558 - type: nauc_mrr_at_3_diff1 value: 18.218182689678475 - type: nauc_mrr_at_3_max value: 23.57169444088667 - type: nauc_mrr_at_3_std value: 10.464473559366356 - type: nauc_mrr_at_5_diff1 value: 18.6075342519133 - type: nauc_mrr_at_5_max value: 23.308845973576673 - type: nauc_mrr_at_5_std value: 9.364009996445652 - type: nauc_ndcg_at_1000_diff1 value: 16.327871824135745 - type: nauc_ndcg_at_1000_max value: 23.308241052911495 - type: nauc_ndcg_at_1000_std value: 11.50905911184097 - type: nauc_ndcg_at_100_diff1 value: 16.676226744692773 - type: nauc_ndcg_at_100_max value: 24.323253721240974 - type: nauc_ndcg_at_100_std value: 11.952612443651557 - type: nauc_ndcg_at_10_diff1 value: 16.030325121764594 - type: nauc_ndcg_at_10_max value: 21.306799242079542 - type: nauc_ndcg_at_10_std value: 6.63359364302513 - type: nauc_ndcg_at_1_diff1 value: 19.433947086968093 - type: nauc_ndcg_at_1_max value: 24.75657047550517 - type: nauc_ndcg_at_1_std value: 15.122329157218505 - type: nauc_ndcg_at_20_diff1 value: 16.013173605999857 - type: nauc_ndcg_at_20_max value: 21.607217260736576 - type: nauc_ndcg_at_20_std value: 7.319482417138996 - type: nauc_ndcg_at_3_diff1 value: 17.97958548328493 - type: nauc_ndcg_at_3_max value: 23.58346522810145 - type: nauc_ndcg_at_3_std value: 9.392582854708314 - type: nauc_ndcg_at_5_diff1 value: 18.734733324685287 - type: nauc_ndcg_at_5_max value: 23.273244317623742 - type: nauc_ndcg_at_5_std value: 7.638611545253834 - type: nauc_precision_at_1000_diff1 value: 7.919843339380295 - type: nauc_precision_at_1000_max value: 31.575386234270486 - type: nauc_precision_at_1000_std value: 39.332224386769404 - type: 
nauc_precision_at_100_diff1 value: 15.018050960000052 - type: nauc_precision_at_100_max value: 34.98209513759861 - type: nauc_precision_at_100_std value: 26.970034484359022 - type: nauc_precision_at_10_diff1 value: 12.102191084210922 - type: nauc_precision_at_10_max value: 18.112541150340675 - type: nauc_precision_at_10_std value: 0.7358784689406018 - type: nauc_precision_at_1_diff1 value: 19.433947086968093 - type: nauc_precision_at_1_max value: 24.75657047550517 - type: nauc_precision_at_1_std value: 15.122329157218505 - type: nauc_precision_at_20_diff1 value: 12.018814361204328 - type: nauc_precision_at_20_max value: 19.75123746049928 - type: nauc_precision_at_20_std value: 3.012204650582264 - type: nauc_precision_at_3_diff1 value: 17.41375604940955 - type: nauc_precision_at_3_max value: 23.699834627021037 - type: nauc_precision_at_3_std value: 6.793486779050103 - type: nauc_precision_at_5_diff1 value: 19.194631963780257 - type: nauc_precision_at_5_max value: 23.31708702442155 - type: nauc_precision_at_5_std value: 3.4591358279667332 - type: nauc_recall_at_1000_diff1 value: 7.919843339380378 - type: nauc_recall_at_1000_max value: 31.57538623427063 - type: nauc_recall_at_1000_std value: 39.332224386769546 - type: nauc_recall_at_100_diff1 value: 15.018050960000085 - type: nauc_recall_at_100_max value: 34.9820951375986 - type: nauc_recall_at_100_std value: 26.97003448435901 - type: nauc_recall_at_10_diff1 value: 12.102191084210837 - type: nauc_recall_at_10_max value: 18.112541150340594 - type: nauc_recall_at_10_std value: 0.7358784689405188 - type: nauc_recall_at_1_diff1 value: 19.433947086968093 - type: nauc_recall_at_1_max value: 24.75657047550517 - type: nauc_recall_at_1_std value: 15.122329157218505 - type: nauc_recall_at_20_diff1 value: 12.01881436120429 - type: nauc_recall_at_20_max value: 19.751237460499222 - type: nauc_recall_at_20_std value: 3.0122046505822135 - type: nauc_recall_at_3_diff1 value: 17.413756049409503 - type: nauc_recall_at_3_max value: 23.699834627020998 - type: nauc_recall_at_3_std value: 6.793486779050083 - type: nauc_recall_at_5_diff1 value: 19.194631963780203 - type: nauc_recall_at_5_max value: 23.3170870244215 - type: nauc_recall_at_5_std value: 3.459135827966664 - type: ndcg_at_1 value: 14.865 - type: ndcg_at_10 value: 24.764 - type: ndcg_at_100 value: 30.861 - type: ndcg_at_1000 value: 33.628 - type: ndcg_at_20 value: 27.078000000000003 - type: ndcg_at_3 value: 21.675 - type: ndcg_at_5 value: 23.148 - type: precision_at_1 value: 14.865 - type: precision_at_10 value: 3.4680000000000004 - type: precision_at_100 value: 0.644 - type: precision_at_1000 value: 0.087 - type: precision_at_20 value: 2.185 - type: precision_at_3 value: 8.709 - type: precision_at_5 value: 5.946 - type: recall_at_1 value: 14.865 - type: recall_at_10 value: 34.685 - type: recall_at_100 value: 64.414 - type: recall_at_1000 value: 86.937 - type: recall_at_20 value: 43.694 - type: recall_at_3 value: 26.125999999999998 - type: recall_at_5 value: 29.73 - task: type: Classification dataset: name: MTEB Banking77Classification (default) type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 84.08116883116882 - type: f1 value: 84.05587055990273 - type: f1_weighted value: 84.05587055990274 - type: main_score value: 84.08116883116882 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P (default) type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 
metrics: - type: main_score value: 38.1941007822277 - type: v_measure value: 38.1941007822277 - type: v_measure_std value: 0.7502113547288178 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S (default) type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: main_score value: 34.42075599178318 - type: v_measure value: 34.42075599178318 - type: v_measure_std value: 0.600256720497283 - task: type: Clustering dataset: name: MTEB BlurbsClusteringP2P (default) type: slvnwhrl/blurbs-clustering-p2p config: default split: test revision: a2dd5b02a77de3466a3eaa98ae586b5610314496 metrics: - type: main_score value: 41.634627363047265 - type: v_measure value: 41.634627363047265 - type: v_measure_std value: 9.726923191225307 - task: type: Clustering dataset: name: MTEB BlurbsClusteringS2S (default) type: slvnwhrl/blurbs-clustering-s2s config: default split: test revision: 22793b6a6465bf00120ad525e38c51210858132c metrics: - type: main_score value: 20.996468295584197 - type: v_measure value: 20.996468295584197 - type: v_measure_std value: 9.225766688272197 - task: type: Classification dataset: name: MTEB CBD (default) type: PL-MTEB/cbd config: default split: test revision: 36ddb419bcffe6a5374c3891957912892916f28d metrics: - type: accuracy value: 69.99 - type: ap value: 22.57826353116948 - type: ap_weighted value: 22.57826353116948 - type: f1 value: 59.04574955548393 - type: f1_weighted value: 74.36235022309789 - type: main_score value: 69.99 - task: type: PairClassification dataset: name: MTEB CDSC-E (default) type: PL-MTEB/cdsce-pairclassification config: default split: test revision: 0a3d4aa409b22f80eb22cbf59b492637637b536d metrics: - type: cosine_accuracy value: 88.7 - type: cosine_accuracy_threshold value: 97.37848043441772 - type: cosine_ap value: 73.0405088928302 - type: cosine_f1 value: 63.52201257861635 - type: cosine_f1_threshold value: 96.98888063430786 - type: cosine_precision value: 78.90625 - type: cosine_recall value: 53.1578947368421 - type: dot_accuracy value: 84.89999999999999 - type: dot_accuracy_threshold value: 43603.09753417969 - type: dot_ap value: 56.98157569085279 - type: dot_f1 value: 57.606490872210955 - type: dot_f1_threshold value: 40406.23779296875 - type: dot_precision value: 46.864686468646866 - type: dot_recall value: 74.73684210526315 - type: euclidean_accuracy value: 88.5 - type: euclidean_accuracy_threshold value: 498.0483055114746 - type: euclidean_ap value: 72.97328234816734 - type: euclidean_f1 value: 63.722397476340696 - type: euclidean_f1_threshold value: 508.6186408996582 - type: euclidean_precision value: 79.52755905511812 - type: euclidean_recall value: 53.1578947368421 - type: main_score value: 73.0405088928302 - type: manhattan_accuracy value: 88.6 - type: manhattan_accuracy_threshold value: 12233.079528808594 - type: manhattan_ap value: 72.92148503992615 - type: manhattan_f1 value: 63.69426751592356 - type: manhattan_f1_threshold value: 12392.754364013672 - type: manhattan_precision value: 80.64516129032258 - type: manhattan_recall value: 52.63157894736842 - type: max_accuracy value: 88.7 - type: max_ap value: 73.0405088928302 - type: max_f1 value: 63.722397476340696 - type: max_precision value: 80.64516129032258 - type: max_recall value: 74.73684210526315 - type: similarity_accuracy value: 88.7 - type: similarity_accuracy_threshold value: 97.37848043441772 - type: similarity_ap value: 73.0405088928302 - type: similarity_f1 value: 63.52201257861635 - type: 
similarity_f1_threshold value: 96.98888063430786 - type: similarity_precision value: 78.90625 - type: similarity_recall value: 53.1578947368421 - task: type: STS dataset: name: MTEB CDSC-R (default) type: PL-MTEB/cdscr-sts config: default split: test revision: 1cd6abbb00df7d14be3dbd76a7dcc64b3a79a7cd metrics: - type: cosine_pearson value: 92.97492495289738 - type: cosine_spearman value: 92.63248098608472 - type: euclidean_pearson value: 92.04712487782031 - type: euclidean_spearman value: 92.19679486755008 - type: main_score value: 92.63248098608472 - type: manhattan_pearson value: 92.0101187740438 - type: manhattan_spearman value: 92.20926859332754 - type: pearson value: 92.97492495289738 - type: spearman value: 92.63248098608472 - task: type: Clustering dataset: name: MTEB CLSClusteringP2P (default) type: C-MTEB/CLSClusteringP2P config: default split: test revision: 4b6227591c6c1a73bc76b1055f3b7f3588e72476 metrics: - type: main_score value: 39.96377851800628 - type: v_measure value: 39.96377851800628 - type: v_measure_std value: 0.9793033243093288 - task: type: Clustering dataset: name: MTEB CLSClusteringS2S (default) type: C-MTEB/CLSClusteringS2S config: default split: test revision: e458b3f5414b62b7f9f83499ac1f5497ae2e869f metrics: - type: main_score value: 38.788850224595784 - type: v_measure value: 38.788850224595784 - type: v_measure_std value: 1.0712604145916924 - task: type: Reranking dataset: name: MTEB CMedQAv1 type: C-MTEB/CMedQAv1-reranking config: default split: test revision: 8d7f1e942507dac42dc58017c1a001c3717da7df metrics: - type: map value: 77.95952507806115 - type: mrr value: 80.8643253968254 - type: main_score value: 77.95952507806115 - task: type: Reranking dataset: name: MTEB CMedQAv2 type: C-MTEB/CMedQAv2-reranking config: default split: test revision: 23d186750531a14a0357ca22cd92d712fd512ea0 metrics: - type: map value: 78.21522500165045 - type: mrr value: 81.28194444444443 - type: main_score value: 78.21522500165045 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval (default) type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: map_at_1 value: 33.377 - type: map_at_10 value: 46.371 - type: map_at_100 value: 47.829 - type: map_at_1000 value: 47.94 - type: map_at_20 value: 47.205000000000005 - type: map_at_3 value: 42.782 - type: map_at_5 value: 44.86 - type: mrr_at_1 value: 41.345 - type: mrr_at_10 value: 52.187 - type: mrr_at_100 value: 52.893 - type: mrr_at_1000 value: 52.929 - type: mrr_at_20 value: 52.637 - type: mrr_at_3 value: 49.714000000000006 - type: mrr_at_5 value: 51.373000000000005 - type: ndcg_at_1 value: 41.345 - type: ndcg_at_10 value: 52.946000000000005 - type: ndcg_at_100 value: 57.92699999999999 - type: ndcg_at_1000 value: 59.609 - type: ndcg_at_20 value: 54.900999999999996 - type: ndcg_at_3 value: 48.357 - type: ndcg_at_5 value: 50.739000000000004 - type: precision_at_1 value: 41.345 - type: precision_at_10 value: 10.186 - type: precision_at_100 value: 1.554 - type: precision_at_1000 value: 0.2 - type: precision_at_20 value: 5.959 - type: precision_at_3 value: 23.796 - type: precision_at_5 value: 17.024 - type: recall_at_1 value: 33.377 - type: recall_at_10 value: 65.067 - type: recall_at_100 value: 86.04899999999999 - type: recall_at_1000 value: 96.54899999999999 - type: recall_at_20 value: 72.071 - type: recall_at_3 value: 51.349999999999994 - type: recall_at_5 value: 58.41 - type: main_score value: 52.946000000000005 - task: type: Retrieval dataset: 
name: MTEB CQADupstackEnglishRetrieval (default) type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: map_at_1 value: 31.097 - type: map_at_10 value: 42.183 - type: map_at_100 value: 43.580999999999996 - type: map_at_1000 value: 43.718 - type: map_at_20 value: 42.921 - type: map_at_3 value: 38.963 - type: map_at_5 value: 40.815 - type: mrr_at_1 value: 39.745000000000005 - type: mrr_at_10 value: 48.736000000000004 - type: mrr_at_100 value: 49.405 - type: mrr_at_1000 value: 49.452 - type: mrr_at_20 value: 49.118 - type: mrr_at_3 value: 46.497 - type: mrr_at_5 value: 47.827999999999996 - type: ndcg_at_1 value: 39.745000000000005 - type: ndcg_at_10 value: 48.248000000000005 - type: ndcg_at_100 value: 52.956 - type: ndcg_at_1000 value: 54.99699999999999 - type: ndcg_at_20 value: 50.01 - type: ndcg_at_3 value: 43.946000000000005 - type: ndcg_at_5 value: 46.038000000000004 - type: precision_at_1 value: 39.745000000000005 - type: precision_at_10 value: 9.229 - type: precision_at_100 value: 1.5070000000000001 - type: precision_at_1000 value: 0.199 - type: precision_at_20 value: 5.489999999999999 - type: precision_at_3 value: 21.38 - type: precision_at_5 value: 15.274 - type: recall_at_1 value: 31.097 - type: recall_at_10 value: 58.617 - type: recall_at_100 value: 78.55199999999999 - type: recall_at_1000 value: 91.13900000000001 - type: recall_at_20 value: 64.92 - type: recall_at_3 value: 45.672000000000004 - type: recall_at_5 value: 51.669 - type: main_score value: 48.248000000000005 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval (default) type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: map_at_1 value: 39.745000000000005 - type: map_at_10 value: 52.063 - type: map_at_100 value: 53.077 - type: map_at_1000 value: 53.13 - type: map_at_20 value: 52.66 - type: map_at_3 value: 48.662 - type: map_at_5 value: 50.507000000000005 - type: mrr_at_1 value: 45.391999999999996 - type: mrr_at_10 value: 55.528 - type: mrr_at_100 value: 56.16100000000001 - type: mrr_at_1000 value: 56.192 - type: mrr_at_20 value: 55.923 - type: mrr_at_3 value: 52.93600000000001 - type: mrr_at_5 value: 54.435 - type: ndcg_at_1 value: 45.391999999999996 - type: ndcg_at_10 value: 58.019 - type: ndcg_at_100 value: 61.936 - type: ndcg_at_1000 value: 63.015 - type: ndcg_at_20 value: 59.691 - type: ndcg_at_3 value: 52.294 - type: ndcg_at_5 value: 55.017 - type: precision_at_1 value: 45.391999999999996 - type: precision_at_10 value: 9.386 - type: precision_at_100 value: 1.232 - type: precision_at_1000 value: 0.136 - type: precision_at_20 value: 5.223 - type: precision_at_3 value: 23.177 - type: precision_at_5 value: 15.9 - type: recall_at_1 value: 39.745000000000005 - type: recall_at_10 value: 72.08099999999999 - type: recall_at_100 value: 88.85300000000001 - type: recall_at_1000 value: 96.569 - type: recall_at_20 value: 78.203 - type: recall_at_3 value: 56.957 - type: recall_at_5 value: 63.63100000000001 - type: main_score value: 58.019 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval (default) type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: map_at_1 value: 26.651999999999997 - type: map_at_10 value: 35.799 - type: map_at_100 value: 36.846000000000004 - type: map_at_1000 value: 36.931000000000004 - type: map_at_20 value: 36.341 - type: map_at_3 value: 32.999 - type: map_at_5 
value: 34.597 - type: mrr_at_1 value: 28.814 - type: mrr_at_10 value: 37.869 - type: mrr_at_100 value: 38.728 - type: mrr_at_1000 value: 38.795 - type: mrr_at_20 value: 38.317 - type: mrr_at_3 value: 35.235 - type: mrr_at_5 value: 36.738 - type: ndcg_at_1 value: 28.814 - type: ndcg_at_10 value: 41.028 - type: ndcg_at_100 value: 46.162 - type: ndcg_at_1000 value: 48.15 - type: ndcg_at_20 value: 42.824 - type: ndcg_at_3 value: 35.621 - type: ndcg_at_5 value: 38.277 - type: precision_at_1 value: 28.814 - type: precision_at_10 value: 6.361999999999999 - type: precision_at_100 value: 0.9450000000000001 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_20 value: 3.6159999999999997 - type: precision_at_3 value: 15.140999999999998 - type: precision_at_5 value: 10.712000000000002 - type: recall_at_1 value: 26.651999999999997 - type: recall_at_10 value: 55.038 - type: recall_at_100 value: 78.806 - type: recall_at_1000 value: 93.485 - type: recall_at_20 value: 61.742 - type: recall_at_3 value: 40.682 - type: recall_at_5 value: 46.855000000000004 - type: main_score value: 41.028 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval (default) type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: map_at_1 value: 17.627000000000002 - type: map_at_10 value: 26.436999999999998 - type: map_at_100 value: 27.85 - type: map_at_1000 value: 27.955999999999996 - type: map_at_20 value: 27.233 - type: map_at_3 value: 23.777 - type: map_at_5 value: 25.122 - type: mrr_at_1 value: 22.387999999999998 - type: mrr_at_10 value: 31.589 - type: mrr_at_100 value: 32.641999999999996 - type: mrr_at_1000 value: 32.696999999999996 - type: mrr_at_20 value: 32.201 - type: mrr_at_3 value: 28.98 - type: mrr_at_5 value: 30.342000000000002 - type: ndcg_at_1 value: 22.387999999999998 - type: ndcg_at_10 value: 32.129999999999995 - type: ndcg_at_100 value: 38.562999999999995 - type: ndcg_at_1000 value: 40.903 - type: ndcg_at_20 value: 34.652 - type: ndcg_at_3 value: 27.26 - type: ndcg_at_5 value: 29.235 - type: precision_at_1 value: 22.387999999999998 - type: precision_at_10 value: 5.970000000000001 - type: precision_at_100 value: 1.068 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_20 value: 3.6999999999999997 - type: precision_at_3 value: 13.267000000000001 - type: precision_at_5 value: 9.403 - type: recall_at_1 value: 17.627000000000002 - type: recall_at_10 value: 44.71 - type: recall_at_100 value: 72.426 - type: recall_at_1000 value: 88.64699999999999 - type: recall_at_20 value: 53.65 - type: recall_at_3 value: 30.989 - type: recall_at_5 value: 36.237 - type: main_score value: 32.129999999999995 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval (default) type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: map_at_1 value: 30.891000000000002 - type: map_at_10 value: 41.519 - type: map_at_100 value: 42.896 - type: map_at_1000 value: 42.992999999999995 - type: map_at_20 value: 42.287 - type: map_at_3 value: 37.822 - type: map_at_5 value: 39.976 - type: mrr_at_1 value: 37.921 - type: mrr_at_10 value: 47.260999999999996 - type: mrr_at_100 value: 48.044 - type: mrr_at_1000 value: 48.08 - type: mrr_at_20 value: 47.699999999999996 - type: mrr_at_3 value: 44.513999999999996 - type: mrr_at_5 value: 46.064 - type: ndcg_at_1 value: 37.921 - type: ndcg_at_10 value: 47.806 - type: ndcg_at_100 value: 53.274 
- type: ndcg_at_1000 value: 55.021 - type: ndcg_at_20 value: 49.973 - type: ndcg_at_3 value: 42.046 - type: ndcg_at_5 value: 44.835 - type: precision_at_1 value: 37.921 - type: precision_at_10 value: 8.767999999999999 - type: precision_at_100 value: 1.353 - type: precision_at_1000 value: 0.168 - type: precision_at_20 value: 5.135 - type: precision_at_3 value: 20.051 - type: precision_at_5 value: 14.398 - type: recall_at_1 value: 30.891000000000002 - type: recall_at_10 value: 60.897999999999996 - type: recall_at_100 value: 83.541 - type: recall_at_1000 value: 94.825 - type: recall_at_20 value: 68.356 - type: recall_at_3 value: 44.65 - type: recall_at_5 value: 51.919000000000004 - type: main_score value: 47.806 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval (default) type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: map_at_1 value: 27.654 - type: map_at_10 value: 38.025999999999996 - type: map_at_100 value: 39.425 - type: map_at_1000 value: 39.528 - type: map_at_20 value: 38.838 - type: map_at_3 value: 34.745 - type: map_at_5 value: 36.537 - type: mrr_at_1 value: 34.018 - type: mrr_at_10 value: 43.314 - type: mrr_at_100 value: 44.283 - type: mrr_at_1000 value: 44.327 - type: mrr_at_20 value: 43.929 - type: mrr_at_3 value: 40.868 - type: mrr_at_5 value: 42.317 - type: ndcg_at_1 value: 34.018 - type: ndcg_at_10 value: 43.887 - type: ndcg_at_100 value: 49.791000000000004 - type: ndcg_at_1000 value: 51.834 - type: ndcg_at_20 value: 46.376 - type: ndcg_at_3 value: 38.769999999999996 - type: ndcg_at_5 value: 41.144 - type: precision_at_1 value: 34.018 - type: precision_at_10 value: 8.001999999999999 - type: precision_at_100 value: 1.2630000000000001 - type: precision_at_1000 value: 0.16 - type: precision_at_20 value: 4.737 - type: precision_at_3 value: 18.417 - type: precision_at_5 value: 13.150999999999998 - type: recall_at_1 value: 27.654 - type: recall_at_10 value: 56.111 - type: recall_at_100 value: 81.136 - type: recall_at_1000 value: 94.788 - type: recall_at_20 value: 65.068 - type: recall_at_3 value: 41.713 - type: recall_at_5 value: 48.106 - type: main_score value: 43.887 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval (default) type: CQADupstackRetrieval_is_a_combined_dataset config: default split: test revision: CQADupstackRetrieval_is_a_combined_dataset metrics: - type: main_score value: 42.58858333333333 - type: ndcg_at_10 value: 42.58858333333333 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval (default) type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: map_at_1 value: 24.501 - type: map_at_10 value: 32.814 - type: map_at_100 value: 33.754 - type: map_at_1000 value: 33.859 - type: map_at_20 value: 33.324 - type: map_at_3 value: 30.758000000000003 - type: map_at_5 value: 31.936999999999998 - type: mrr_at_1 value: 27.761000000000003 - type: mrr_at_10 value: 35.662 - type: mrr_at_100 value: 36.443999999999996 - type: mrr_at_1000 value: 36.516999999999996 - type: mrr_at_20 value: 36.085 - type: mrr_at_3 value: 33.742 - type: mrr_at_5 value: 34.931 - type: ndcg_at_1 value: 27.761000000000003 - type: ndcg_at_10 value: 37.208000000000006 - type: ndcg_at_100 value: 41.839 - type: ndcg_at_1000 value: 44.421 - type: ndcg_at_20 value: 38.917 - type: ndcg_at_3 value: 33.544000000000004 - type: ndcg_at_5 value: 35.374 - type: precision_at_1 value: 27.761000000000003 - type: 
precision_at_10 value: 5.92 - type: precision_at_100 value: 0.899 - type: precision_at_1000 value: 0.12 - type: precision_at_20 value: 3.4130000000000003 - type: precision_at_3 value: 15.031 - type: precision_at_5 value: 10.306999999999999 - type: recall_at_1 value: 24.501 - type: recall_at_10 value: 47.579 - type: recall_at_100 value: 69.045 - type: recall_at_1000 value: 88.032 - type: recall_at_20 value: 54.125 - type: recall_at_3 value: 37.202 - type: recall_at_5 value: 41.927 - type: main_score value: 37.208000000000006 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval (default) type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: map_at_1 value: 18.29 - type: map_at_10 value: 26.183 - type: map_at_100 value: 27.351999999999997 - type: map_at_1000 value: 27.483999999999998 - type: map_at_20 value: 26.798 - type: map_at_3 value: 23.629 - type: map_at_5 value: 24.937 - type: mrr_at_1 value: 22.299 - type: mrr_at_10 value: 30.189 - type: mrr_at_100 value: 31.098 - type: mrr_at_1000 value: 31.177 - type: mrr_at_20 value: 30.697000000000003 - type: mrr_at_3 value: 27.862 - type: mrr_at_5 value: 29.066 - type: ndcg_at_1 value: 22.299 - type: ndcg_at_10 value: 31.202 - type: ndcg_at_100 value: 36.617 - type: ndcg_at_1000 value: 39.544000000000004 - type: ndcg_at_20 value: 33.177 - type: ndcg_at_3 value: 26.639000000000003 - type: ndcg_at_5 value: 28.526 - type: precision_at_1 value: 22.299 - type: precision_at_10 value: 5.8020000000000005 - type: precision_at_100 value: 1.0070000000000001 - type: precision_at_1000 value: 0.14400000000000002 - type: precision_at_20 value: 3.505 - type: precision_at_3 value: 12.698 - type: precision_at_5 value: 9.174 - type: recall_at_1 value: 18.29 - type: recall_at_10 value: 42.254999999999995 - type: recall_at_100 value: 66.60000000000001 - type: recall_at_1000 value: 87.31400000000001 - type: recall_at_20 value: 49.572 - type: recall_at_3 value: 29.342000000000002 - type: recall_at_5 value: 34.221000000000004 - type: main_score value: 31.202 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval (default) type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: map_at_1 value: 27.722 - type: map_at_10 value: 37.698 - type: map_at_100 value: 38.899 - type: map_at_1000 value: 38.998 - type: map_at_20 value: 38.381 - type: map_at_3 value: 34.244 - type: map_at_5 value: 36.295 - type: mrr_at_1 value: 32.183 - type: mrr_at_10 value: 41.429 - type: mrr_at_100 value: 42.308 - type: mrr_at_1000 value: 42.358000000000004 - type: mrr_at_20 value: 41.957 - type: mrr_at_3 value: 38.401999999999994 - type: mrr_at_5 value: 40.294999999999995 - type: ndcg_at_1 value: 32.183 - type: ndcg_at_10 value: 43.519000000000005 - type: ndcg_at_100 value: 48.786 - type: ndcg_at_1000 value: 50.861999999999995 - type: ndcg_at_20 value: 45.654 - type: ndcg_at_3 value: 37.521 - type: ndcg_at_5 value: 40.615 - type: precision_at_1 value: 32.183 - type: precision_at_10 value: 7.603 - type: precision_at_100 value: 1.135 - type: precision_at_1000 value: 0.14200000000000002 - type: precision_at_20 value: 4.408 - type: precision_at_3 value: 17.071 - type: precision_at_5 value: 12.668 - type: recall_at_1 value: 27.722 - type: recall_at_10 value: 57.230000000000004 - type: recall_at_100 value: 79.97999999999999 - type: recall_at_1000 value: 94.217 - type: recall_at_20 value: 64.864 - type: recall_at_3 value: 41.215 - type: 
recall_at_5 value: 48.774 - type: main_score value: 43.519000000000005 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval (default) type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: map_at_1 value: 25.852999999999998 - type: map_at_10 value: 35.394999999999996 - type: map_at_100 value: 37.291999999999994 - type: map_at_1000 value: 37.495 - type: map_at_20 value: 36.372 - type: map_at_3 value: 32.336 - type: map_at_5 value: 34.159 - type: mrr_at_1 value: 31.818 - type: mrr_at_10 value: 40.677 - type: mrr_at_100 value: 41.728 - type: mrr_at_1000 value: 41.778 - type: mrr_at_20 value: 41.301 - type: mrr_at_3 value: 38.208 - type: mrr_at_5 value: 39.592 - type: ndcg_at_1 value: 31.818 - type: ndcg_at_10 value: 41.559000000000005 - type: ndcg_at_100 value: 48.012 - type: ndcg_at_1000 value: 50.234 - type: ndcg_at_20 value: 44.15 - type: ndcg_at_3 value: 36.918 - type: ndcg_at_5 value: 39.227000000000004 - type: precision_at_1 value: 31.818 - type: precision_at_10 value: 8.043 - type: precision_at_100 value: 1.625 - type: precision_at_1000 value: 0.245 - type: precision_at_20 value: 5.2170000000000005 - type: precision_at_3 value: 17.655 - type: precision_at_5 value: 12.845999999999998 - type: recall_at_1 value: 25.852999999999998 - type: recall_at_10 value: 53.093 - type: recall_at_100 value: 81.05799999999999 - type: recall_at_1000 value: 94.657 - type: recall_at_20 value: 62.748000000000005 - type: recall_at_3 value: 39.300000000000004 - type: recall_at_5 value: 45.754 - type: main_score value: 41.559000000000005 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval (default) type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 19.23 - type: map_at_10 value: 28.128999999999998 - type: map_at_100 value: 29.195 - type: map_at_1000 value: 29.310000000000002 - type: map_at_20 value: 28.713 - type: map_at_3 value: 25.191000000000003 - type: map_at_5 value: 26.69 - type: mrr_at_1 value: 21.257 - type: mrr_at_10 value: 30.253999999999998 - type: mrr_at_100 value: 31.195 - type: mrr_at_1000 value: 31.270999999999997 - type: mrr_at_20 value: 30.747999999999998 - type: mrr_at_3 value: 27.633999999999997 - type: mrr_at_5 value: 28.937 - type: ndcg_at_1 value: 21.257 - type: ndcg_at_10 value: 33.511 - type: ndcg_at_100 value: 38.733000000000004 - type: ndcg_at_1000 value: 41.489 - type: ndcg_at_20 value: 35.476 - type: ndcg_at_3 value: 27.845 - type: ndcg_at_5 value: 30.264999999999997 - type: precision_at_1 value: 21.257 - type: precision_at_10 value: 5.619 - type: precision_at_100 value: 0.893 - type: precision_at_1000 value: 0.124 - type: precision_at_20 value: 3.29 - type: precision_at_3 value: 12.508 - type: precision_at_5 value: 8.946 - type: recall_at_1 value: 19.23 - type: recall_at_10 value: 48.185 - type: recall_at_100 value: 71.932 - type: recall_at_1000 value: 92.587 - type: recall_at_20 value: 55.533 - type: recall_at_3 value: 32.865 - type: recall_at_5 value: 38.577 - type: main_score value: 33.511 - task: type: Retrieval dataset: name: MTEB ClimateFEVER (default) type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: map_at_1 value: 19.594 - type: map_at_10 value: 32.519 - type: map_at_100 value: 34.1 - type: map_at_1000 value: 34.263 - type: map_at_20 value: 33.353 - type: map_at_3 value: 27.898 - type: map_at_5 
value: 30.524 - type: mrr_at_1 value: 46.515 - type: mrr_at_10 value: 56.958 - type: mrr_at_100 value: 57.54899999999999 - type: mrr_at_1000 value: 57.574999999999996 - type: mrr_at_20 value: 57.315000000000005 - type: mrr_at_3 value: 54.852999999999994 - type: mrr_at_5 value: 56.153 - type: ndcg_at_1 value: 46.515 - type: ndcg_at_10 value: 42.363 - type: ndcg_at_100 value: 48.233 - type: ndcg_at_1000 value: 50.993 - type: ndcg_at_20 value: 44.533 - type: ndcg_at_3 value: 37.297000000000004 - type: ndcg_at_5 value: 38.911 - type: precision_at_1 value: 46.515 - type: precision_at_10 value: 12.520999999999999 - type: precision_at_100 value: 1.8980000000000001 - type: precision_at_1000 value: 0.242 - type: precision_at_20 value: 7.212000000000001 - type: precision_at_3 value: 27.752 - type: precision_at_5 value: 20.391000000000002 - type: recall_at_1 value: 19.594 - type: recall_at_10 value: 46.539 - type: recall_at_100 value: 66.782 - type: recall_at_1000 value: 82.049 - type: recall_at_20 value: 52.611 - type: recall_at_3 value: 32.528 - type: recall_at_5 value: 38.933 - type: main_score value: 42.363 - task: type: Retrieval dataset: name: MTEB CmedqaRetrieval (default) type: C-MTEB/CmedqaRetrieval config: default split: dev revision: cd540c506dae1cf9e9a59c3e06f42030d54e7301 metrics: - type: main_score value: 35.927 - type: map_at_1 value: 20.144000000000002 - type: map_at_10 value: 29.94 - type: map_at_100 value: 31.630000000000003 - type: map_at_1000 value: 31.778000000000002 - type: map_at_20 value: 30.798 - type: map_at_3 value: 26.534999999999997 - type: map_at_5 value: 28.33 - type: mrr_at_1 value: 31.23280820205051 - type: mrr_at_10 value: 38.66781179421835 - type: mrr_at_100 value: 39.656936166081785 - type: mrr_at_1000 value: 39.724602893117414 - type: mrr_at_20 value: 39.21272461558451 - type: mrr_at_3 value: 36.30907726931729 - type: mrr_at_5 value: 37.59814953738436 - type: nauc_map_at_1000_diff1 value: 44.5755334437146 - type: nauc_map_at_1000_max value: 40.726916781400746 - type: nauc_map_at_1000_std value: -19.591835061497367 - type: nauc_map_at_100_diff1 value: 44.54542899921038 - type: nauc_map_at_100_max value: 40.68305902532837 - type: nauc_map_at_100_std value: -19.658902089283487 - type: nauc_map_at_10_diff1 value: 44.56110529630953 - type: nauc_map_at_10_max value: 39.89826167846008 - type: nauc_map_at_10_std value: -20.62910633667902 - type: nauc_map_at_1_diff1 value: 50.82120107004449 - type: nauc_map_at_1_max value: 33.208851367861584 - type: nauc_map_at_1_std value: -20.29409730258174 - type: nauc_map_at_20_diff1 value: 44.51171242433788 - type: nauc_map_at_20_max value: 40.30431132782945 - type: nauc_map_at_20_std value: -20.290524142792417 - type: nauc_map_at_3_diff1 value: 45.80394138665133 - type: nauc_map_at_3_max value: 37.766191281426956 - type: nauc_map_at_3_std value: -21.223601997333876 - type: nauc_map_at_5_diff1 value: 45.00457218474283 - type: nauc_map_at_5_max value: 38.901044576388365 - type: nauc_map_at_5_std value: -20.893069613941634 - type: nauc_mrr_at_1000_diff1 value: 50.09855359231429 - type: nauc_mrr_at_1000_max value: 46.481000170008826 - type: nauc_mrr_at_1000_std value: -16.053461377096102 - type: nauc_mrr_at_100_diff1 value: 50.08205026347746 - type: nauc_mrr_at_100_max value: 46.47262126963331 - type: nauc_mrr_at_100_std value: -16.049112778748693 - type: nauc_mrr_at_10_diff1 value: 50.02363239081706 - type: nauc_mrr_at_10_max value: 46.39287859062042 - type: nauc_mrr_at_10_std value: -16.280866744769657 - type: nauc_mrr_at_1_diff1 
value: 55.692503735317445 - type: nauc_mrr_at_1_max value: 47.334834529801014 - type: nauc_mrr_at_1_std value: -16.985483585693512 - type: nauc_mrr_at_20_diff1 value: 50.07725225722074 - type: nauc_mrr_at_20_max value: 46.47279295070193 - type: nauc_mrr_at_20_std value: -16.15168364678318 - type: nauc_mrr_at_3_diff1 value: 51.18685337274134 - type: nauc_mrr_at_3_max value: 46.7286365021621 - type: nauc_mrr_at_3_std value: -16.708451287313718 - type: nauc_mrr_at_5_diff1 value: 50.46777237893576 - type: nauc_mrr_at_5_max value: 46.5352076502249 - type: nauc_mrr_at_5_std value: -16.557413659905034 - type: nauc_ndcg_at_1000_diff1 value: 43.974299434438066 - type: nauc_ndcg_at_1000_max value: 43.44628675071857 - type: nauc_ndcg_at_1000_std value: -15.3495102005021 - type: nauc_ndcg_at_100_diff1 value: 43.336365081508504 - type: nauc_ndcg_at_100_max value: 43.11345604460776 - type: nauc_ndcg_at_100_std value: -15.571128070860615 - type: nauc_ndcg_at_10_diff1 value: 43.41266214720136 - type: nauc_ndcg_at_10_max value: 41.519676787851914 - type: nauc_ndcg_at_10_std value: -19.217175017223568 - type: nauc_ndcg_at_1_diff1 value: 55.692503735317445 - type: nauc_ndcg_at_1_max value: 47.334834529801014 - type: nauc_ndcg_at_1_std value: -16.985483585693512 - type: nauc_ndcg_at_20_diff1 value: 43.351653862834496 - type: nauc_ndcg_at_20_max value: 42.11608469750499 - type: nauc_ndcg_at_20_std value: -18.485363540641664 - type: nauc_ndcg_at_3_diff1 value: 45.64193888236677 - type: nauc_ndcg_at_3_max value: 42.497135099009995 - type: nauc_ndcg_at_3_std value: -18.764012041130094 - type: nauc_ndcg_at_5_diff1 value: 44.523392133895186 - type: nauc_ndcg_at_5_max value: 41.564242030096345 - type: nauc_ndcg_at_5_std value: -19.31080790984941 - type: nauc_precision_at_1000_diff1 value: 6.383464615714393 - type: nauc_precision_at_1000_max value: 27.439930931284657 - type: nauc_precision_at_1000_std value: 19.070716188143034 - type: nauc_precision_at_100_diff1 value: 12.599136754501284 - type: nauc_precision_at_100_max value: 35.886310962337795 - type: nauc_precision_at_100_std value: 14.06587592659196 - type: nauc_precision_at_10_diff1 value: 25.388891173150206 - type: nauc_precision_at_10_max value: 46.10269270777384 - type: nauc_precision_at_10_std value: -5.993803607158499 - type: nauc_precision_at_1_diff1 value: 55.692503735317445 - type: nauc_precision_at_1_max value: 47.334834529801014 - type: nauc_precision_at_1_std value: -16.985483585693512 - type: nauc_precision_at_20_diff1 value: 20.984013463099707 - type: nauc_precision_at_20_max value: 42.9471854616888 - type: nauc_precision_at_20_std value: -0.8045549929346024 - type: nauc_precision_at_3_diff1 value: 36.191850547148356 - type: nauc_precision_at_3_max value: 48.09923832376049 - type: nauc_precision_at_3_std value: -13.159407051271321 - type: nauc_precision_at_5_diff1 value: 31.04967966700407 - type: nauc_precision_at_5_max value: 47.62867673349624 - type: nauc_precision_at_5_std value: -10.345790325137353 - type: nauc_recall_at_1000_diff1 value: 11.03436839065707 - type: nauc_recall_at_1000_max value: 42.32265076651575 - type: nauc_recall_at_1000_std value: 30.478521053399206 - type: nauc_recall_at_100_diff1 value: 24.788349084510806 - type: nauc_recall_at_100_max value: 36.72097184821956 - type: nauc_recall_at_100_std value: -0.2241144179522076 - type: nauc_recall_at_10_diff1 value: 31.613053567704885 - type: nauc_recall_at_10_max value: 34.4597322828833 - type: nauc_recall_at_10_std value: -18.00022912690819 - type: nauc_recall_at_1_diff1 value: 
50.82120107004449 - type: nauc_recall_at_1_max value: 33.208851367861584 - type: nauc_recall_at_1_std value: -20.29409730258174 - type: nauc_recall_at_20_diff1 value: 30.277002670708384 - type: nauc_recall_at_20_max value: 35.212475675060375 - type: nauc_recall_at_20_std value: -15.822788854733687 - type: nauc_recall_at_3_diff1 value: 38.87844958322257 - type: nauc_recall_at_3_max value: 34.66914910044104 - type: nauc_recall_at_3_std value: -20.234707300209127 - type: nauc_recall_at_5_diff1 value: 35.551139991687776 - type: nauc_recall_at_5_max value: 34.61009958820695 - type: nauc_recall_at_5_std value: -19.519180149293444 - type: ndcg_at_1 value: 31.233 - type: ndcg_at_10 value: 35.927 - type: ndcg_at_100 value: 43.037 - type: ndcg_at_1000 value: 45.900999999999996 - type: ndcg_at_20 value: 38.39 - type: ndcg_at_3 value: 31.366 - type: ndcg_at_5 value: 33.108 - type: precision_at_1 value: 31.233 - type: precision_at_10 value: 8.15 - type: precision_at_100 value: 1.402 - type: precision_at_1000 value: 0.17700000000000002 - type: precision_at_20 value: 4.91 - type: precision_at_3 value: 17.871000000000002 - type: precision_at_5 value: 12.948 - type: recall_at_1 value: 20.144000000000002 - type: recall_at_10 value: 44.985 - type: recall_at_100 value: 74.866 - type: recall_at_1000 value: 94.477 - type: recall_at_20 value: 53.37 - type: recall_at_3 value: 31.141000000000002 - type: recall_at_5 value: 36.721 - task: type: PairClassification dataset: name: MTEB Cmnli (default) type: C-MTEB/CMNLI config: default split: validation revision: None metrics: - type: cos_sim_accuracy value: 71.25676488274203 - type: cos_sim_accuracy_threshold value: 78.11152935028076 - type: cos_sim_ap value: 79.10444825556077 - type: cos_sim_f1 value: 74.10750923266312 - type: cos_sim_f1_threshold value: 75.2312421798706 - type: cos_sim_precision value: 66.02083714129044 - type: cos_sim_recall value: 84.45171849427169 - type: dot_accuracy value: 68.11785929043896 - type: dot_accuracy_threshold value: 34783.23974609375 - type: dot_ap value: 75.80201827987712 - type: dot_f1 value: 72.31670990679349 - type: dot_f1_threshold value: 31978.036499023438 - type: dot_precision value: 61.386623164763456 - type: dot_recall value: 87.98223053542202 - type: euclidean_accuracy value: 71.41310883944678 - type: euclidean_accuracy_threshold value: 1374.9353408813477 - type: euclidean_ap value: 79.23359768836457 - type: euclidean_f1 value: 74.38512297540491 - type: euclidean_f1_threshold value: 1512.6035690307617 - type: euclidean_precision value: 64.97816593886463 - type: euclidean_recall value: 86.97685293429974 - type: manhattan_accuracy value: 71.32892363199038 - type: manhattan_accuracy_threshold value: 33340.49072265625 - type: manhattan_ap value: 79.11973684118587 - type: manhattan_f1 value: 74.29401993355481 - type: manhattan_f1_threshold value: 36012.52746582031 - type: manhattan_precision value: 66.81605975723622 - type: manhattan_recall value: 83.65676876315175 - type: max_accuracy value: 71.41310883944678 - type: max_ap value: 79.23359768836457 - type: max_f1 value: 74.38512297540491 - task: type: Retrieval dataset: name: MTEB CovidRetrieval (default) type: C-MTEB/CovidRetrieval config: default split: dev revision: 1271c7809071a13532e05f25fb53511ffce77117 metrics: - type: main_score value: 78.917 - type: map_at_1 value: 67.281 - type: map_at_10 value: 75.262 - type: map_at_100 value: 75.60900000000001 - type: map_at_1000 value: 75.618 - type: map_at_20 value: 75.50200000000001 - type: map_at_3 value: 73.455 - type: 
map_at_5 value: 74.657 - type: mrr_at_1 value: 67.43940990516333 - type: mrr_at_10 value: 75.27367989696756 - type: mrr_at_100 value: 75.62029353306437 - type: mrr_at_1000 value: 75.62934741874726 - type: mrr_at_20 value: 75.51356607409173 - type: mrr_at_3 value: 73.5159817351598 - type: mrr_at_5 value: 74.73832103969093 - type: nauc_map_at_1000_diff1 value: 77.26666391867634 - type: nauc_map_at_1000_max value: 49.928541012203496 - type: nauc_map_at_1000_std value: -40.494469470474456 - type: nauc_map_at_100_diff1 value: 77.26087423162396 - type: nauc_map_at_100_max value: 49.944275615664424 - type: nauc_map_at_100_std value: -40.48299992715398 - type: nauc_map_at_10_diff1 value: 76.97400113500906 - type: nauc_map_at_10_max value: 49.84177029115674 - type: nauc_map_at_10_std value: -40.829250876511445 - type: nauc_map_at_1_diff1 value: 81.44050620630395 - type: nauc_map_at_1_max value: 48.97711944070578 - type: nauc_map_at_1_std value: -38.963689457570254 - type: nauc_map_at_20_diff1 value: 77.21791353089375 - type: nauc_map_at_20_max value: 49.958206759079424 - type: nauc_map_at_20_std value: -40.53067571658996 - type: nauc_map_at_3_diff1 value: 77.3555925208868 - type: nauc_map_at_3_max value: 49.32158146451256 - type: nauc_map_at_3_std value: -41.93552426981978 - type: nauc_map_at_5_diff1 value: 77.07099950431504 - type: nauc_map_at_5_max value: 49.54190504495002 - type: nauc_map_at_5_std value: -41.814968130918096 - type: nauc_mrr_at_1000_diff1 value: 77.31388774540477 - type: nauc_mrr_at_1000_max value: 49.96779699175759 - type: nauc_mrr_at_1000_std value: -40.43739645160277 - type: nauc_mrr_at_100_diff1 value: 77.30817786449413 - type: nauc_mrr_at_100_max value: 49.982514428937655 - type: nauc_mrr_at_100_std value: -40.42876582797744 - type: nauc_mrr_at_10_diff1 value: 77.02048060465756 - type: nauc_mrr_at_10_max value: 49.87937207270602 - type: nauc_mrr_at_10_std value: -40.77596560333177 - type: nauc_mrr_at_1_diff1 value: 81.27219599516599 - type: nauc_mrr_at_1_max value: 49.3083394026327 - type: nauc_mrr_at_1_std value: -38.31023037552026 - type: nauc_mrr_at_20_diff1 value: 77.26497089316055 - type: nauc_mrr_at_20_max value: 49.996257597621415 - type: nauc_mrr_at_20_std value: -40.476723608868014 - type: nauc_mrr_at_3_diff1 value: 77.38971294099257 - type: nauc_mrr_at_3_max value: 49.38110328987404 - type: nauc_mrr_at_3_std value: -41.7118646715979 - type: nauc_mrr_at_5_diff1 value: 77.08286142519952 - type: nauc_mrr_at_5_max value: 49.655249374588685 - type: nauc_mrr_at_5_std value: -41.48173039989406 - type: nauc_ndcg_at_1000_diff1 value: 76.47399204021758 - type: nauc_ndcg_at_1000_max value: 50.55770139961048 - type: nauc_ndcg_at_1000_std value: -39.55650430279072 - type: nauc_ndcg_at_100_diff1 value: 76.29355616618253 - type: nauc_ndcg_at_100_max value: 51.003608112592936 - type: nauc_ndcg_at_100_std value: -39.24769744605206 - type: nauc_ndcg_at_10_diff1 value: 74.88697528447634 - type: nauc_ndcg_at_10_max value: 50.398416372815234 - type: nauc_ndcg_at_10_std value: -40.76526585772833 - type: nauc_ndcg_at_1_diff1 value: 81.27219599516599 - type: nauc_ndcg_at_1_max value: 49.3083394026327 - type: nauc_ndcg_at_1_std value: -38.31023037552026 - type: nauc_ndcg_at_20_diff1 value: 75.85463512091866 - type: nauc_ndcg_at_20_max value: 50.97338683654334 - type: nauc_ndcg_at_20_std value: -39.353128774903404 - type: nauc_ndcg_at_3_diff1 value: 75.94015726123543 - type: nauc_ndcg_at_3_max value: 49.22194251063148 - type: nauc_ndcg_at_3_std value: -43.040457030630435 - type: 
nauc_ndcg_at_5_diff1 value: 75.19166189770303 - type: nauc_ndcg_at_5_max value: 49.65696229797189 - type: nauc_ndcg_at_5_std value: -42.81534909184424 - type: nauc_precision_at_1000_diff1 value: -14.830901395815788 - type: nauc_precision_at_1000_max value: 19.686297136854623 - type: nauc_precision_at_1000_std value: 61.19310360166978 - type: nauc_precision_at_100_diff1 value: 20.55469986751769 - type: nauc_precision_at_100_max value: 50.78431835075583 - type: nauc_precision_at_100_std value: 31.54986568374813 - type: nauc_precision_at_10_diff1 value: 45.991938532558656 - type: nauc_precision_at_10_max value: 46.386318595630385 - type: nauc_precision_at_10_std value: -23.463011435224608 - type: nauc_precision_at_1_diff1 value: 81.27219599516599 - type: nauc_precision_at_1_max value: 49.3083394026327 - type: nauc_precision_at_1_std value: -38.31023037552026 - type: nauc_precision_at_20_diff1 value: 41.53180472410822 - type: nauc_precision_at_20_max value: 49.89800247204318 - type: nauc_precision_at_20_std value: -2.4192847331537095 - type: nauc_precision_at_3_diff1 value: 67.37504651209993 - type: nauc_precision_at_3_max value: 47.893537208629496 - type: nauc_precision_at_3_std value: -43.2362212382819 - type: nauc_precision_at_5_diff1 value: 60.03438883791718 - type: nauc_precision_at_5_max value: 48.29770502354206 - type: nauc_precision_at_5_std value: -40.39588448271546 - type: nauc_recall_at_1000_diff1 value: 71.04741174480844 - type: nauc_recall_at_1000_max value: 93.19056506596002 - type: nauc_recall_at_1000_std value: 62.96994797650912 - type: nauc_recall_at_100_diff1 value: 65.00418176852641 - type: nauc_recall_at_100_max value: 85.27352708427193 - type: nauc_recall_at_100_std value: 2.8812005546518886 - type: nauc_recall_at_10_diff1 value: 61.263254794998865 - type: nauc_recall_at_10_max value: 54.17618329507141 - type: nauc_recall_at_10_std value: -39.80603966142593 - type: nauc_recall_at_1_diff1 value: 81.44050620630395 - type: nauc_recall_at_1_max value: 48.97711944070578 - type: nauc_recall_at_1_std value: -38.963689457570254 - type: nauc_recall_at_20_diff1 value: 64.42106091745396 - type: nauc_recall_at_20_max value: 63.10796640821887 - type: nauc_recall_at_20_std value: -22.60117424572222 - type: nauc_recall_at_3_diff1 value: 70.66311436592945 - type: nauc_recall_at_3_max value: 48.69498944323469 - type: nauc_recall_at_3_std value: -47.37847524874532 - type: nauc_recall_at_5_diff1 value: 66.12701111728848 - type: nauc_recall_at_5_max value: 49.91763957934711 - type: nauc_recall_at_5_std value: -48.173252920584126 - type: ndcg_at_1 value: 67.43900000000001 - type: ndcg_at_10 value: 78.917 - type: ndcg_at_100 value: 80.53399999999999 - type: ndcg_at_1000 value: 80.768 - type: ndcg_at_20 value: 79.813 - type: ndcg_at_3 value: 75.37 - type: ndcg_at_5 value: 77.551 - type: precision_at_1 value: 67.43900000000001 - type: precision_at_10 value: 9.115 - type: precision_at_100 value: 0.985 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.737 - type: precision_at_3 value: 27.081 - type: precision_at_5 value: 17.345 - type: recall_at_1 value: 67.281 - type: recall_at_10 value: 90.2 - type: recall_at_100 value: 97.576 - type: recall_at_1000 value: 99.368 - type: recall_at_20 value: 93.783 - type: recall_at_3 value: 80.822 - type: recall_at_5 value: 86.091 - task: type: Retrieval dataset: name: MTEB DBPedia (default) type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: map_at_1 value: 9.041 - type: map_at_10 
value: 18.662 - type: map_at_100 value: 26.054 - type: map_at_1000 value: 27.769 - type: map_at_20 value: 21.499 - type: map_at_3 value: 13.628000000000002 - type: map_at_5 value: 15.617 - type: mrr_at_1 value: 67.25 - type: mrr_at_10 value: 74.673 - type: mrr_at_100 value: 75.022 - type: mrr_at_1000 value: 75.031 - type: mrr_at_20 value: 74.895 - type: mrr_at_3 value: 73.042 - type: mrr_at_5 value: 74.179 - type: ndcg_at_1 value: 55.75 - type: ndcg_at_10 value: 41.004000000000005 - type: ndcg_at_100 value: 44.912 - type: ndcg_at_1000 value: 51.946000000000005 - type: ndcg_at_20 value: 40.195 - type: ndcg_at_3 value: 45.803 - type: ndcg_at_5 value: 42.976 - type: precision_at_1 value: 67.25 - type: precision_at_10 value: 31.874999999999996 - type: precision_at_100 value: 10.37 - type: precision_at_1000 value: 2.1430000000000002 - type: precision_at_20 value: 24.275 - type: precision_at_3 value: 48.417 - type: precision_at_5 value: 40.2 - type: recall_at_1 value: 9.041 - type: recall_at_10 value: 23.592 - type: recall_at_100 value: 49.476 - type: recall_at_1000 value: 71.677 - type: recall_at_20 value: 30.153000000000002 - type: recall_at_3 value: 14.777000000000001 - type: recall_at_5 value: 17.829 - type: main_score value: 41.004000000000005 - task: type: Retrieval dataset: name: MTEB DuRetrieval (default) type: C-MTEB/DuRetrieval config: default split: dev revision: a1a333e290fe30b10f3f56498e3a0d911a693ced metrics: - type: main_score value: 83.134 - type: map_at_1 value: 23.907999999999998 - type: map_at_10 value: 74.566 - type: map_at_100 value: 77.706 - type: map_at_1000 value: 77.762 - type: map_at_20 value: 76.943 - type: map_at_3 value: 50.971999999999994 - type: map_at_5 value: 64.429 - type: mrr_at_1 value: 84.8 - type: mrr_at_10 value: 89.73218253968246 - type: mrr_at_100 value: 89.82853630655774 - type: mrr_at_1000 value: 89.83170411703153 - type: mrr_at_20 value: 89.79582030091501 - type: mrr_at_3 value: 89.32499999999992 - type: mrr_at_5 value: 89.58749999999992 - type: nauc_map_at_1000_diff1 value: -2.2736020650163717 - type: nauc_map_at_1000_max value: 45.3937519555142 - type: nauc_map_at_1000_std value: 10.824778228268581 - type: nauc_map_at_100_diff1 value: -2.2662939752750066 - type: nauc_map_at_100_max value: 45.423960626031366 - type: nauc_map_at_100_std value: 10.804239351738717 - type: nauc_map_at_10_diff1 value: 0.9395752585654343 - type: nauc_map_at_10_max value: 42.53814836940551 - type: nauc_map_at_10_std value: 0.7199313235265218 - type: nauc_map_at_1_diff1 value: 45.19415865267676 - type: nauc_map_at_1_max value: -1.7261947382471912 - type: nauc_map_at_1_std value: -32.16144291613605 - type: nauc_map_at_20_diff1 value: -1.884514152147472 - type: nauc_map_at_20_max value: 44.830401115927174 - type: nauc_map_at_20_std value: 8.118530414377219 - type: nauc_map_at_3_diff1 value: 25.678881127059967 - type: nauc_map_at_3_max value: 12.191400431839758 - type: nauc_map_at_3_std value: -27.201740587642327 - type: nauc_map_at_5_diff1 value: 13.227128780829572 - type: nauc_map_at_5_max value: 26.978282739708977 - type: nauc_map_at_5_std value: -17.555610348070584 - type: nauc_mrr_at_1000_diff1 value: 21.073512437502178 - type: nauc_mrr_at_1000_max value: 64.9680257861005 - type: nauc_mrr_at_1000_std value: 19.626288754404293 - type: nauc_mrr_at_100_diff1 value: 21.074637426957732 - type: nauc_mrr_at_100_max value: 64.97612675661915 - type: nauc_mrr_at_100_std value: 19.649504127800878 - type: nauc_mrr_at_10_diff1 value: 21.12003267626651 - type: nauc_mrr_at_10_max value: 
65.24362289059766 - type: nauc_mrr_at_10_std value: 19.92351276180984 - type: nauc_mrr_at_1_diff1 value: 22.711430629147635 - type: nauc_mrr_at_1_max value: 58.4059429497403 - type: nauc_mrr_at_1_std value: 11.967886722567973 - type: nauc_mrr_at_20_diff1 value: 20.98220830510272 - type: nauc_mrr_at_20_max value: 65.05737535197835 - type: nauc_mrr_at_20_std value: 19.66672900782771 - type: nauc_mrr_at_3_diff1 value: 20.924796220048528 - type: nauc_mrr_at_3_max value: 65.71388669932584 - type: nauc_mrr_at_3_std value: 20.05912197134477 - type: nauc_mrr_at_5_diff1 value: 20.61978649468208 - type: nauc_mrr_at_5_max value: 65.50709154526211 - type: nauc_mrr_at_5_std value: 20.241434276181838 - type: nauc_ndcg_at_1000_diff1 value: 0.25363171946133656 - type: nauc_ndcg_at_1000_max value: 54.12840465309885 - type: nauc_ndcg_at_1000_std value: 20.749184325412546 - type: nauc_ndcg_at_100_diff1 value: 0.15649430250272792 - type: nauc_ndcg_at_100_max value: 54.47995322413234 - type: nauc_ndcg_at_100_std value: 21.266786634233267 - type: nauc_ndcg_at_10_diff1 value: 0.14579250840386346 - type: nauc_ndcg_at_10_max value: 49.8643037948353 - type: nauc_ndcg_at_10_std value: 12.960701643914216 - type: nauc_ndcg_at_1_diff1 value: 22.711430629147635 - type: nauc_ndcg_at_1_max value: 58.4059429497403 - type: nauc_ndcg_at_1_std value: 11.967886722567973 - type: nauc_ndcg_at_20_diff1 value: -0.6701559981776763 - type: nauc_ndcg_at_20_max value: 52.95443437012488 - type: nauc_ndcg_at_20_std value: 16.708883972005758 - type: nauc_ndcg_at_3_diff1 value: -0.19084922341962388 - type: nauc_ndcg_at_3_max value: 46.2110230886874 - type: nauc_ndcg_at_3_std value: 13.363250229683038 - type: nauc_ndcg_at_5_diff1 value: 0.9840019268192548 - type: nauc_ndcg_at_5_max value: 43.56594891798146 - type: nauc_ndcg_at_5_std value: 8.577017104088146 - type: nauc_precision_at_1000_diff1 value: -30.779179091501145 - type: nauc_precision_at_1000_max value: 16.056094258615673 - type: nauc_precision_at_1000_std value: 49.96303902363283 - type: nauc_precision_at_100_diff1 value: -31.583236638899585 - type: nauc_precision_at_100_max value: 19.16571713603373 - type: nauc_precision_at_100_std value: 51.870647903980036 - type: nauc_precision_at_10_diff1 value: -35.62134572732597 - type: nauc_precision_at_10_max value: 31.6935186494612 - type: nauc_precision_at_10_std value: 46.68659723766723 - type: nauc_precision_at_1_diff1 value: 22.711430629147635 - type: nauc_precision_at_1_max value: 58.4059429497403 - type: nauc_precision_at_1_std value: 11.967886722567973 - type: nauc_precision_at_20_diff1 value: -33.875460046920495 - type: nauc_precision_at_20_max value: 24.188420133566442 - type: nauc_precision_at_20_std value: 50.02387762958483 - type: nauc_precision_at_3_diff1 value: -28.875998450906827 - type: nauc_precision_at_3_max value: 44.77058831167941 - type: nauc_precision_at_3_std value: 31.77993710437207 - type: nauc_precision_at_5_diff1 value: -34.92525440306491 - type: nauc_precision_at_5_max value: 39.855219917077086 - type: nauc_precision_at_5_std value: 37.95432046169299 - type: nauc_recall_at_1000_diff1 value: -14.293309371874733 - type: nauc_recall_at_1000_max value: 59.06948692482579 - type: nauc_recall_at_1000_std value: 62.586254868312686 - type: nauc_recall_at_100_diff1 value: -4.344100947212704 - type: nauc_recall_at_100_max value: 58.42120421043602 - type: nauc_recall_at_100_std value: 46.48562009316997 - type: nauc_recall_at_10_diff1 value: 0.04948662912161709 - type: nauc_recall_at_10_max value: 42.42809687119093 - type: 
nauc_recall_at_10_std value: 0.6892504250411409 - type: nauc_recall_at_1_diff1 value: 45.19415865267676 - type: nauc_recall_at_1_max value: -1.7261947382471912 - type: nauc_recall_at_1_std value: -32.16144291613605 - type: nauc_recall_at_20_diff1 value: -7.634587864605111 - type: nauc_recall_at_20_max value: 49.21327187174134 - type: nauc_recall_at_20_std value: 16.408481068336346 - type: nauc_recall_at_3_diff1 value: 24.72546591038644 - type: nauc_recall_at_3_max value: 6.620763400972902 - type: nauc_recall_at_3_std value: -29.994703323331684 - type: nauc_recall_at_5_diff1 value: 12.65527364845842 - type: nauc_recall_at_5_max value: 20.400121385794694 - type: nauc_recall_at_5_std value: -22.34284568447213 - type: ndcg_at_1 value: 84.8 - type: ndcg_at_10 value: 83.134 - type: ndcg_at_100 value: 86.628 - type: ndcg_at_1000 value: 87.151 - type: ndcg_at_20 value: 85.092 - type: ndcg_at_3 value: 81.228 - type: ndcg_at_5 value: 80.2 - type: precision_at_1 value: 84.8 - type: precision_at_10 value: 40.394999999999996 - type: precision_at_100 value: 4.745 - type: precision_at_1000 value: 0.488 - type: precision_at_20 value: 22.245 - type: precision_at_3 value: 73.25 - type: precision_at_5 value: 61.86000000000001 - type: recall_at_1 value: 23.907999999999998 - type: recall_at_10 value: 85.346 - type: recall_at_100 value: 96.515 - type: recall_at_1000 value: 99.156 - type: recall_at_20 value: 91.377 - type: recall_at_3 value: 54.135 - type: recall_at_5 value: 70.488 - task: type: Retrieval dataset: name: MTEB EcomRetrieval (default) type: C-MTEB/EcomRetrieval config: default split: dev revision: 687de13dc7294d6fd9be10c6945f9e8fec8166b9 metrics: - type: main_score value: 60.887 - type: map_at_1 value: 46.6 - type: map_at_10 value: 56.035000000000004 - type: map_at_100 value: 56.741 - type: map_at_1000 value: 56.764 - type: map_at_20 value: 56.513999999999996 - type: map_at_3 value: 53.733 - type: map_at_5 value: 54.913000000000004 - type: mrr_at_1 value: 46.6 - type: mrr_at_10 value: 56.034523809523776 - type: mrr_at_100 value: 56.74056360434383 - type: mrr_at_1000 value: 56.76373487222486 - type: mrr_at_20 value: 56.51374873879128 - type: mrr_at_3 value: 53.73333333333328 - type: mrr_at_5 value: 54.91333333333327 - type: nauc_map_at_1000_diff1 value: 65.13546939953387 - type: nauc_map_at_1000_max value: 43.358890946774494 - type: nauc_map_at_1000_std value: -9.973282105235036 - type: nauc_map_at_100_diff1 value: 65.12449309472493 - type: nauc_map_at_100_max value: 43.377100882923145 - type: nauc_map_at_100_std value: -9.971781228240555 - type: nauc_map_at_10_diff1 value: 64.83020018537475 - type: nauc_map_at_10_max value: 43.25969482323034 - type: nauc_map_at_10_std value: -10.120272176001547 - type: nauc_map_at_1_diff1 value: 69.58727592100516 - type: nauc_map_at_1_max value: 38.236494689522026 - type: nauc_map_at_1_std value: -14.833390831689597 - type: nauc_map_at_20_diff1 value: 65.01159809914586 - type: nauc_map_at_20_max value: 43.33440319829618 - type: nauc_map_at_20_std value: -10.039958228659726 - type: nauc_map_at_3_diff1 value: 65.2396323885909 - type: nauc_map_at_3_max value: 42.26904017378952 - type: nauc_map_at_3_std value: -11.793017036934044 - type: nauc_map_at_5_diff1 value: 64.96397227898036 - type: nauc_map_at_5_max value: 43.231333789145424 - type: nauc_map_at_5_std value: -10.349933732151372 - type: nauc_mrr_at_1000_diff1 value: 65.13546939953387 - type: nauc_mrr_at_1000_max value: 43.358890946774494 - type: nauc_mrr_at_1000_std value: -9.973282105235036 - type: 
nauc_mrr_at_100_diff1 value: 65.12449309472493 - type: nauc_mrr_at_100_max value: 43.377100882923145 - type: nauc_mrr_at_100_std value: -9.971781228240555 - type: nauc_mrr_at_10_diff1 value: 64.83020018537475 - type: nauc_mrr_at_10_max value: 43.25969482323034 - type: nauc_mrr_at_10_std value: -10.120272176001547 - type: nauc_mrr_at_1_diff1 value: 69.58727592100516 - type: nauc_mrr_at_1_max value: 38.236494689522026 - type: nauc_mrr_at_1_std value: -14.833390831689597 - type: nauc_mrr_at_20_diff1 value: 65.01159809914586 - type: nauc_mrr_at_20_max value: 43.33440319829618 - type: nauc_mrr_at_20_std value: -10.039958228659726 - type: nauc_mrr_at_3_diff1 value: 65.2396323885909 - type: nauc_mrr_at_3_max value: 42.26904017378952 - type: nauc_mrr_at_3_std value: -11.793017036934044 - type: nauc_mrr_at_5_diff1 value: 64.96397227898036 - type: nauc_mrr_at_5_max value: 43.231333789145424 - type: nauc_mrr_at_5_std value: -10.349933732151372 - type: nauc_ndcg_at_1000_diff1 value: 64.26802655199876 - type: nauc_ndcg_at_1000_max value: 45.854310744745185 - type: nauc_ndcg_at_1000_std value: -6.184417305204082 - type: nauc_ndcg_at_100_diff1 value: 63.99268329609827 - type: nauc_ndcg_at_100_max value: 46.31270128748375 - type: nauc_ndcg_at_100_std value: -6.1393433180558965 - type: nauc_ndcg_at_10_diff1 value: 62.6735104141137 - type: nauc_ndcg_at_10_max value: 45.54954799462398 - type: nauc_ndcg_at_10_std value: -7.348851199024871 - type: nauc_ndcg_at_1_diff1 value: 69.58727592100516 - type: nauc_ndcg_at_1_max value: 38.236494689522026 - type: nauc_ndcg_at_1_std value: -14.833390831689597 - type: nauc_ndcg_at_20_diff1 value: 63.25899651677274 - type: nauc_ndcg_at_20_max value: 45.952196968886014 - type: nauc_ndcg_at_20_std value: -6.807607465125713 - type: nauc_ndcg_at_3_diff1 value: 63.65618337476822 - type: nauc_ndcg_at_3_max value: 43.507890965228945 - type: nauc_ndcg_at_3_std value: -10.73845622217601 - type: nauc_ndcg_at_5_diff1 value: 63.079162432921855 - type: nauc_ndcg_at_5_max value: 45.38303443868148 - type: nauc_ndcg_at_5_std value: -8.063657824835534 - type: nauc_precision_at_1000_diff1 value: 63.01459977930557 - type: nauc_precision_at_1000_max value: 92.4253034547151 - type: nauc_precision_at_1000_std value: 84.4845513963158 - type: nauc_precision_at_100_diff1 value: 57.17217119405878 - type: nauc_precision_at_100_max value: 80.70049725316484 - type: nauc_precision_at_100_std value: 41.78392287147403 - type: nauc_precision_at_10_diff1 value: 53.115665404390725 - type: nauc_precision_at_10_max value: 55.73825657341263 - type: nauc_precision_at_10_std value: 5.406226305013257 - type: nauc_precision_at_1_diff1 value: 69.58727592100516 - type: nauc_precision_at_1_max value: 38.236494689522026 - type: nauc_precision_at_1_std value: -14.833390831689597 - type: nauc_precision_at_20_diff1 value: 53.77730697622828 - type: nauc_precision_at_20_max value: 61.88170819253054 - type: nauc_precision_at_20_std value: 13.678730470003856 - type: nauc_precision_at_3_diff1 value: 58.580196992291455 - type: nauc_precision_at_3_max value: 47.404834585376626 - type: nauc_precision_at_3_std value: -7.374978769024051 - type: nauc_precision_at_5_diff1 value: 56.44564652606437 - type: nauc_precision_at_5_max value: 53.08973975162324 - type: nauc_precision_at_5_std value: 0.22762700141423803 - type: nauc_recall_at_1000_diff1 value: 63.01459977930565 - type: nauc_recall_at_1000_max value: 92.42530345471532 - type: nauc_recall_at_1000_std value: 84.48455139631602 - type: nauc_recall_at_100_diff1 value: 
57.17217119405904 - type: nauc_recall_at_100_max value: 80.70049725316468 - type: nauc_recall_at_100_std value: 41.783922871474275 - type: nauc_recall_at_10_diff1 value: 53.11566540439087 - type: nauc_recall_at_10_max value: 55.738256573412656 - type: nauc_recall_at_10_std value: 5.406226305013377 - type: nauc_recall_at_1_diff1 value: 69.58727592100516 - type: nauc_recall_at_1_max value: 38.236494689522026 - type: nauc_recall_at_1_std value: -14.833390831689597 - type: nauc_recall_at_20_diff1 value: 53.77730697622846 - type: nauc_recall_at_20_max value: 61.881708192530525 - type: nauc_recall_at_20_std value: 13.678730470003947 - type: nauc_recall_at_3_diff1 value: 58.5801969922914 - type: nauc_recall_at_3_max value: 47.40483458537654 - type: nauc_recall_at_3_std value: -7.37497876902413 - type: nauc_recall_at_5_diff1 value: 56.445646526064394 - type: nauc_recall_at_5_max value: 53.08973975162332 - type: nauc_recall_at_5_std value: 0.22762700141428024 - type: ndcg_at_1 value: 46.6 - type: ndcg_at_10 value: 60.887 - type: ndcg_at_100 value: 64.18199999999999 - type: ndcg_at_1000 value: 64.726 - type: ndcg_at_20 value: 62.614999999999995 - type: ndcg_at_3 value: 56.038 - type: ndcg_at_5 value: 58.150999999999996 - type: precision_at_1 value: 46.6 - type: precision_at_10 value: 7.630000000000001 - type: precision_at_100 value: 0.914 - type: precision_at_1000 value: 0.096 - type: precision_at_20 value: 4.154999999999999 - type: precision_at_3 value: 20.9 - type: precision_at_5 value: 13.56 - type: recall_at_1 value: 46.6 - type: recall_at_10 value: 76.3 - type: recall_at_100 value: 91.4 - type: recall_at_1000 value: 95.6 - type: recall_at_20 value: 83.1 - type: recall_at_3 value: 62.7 - type: recall_at_5 value: 67.80000000000001 - task: type: Classification dataset: name: MTEB EmotionClassification (default) type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 73.29999999999998 - type: f1 value: 67.71473706580302 - type: f1_weighted value: 74.83537255312045 - type: main_score value: 73.29999999999998 - task: type: Retrieval dataset: name: MTEB FEVER (default) type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: map_at_1 value: 78.371 - type: map_at_10 value: 85.762 - type: map_at_100 value: 85.954 - type: map_at_1000 value: 85.966 - type: map_at_20 value: 85.887 - type: map_at_3 value: 84.854 - type: map_at_5 value: 85.408 - type: mrr_at_1 value: 84.443 - type: mrr_at_10 value: 90.432 - type: mrr_at_100 value: 90.483 - type: mrr_at_1000 value: 90.484 - type: mrr_at_20 value: 90.473 - type: mrr_at_3 value: 89.89399999999999 - type: mrr_at_5 value: 90.244 - type: ndcg_at_1 value: 84.443 - type: ndcg_at_10 value: 89.05499999999999 - type: ndcg_at_100 value: 89.68 - type: ndcg_at_1000 value: 89.87899999999999 - type: ndcg_at_20 value: 89.381 - type: ndcg_at_3 value: 87.73100000000001 - type: ndcg_at_5 value: 88.425 - type: precision_at_1 value: 84.443 - type: precision_at_10 value: 10.520999999999999 - type: precision_at_100 value: 1.103 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_20 value: 5.362 - type: precision_at_3 value: 33.198 - type: precision_at_5 value: 20.441000000000003 - type: recall_at_1 value: 78.371 - type: recall_at_10 value: 94.594 - type: recall_at_100 value: 96.97099999999999 - type: recall_at_1000 value: 98.18 - type: recall_at_20 value: 95.707 - type: recall_at_3 value: 90.853 - type: recall_at_5 value: 
92.74799999999999 - type: main_score value: 89.05499999999999 - task: type: Retrieval dataset: name: MTEB FiQA2018 (default) type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: map_at_1 value: 23.810000000000002 - type: map_at_10 value: 39.051 - type: map_at_100 value: 41.231 - type: map_at_1000 value: 41.376000000000005 - type: map_at_20 value: 40.227000000000004 - type: map_at_3 value: 33.915 - type: map_at_5 value: 36.459 - type: mrr_at_1 value: 48.148 - type: mrr_at_10 value: 55.765 - type: mrr_at_100 value: 56.495 - type: mrr_at_1000 value: 56.525999999999996 - type: mrr_at_20 value: 56.213 - type: mrr_at_3 value: 53.086 - type: mrr_at_5 value: 54.513999999999996 - type: ndcg_at_1 value: 48.148 - type: ndcg_at_10 value: 47.349999999999994 - type: ndcg_at_100 value: 54.61899999999999 - type: ndcg_at_1000 value: 56.830000000000005 - type: ndcg_at_20 value: 50.143 - type: ndcg_at_3 value: 43.108000000000004 - type: ndcg_at_5 value: 44.023 - type: precision_at_1 value: 48.148 - type: precision_at_10 value: 13.441 - type: precision_at_100 value: 2.085 - type: precision_at_1000 value: 0.248 - type: precision_at_20 value: 7.870000000000001 - type: precision_at_3 value: 28.909000000000002 - type: precision_at_5 value: 20.957 - type: recall_at_1 value: 23.810000000000002 - type: recall_at_10 value: 54.303000000000004 - type: recall_at_100 value: 81.363 - type: recall_at_1000 value: 94.391 - type: recall_at_20 value: 63.056999999999995 - type: recall_at_3 value: 38.098 - type: recall_at_5 value: 44.414 - type: main_score value: 47.349999999999994 - task: type: Classification dataset: name: MTEB GeoreviewClassification (default) type: ai-forever/georeview-classification config: default split: test revision: 3765c0d1de6b7d264bc459433c45e5a75513839c metrics: - type: accuracy value: 48.0126953125 - type: f1 value: 47.65764016160488 - type: f1_weighted value: 47.65701659482088 - type: main_score value: 48.0126953125 - task: type: Clustering dataset: name: MTEB GeoreviewClusteringP2P (default) type: ai-forever/georeview-clustering-p2p config: default split: test revision: 97a313c8fc85b47f13f33e7e9a95c1ad888c7fec metrics: - type: main_score value: 73.62357853672266 - type: v_measure value: 73.62357853672266 - type: v_measure_std value: 0.5942247545535766 - task: type: Retrieval dataset: name: MTEB GerDaLIR (default) type: jinaai/ger_da_lir config: default split: test revision: 0bb47f1d73827e96964edb84dfe552f62f4fd5eb metrics: - type: main_score value: 16.227 - type: map_at_1 value: 8.082 - type: map_at_10 value: 12.959999999999999 - type: map_at_100 value: 13.923 - type: map_at_1000 value: 14.030999999999999 - type: map_at_20 value: 13.453000000000001 - type: map_at_3 value: 11.018 - type: map_at_5 value: 12.056000000000001 - type: mrr_at_1 value: 8.993332249146203 - type: mrr_at_10 value: 13.994013092850247 - type: mrr_at_100 value: 14.913737673149308 - type: mrr_at_1000 value: 15.00843809934407 - type: mrr_at_20 value: 14.470268462334007 - type: mrr_at_3 value: 12.000596302921846 - type: mrr_at_5 value: 13.070689000921561 - type: nauc_map_at_1000_diff1 value: 28.559639584013286 - type: nauc_map_at_1000_max value: 25.533800126086714 - type: nauc_map_at_1000_std value: 9.826551026628666 - type: nauc_map_at_100_diff1 value: 28.544724499331696 - type: nauc_map_at_100_max value: 25.46734324526386 - type: nauc_map_at_100_std value: 9.739314481785591 - type: nauc_map_at_10_diff1 value: 28.77447517718118 - type: nauc_map_at_10_max value: 
24.7431615237795 - type: nauc_map_at_10_std value: 8.349878188033646 - type: nauc_map_at_1_diff1 value: 37.405452629895514 - type: nauc_map_at_1_max value: 24.444208978394023 - type: nauc_map_at_1_std value: 4.043820373810528 - type: nauc_map_at_20_diff1 value: 28.69764217789062 - type: nauc_map_at_20_max value: 25.111848355996496 - type: nauc_map_at_20_std value: 9.034829905305918 - type: nauc_map_at_3_diff1 value: 30.89053285076882 - type: nauc_map_at_3_max value: 24.862886115911152 - type: nauc_map_at_3_std value: 6.654260832396586 - type: nauc_map_at_5_diff1 value: 29.230629676604263 - type: nauc_map_at_5_max value: 24.374302288018583 - type: nauc_map_at_5_std value: 7.341846952319046 - type: nauc_mrr_at_1000_diff1 value: 28.086147932781426 - type: nauc_mrr_at_1000_max value: 25.98698528264653 - type: nauc_mrr_at_1000_std value: 9.917554348624545 - type: nauc_mrr_at_100_diff1 value: 28.069163279791336 - type: nauc_mrr_at_100_max value: 25.949440010886804 - type: nauc_mrr_at_100_std value: 9.874340979732578 - type: nauc_mrr_at_10_diff1 value: 28.239920869530046 - type: nauc_mrr_at_10_max value: 25.351271409498576 - type: nauc_mrr_at_10_std value: 8.669862759875162 - type: nauc_mrr_at_1_diff1 value: 35.96543040207856 - type: nauc_mrr_at_1_max value: 25.488936487231967 - type: nauc_mrr_at_1_std value: 4.76439131038345 - type: nauc_mrr_at_20_diff1 value: 28.18865871284607 - type: nauc_mrr_at_20_max value: 25.67121763344746 - type: nauc_mrr_at_20_std value: 9.297910707519472 - type: nauc_mrr_at_3_diff1 value: 30.166714199740717 - type: nauc_mrr_at_3_max value: 25.541792491964877 - type: nauc_mrr_at_3_std value: 7.083090296398472 - type: nauc_mrr_at_5_diff1 value: 28.68475284656478 - type: nauc_mrr_at_5_max value: 24.994071363482835 - type: nauc_mrr_at_5_std value: 7.687507254902365 - type: nauc_ndcg_at_1000_diff1 value: 25.292792613586467 - type: nauc_ndcg_at_1000_max value: 29.211905289377178 - type: nauc_ndcg_at_1000_std value: 18.088867467320355 - type: nauc_ndcg_at_100_diff1 value: 25.026905011089152 - type: nauc_ndcg_at_100_max value: 27.98822281254431 - type: nauc_ndcg_at_100_std value: 16.69456904301902 - type: nauc_ndcg_at_10_diff1 value: 25.972279051109503 - type: nauc_ndcg_at_10_max value: 24.86486482734957 - type: nauc_ndcg_at_10_std value: 10.398605822106353 - type: nauc_ndcg_at_1_diff1 value: 36.134710485184826 - type: nauc_ndcg_at_1_max value: 25.384572790326025 - type: nauc_ndcg_at_1_std value: 4.591863033771824 - type: nauc_ndcg_at_20_diff1 value: 25.850033660205536 - type: nauc_ndcg_at_20_max value: 25.944243193140515 - type: nauc_ndcg_at_20_std value: 12.392409721204892 - type: nauc_ndcg_at_3_diff1 value: 29.1966056380018 - type: nauc_ndcg_at_3_max value: 24.978843156259913 - type: nauc_ndcg_at_3_std value: 7.353914459205087 - type: nauc_ndcg_at_5_diff1 value: 26.795315295756282 - type: nauc_ndcg_at_5_max value: 24.1196789150412 - type: nauc_ndcg_at_5_std value: 8.311970988265172 - type: nauc_precision_at_1000_diff1 value: 9.128270550217984 - type: nauc_precision_at_1000_max value: 35.79286915973607 - type: nauc_precision_at_1000_std value: 39.15669472887154 - type: nauc_precision_at_100_diff1 value: 14.770289799034384 - type: nauc_precision_at_100_max value: 34.58262232264337 - type: nauc_precision_at_100_std value: 34.101148102981384 - type: nauc_precision_at_10_diff1 value: 19.899104673118178 - type: nauc_precision_at_10_max value: 26.636940338985625 - type: nauc_precision_at_10_std value: 15.73871357255849 - type: nauc_precision_at_1_diff1 value: 36.134710485184826 - 
type: nauc_precision_at_1_max value: 25.384572790326025 - type: nauc_precision_at_1_std value: 4.591863033771824 - type: nauc_precision_at_20_diff1 value: 19.423457975148942 - type: nauc_precision_at_20_max value: 29.58123490878582 - type: nauc_precision_at_20_std value: 20.847850110821618 - type: nauc_precision_at_3_diff1 value: 24.986416623492918 - type: nauc_precision_at_3_max value: 25.973548400472975 - type: nauc_precision_at_3_std value: 9.486410455972823 - type: nauc_precision_at_5_diff1 value: 21.237741424923332 - type: nauc_precision_at_5_max value: 24.647141028200164 - type: nauc_precision_at_5_std value: 11.102785032334147 - type: nauc_recall_at_1000_diff1 value: 15.999714888817829 - type: nauc_recall_at_1000_max value: 44.34701908906545 - type: nauc_recall_at_1000_std value: 51.13471291594717 - type: nauc_recall_at_100_diff1 value: 17.401714890483706 - type: nauc_recall_at_100_max value: 33.39042631654808 - type: nauc_recall_at_100_std value: 33.944446168451584 - type: nauc_recall_at_10_diff1 value: 20.30036232399894 - type: nauc_recall_at_10_max value: 24.006718284396786 - type: nauc_recall_at_10_std value: 14.049375108518669 - type: nauc_recall_at_1_diff1 value: 37.405452629895514 - type: nauc_recall_at_1_max value: 24.444208978394023 - type: nauc_recall_at_1_std value: 4.043820373810528 - type: nauc_recall_at_20_diff1 value: 20.23582802609045 - type: nauc_recall_at_20_max value: 26.408063410785243 - type: nauc_recall_at_20_std value: 18.617479515468112 - type: nauc_recall_at_3_diff1 value: 25.53221830103098 - type: nauc_recall_at_3_max value: 24.283712329152678 - type: nauc_recall_at_3_std value: 8.428947805841867 - type: nauc_recall_at_5_diff1 value: 21.741499601020823 - type: nauc_recall_at_5_max value: 22.754924586295296 - type: nauc_recall_at_5_std value: 9.966736688169814 - type: ndcg_at_1 value: 8.977 - type: ndcg_at_10 value: 16.227 - type: ndcg_at_100 value: 21.417 - type: ndcg_at_1000 value: 24.451 - type: ndcg_at_20 value: 17.982 - type: ndcg_at_3 value: 12.206999999999999 - type: ndcg_at_5 value: 14.059 - type: precision_at_1 value: 8.977 - type: precision_at_10 value: 2.933 - type: precision_at_100 value: 0.59 - type: precision_at_1000 value: 0.087 - type: precision_at_20 value: 1.8599999999999999 - type: precision_at_3 value: 5.550999999999999 - type: precision_at_5 value: 4.340999999999999 - type: recall_at_1 value: 8.082 - type: recall_at_10 value: 25.52 - type: recall_at_100 value: 50.32 - type: recall_at_1000 value: 74.021 - type: recall_at_20 value: 32.229 - type: recall_at_3 value: 14.66 - type: recall_at_5 value: 19.062 - task: type: Retrieval dataset: name: MTEB GermanDPR (default) type: deepset/germandpr config: default split: test revision: 5129d02422a66be600ac89cd3e8531b4f97d347d metrics: - type: main_score value: 82.422 - type: map_at_1 value: 64.39 - type: map_at_10 value: 77.273 - type: map_at_100 value: 77.375 - type: map_at_1000 value: 77.376 - type: map_at_20 value: 77.351 - type: map_at_3 value: 75.46300000000001 - type: map_at_5 value: 76.878 - type: mrr_at_1 value: 64.19512195121952 - type: mrr_at_10 value: 77.15842044134736 - type: mrr_at_100 value: 77.2604854308704 - type: mrr_at_1000 value: 77.26087882190109 - type: mrr_at_20 value: 77.23572154560611 - type: mrr_at_3 value: 75.34959349593504 - type: mrr_at_5 value: 76.76422764227652 - type: nauc_map_at_1000_diff1 value: 49.73135253389972 - type: nauc_map_at_1000_max value: 8.665570717396145 - type: nauc_map_at_1000_std value: -25.920927572114522 - type: nauc_map_at_100_diff1 value: 
49.729170775336605 - type: nauc_map_at_100_max value: 8.66717979705074 - type: nauc_map_at_100_std value: -25.918338868918596 - type: nauc_map_at_10_diff1 value: 49.708681691445925 - type: nauc_map_at_10_max value: 8.830640635692113 - type: nauc_map_at_10_std value: -25.843238986304858 - type: nauc_map_at_1_diff1 value: 51.750022350988914 - type: nauc_map_at_1_max value: 3.599863010364626 - type: nauc_map_at_1_std value: -27.670122127567314 - type: nauc_map_at_20_diff1 value: 49.72609185887161 - type: nauc_map_at_20_max value: 8.766556053409218 - type: nauc_map_at_20_std value: -25.85975887517904 - type: nauc_map_at_3_diff1 value: 49.328512536255595 - type: nauc_map_at_3_max value: 9.475682028996795 - type: nauc_map_at_3_std value: -26.277349632171017 - type: nauc_map_at_5_diff1 value: 49.42801822186142 - type: nauc_map_at_5_max value: 8.788822474357252 - type: nauc_map_at_5_std value: -25.959260882028573 - type: nauc_mrr_at_1000_diff1 value: 50.13038598302397 - type: nauc_mrr_at_1000_max value: 8.734338637484832 - type: nauc_mrr_at_1000_std value: -26.653343549855908 - type: nauc_mrr_at_100_diff1 value: 50.12820392111392 - type: nauc_mrr_at_100_max value: 8.735940503917966 - type: nauc_mrr_at_100_std value: -26.65074918231251 - type: nauc_mrr_at_10_diff1 value: 50.10567888458267 - type: nauc_mrr_at_10_max value: 8.898451291748575 - type: nauc_mrr_at_10_std value: -26.572046921975655 - type: nauc_mrr_at_1_diff1 value: 52.22769994409465 - type: nauc_mrr_at_1_max value: 3.6490820146062015 - type: nauc_mrr_at_1_std value: -28.535100562320498 - type: nauc_mrr_at_20_diff1 value: 50.12462222100699 - type: nauc_mrr_at_20_max value: 8.83487018268756 - type: nauc_mrr_at_20_std value: -26.591437036958332 - type: nauc_mrr_at_3_diff1 value: 49.6987353700016 - type: nauc_mrr_at_3_max value: 9.531003760756258 - type: nauc_mrr_at_3_std value: -26.949799063124818 - type: nauc_mrr_at_5_diff1 value: 49.823881656376585 - type: nauc_mrr_at_5_max value: 8.850404667985085 - type: nauc_mrr_at_5_std value: -26.680008966088582 - type: nauc_ndcg_at_1000_diff1 value: 49.41721203361181 - type: nauc_ndcg_at_1000_max value: 9.41093067609825 - type: nauc_ndcg_at_1000_std value: -25.499543637737567 - type: nauc_ndcg_at_100_diff1 value: 49.32810419509252 - type: nauc_ndcg_at_100_max value: 9.476216458766897 - type: nauc_ndcg_at_100_std value: -25.393856250990414 - type: nauc_ndcg_at_10_diff1 value: 49.181984436623694 - type: nauc_ndcg_at_10_max value: 10.65234732763274 - type: nauc_ndcg_at_10_std value: -24.737669349012297 - type: nauc_ndcg_at_1_diff1 value: 51.750022350988914 - type: nauc_ndcg_at_1_max value: 3.599863010364626 - type: nauc_ndcg_at_1_std value: -27.670122127567314 - type: nauc_ndcg_at_20_diff1 value: 49.275394594995056 - type: nauc_ndcg_at_20_max value: 10.402059796651923 - type: nauc_ndcg_at_20_std value: -24.82329915806705 - type: nauc_ndcg_at_3_diff1 value: 48.22614352152889 - type: nauc_ndcg_at_3_max value: 11.67464280791404 - type: nauc_ndcg_at_3_std value: -25.867824868234095 - type: nauc_ndcg_at_5_diff1 value: 48.35583502987241 - type: nauc_ndcg_at_5_max value: 10.494278750448451 - type: nauc_ndcg_at_5_std value: -25.11599634172764 - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_100_diff1 value: -56.39478136433852 - type: nauc_precision_at_100_max value: 86.93518577529493 - type: nauc_precision_at_100_std value: 100.0 - type: nauc_precision_at_10_diff1 value: 38.662829729133094 - 
type: nauc_precision_at_10_max value: 56.38018435740605 - type: nauc_precision_at_10_std value: 6.288091897081105 - type: nauc_precision_at_1_diff1 value: 51.750022350988914 - type: nauc_precision_at_1_max value: 3.599863010364626 - type: nauc_precision_at_1_std value: -27.670122127567314 - type: nauc_precision_at_20_diff1 value: 34.739153182429085 - type: nauc_precision_at_20_max value: 84.86908403000989 - type: nauc_precision_at_20_std value: 29.156199421219455 - type: nauc_precision_at_3_diff1 value: 42.09287362529135 - type: nauc_precision_at_3_max value: 23.629152759287074 - type: nauc_precision_at_3_std value: -23.721376911302492 - type: nauc_precision_at_5_diff1 value: 36.03866171924644 - type: nauc_precision_at_5_max value: 29.166173558775327 - type: nauc_precision_at_5_std value: -15.096374563068448 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: -56.39478136433541 - type: nauc_recall_at_100_max value: 86.93518577528111 - type: nauc_recall_at_100_std value: 100.0 - type: nauc_recall_at_10_diff1 value: 38.66282972913384 - type: nauc_recall_at_10_max value: 56.3801843574071 - type: nauc_recall_at_10_std value: 6.288091897082639 - type: nauc_recall_at_1_diff1 value: 51.750022350988914 - type: nauc_recall_at_1_max value: 3.599863010364626 - type: nauc_recall_at_1_std value: -27.670122127567314 - type: nauc_recall_at_20_diff1 value: 34.7391531824321 - type: nauc_recall_at_20_max value: 84.86908403001016 - type: nauc_recall_at_20_std value: 29.156199421220748 - type: nauc_recall_at_3_diff1 value: 42.09287362529107 - type: nauc_recall_at_3_max value: 23.629152759286946 - type: nauc_recall_at_3_std value: -23.72137691130291 - type: nauc_recall_at_5_diff1 value: 36.0386617192469 - type: nauc_recall_at_5_max value: 29.1661735587759 - type: nauc_recall_at_5_std value: -15.09637456306774 - type: ndcg_at_1 value: 64.39 - type: ndcg_at_10 value: 82.422 - type: ndcg_at_100 value: 82.86099999999999 - type: ndcg_at_1000 value: 82.87299999999999 - type: ndcg_at_20 value: 82.67999999999999 - type: ndcg_at_3 value: 78.967 - type: ndcg_at_5 value: 81.50699999999999 - type: precision_at_1 value: 64.39 - type: precision_at_10 value: 9.795 - type: precision_at_100 value: 0.9990000000000001 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.946 - type: precision_at_3 value: 29.691000000000003 - type: precision_at_5 value: 19.044 - type: recall_at_1 value: 64.39 - type: recall_at_10 value: 97.951 - type: recall_at_100 value: 99.902 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 98.92699999999999 - type: recall_at_3 value: 89.07300000000001 - type: recall_at_5 value: 95.22 - task: type: Retrieval dataset: name: MTEB GermanQuAD-Retrieval (default) type: mteb/germanquad-retrieval config: default split: test revision: f5c87ae5a2e7a5106606314eef45255f03151bb3 metrics: - type: main_score value: 94.15532365396247 - type: map_at_1 value: 90.789 - type: map_at_10 value: 94.24 - type: map_at_100 value: 94.283 - type: map_at_1000 value: 94.284 - type: map_at_20 value: 94.272 - type: map_at_3 value: 93.913 - type: map_at_5 value: 94.155 - type: mrr_at_1 value: 90.78947368421053 - type: mrr_at_10 value: 94.23987411056376 - type: mrr_at_100 value: 94.28320936825 - type: mrr_at_1000 value: 94.28350209115848 - type: mrr_at_20 value: 94.271919092559 - type: mrr_at_3 value: 93.91258318209313 - type: mrr_at_5 value: 94.15532365396247 - type: nauc_map_at_1000_diff1 
value: 89.29089310650436 - type: nauc_map_at_1000_max value: 73.83868784032414 - type: nauc_map_at_1000_std value: -11.635778561889989 - type: nauc_map_at_100_diff1 value: 89.29077225707755 - type: nauc_map_at_100_max value: 73.84002740580378 - type: nauc_map_at_100_std value: -11.644096256165092 - type: nauc_map_at_10_diff1 value: 89.29117612292366 - type: nauc_map_at_10_max value: 73.97487984981221 - type: nauc_map_at_10_std value: -11.35191794373827 - type: nauc_map_at_1_diff1 value: 89.35436544117584 - type: nauc_map_at_1_max value: 70.35936815057701 - type: nauc_map_at_1_std value: -13.598996360976903 - type: nauc_map_at_20_diff1 value: 89.2530394052653 - type: nauc_map_at_20_max value: 73.83537529419839 - type: nauc_map_at_20_std value: -11.628272822028478 - type: nauc_map_at_3_diff1 value: 89.375111893546 - type: nauc_map_at_3_max value: 74.78900366026112 - type: nauc_map_at_3_std value: -12.720905253503274 - type: nauc_map_at_5_diff1 value: 89.35358300820893 - type: nauc_map_at_5_max value: 74.31996219723239 - type: nauc_map_at_5_std value: -10.768642638210867 - type: nauc_mrr_at_1000_diff1 value: 89.29089310650436 - type: nauc_mrr_at_1000_max value: 73.83868784032414 - type: nauc_mrr_at_1000_std value: -11.635778561889989 - type: nauc_mrr_at_100_diff1 value: 89.29077225707755 - type: nauc_mrr_at_100_max value: 73.84002740580378 - type: nauc_mrr_at_100_std value: -11.644096256165092 - type: nauc_mrr_at_10_diff1 value: 89.29117612292366 - type: nauc_mrr_at_10_max value: 73.97487984981221 - type: nauc_mrr_at_10_std value: -11.35191794373827 - type: nauc_mrr_at_1_diff1 value: 89.35436544117584 - type: nauc_mrr_at_1_max value: 70.35936815057701 - type: nauc_mrr_at_1_std value: -13.598996360976903 - type: nauc_mrr_at_20_diff1 value: 89.2530394052653 - type: nauc_mrr_at_20_max value: 73.83537529419839 - type: nauc_mrr_at_20_std value: -11.628272822028478 - type: nauc_mrr_at_3_diff1 value: 89.375111893546 - type: nauc_mrr_at_3_max value: 74.78900366026112 - type: nauc_mrr_at_3_std value: -12.720905253503274 - type: nauc_mrr_at_5_diff1 value: 89.35358300820893 - type: nauc_mrr_at_5_max value: 74.31996219723239 - type: nauc_mrr_at_5_std value: -10.768642638210867 - type: nauc_ndcg_at_1000_diff1 value: 89.27620775856863 - type: nauc_ndcg_at_1000_max value: 74.2985757362615 - type: nauc_ndcg_at_1000_std value: -11.236142819703023 - type: nauc_ndcg_at_100_diff1 value: 89.27284787540731 - type: nauc_ndcg_at_100_max value: 74.33539303365968 - type: nauc_ndcg_at_100_std value: -11.469413615851936 - type: nauc_ndcg_at_10_diff1 value: 89.21496710661724 - type: nauc_ndcg_at_10_max value: 75.02035398490516 - type: nauc_ndcg_at_10_std value: -9.903255803665814 - type: nauc_ndcg_at_1_diff1 value: 89.35436544117584 - type: nauc_ndcg_at_1_max value: 70.35936815057701 - type: nauc_ndcg_at_1_std value: -13.598996360976903 - type: nauc_ndcg_at_20_diff1 value: 89.03561289544179 - type: nauc_ndcg_at_20_max value: 74.4006766600049 - type: nauc_ndcg_at_20_std value: -11.129237862587743 - type: nauc_ndcg_at_3_diff1 value: 89.46540193201693 - type: nauc_ndcg_at_3_max value: 76.87093548368378 - type: nauc_ndcg_at_3_std value: -12.484902872086767 - type: nauc_ndcg_at_5_diff1 value: 89.39924941584766 - type: nauc_ndcg_at_5_max value: 75.96975269092722 - type: nauc_ndcg_at_5_std value: -8.180295581144833 - type: nauc_precision_at_1000_diff1 value: 100.0 - type: nauc_precision_at_1000_max value: 100.0 - type: nauc_precision_at_1000_std value: 100.0 - type: nauc_precision_at_100_diff1 value: 86.93074003795302 - type: 
nauc_precision_at_100_max value: 100.0 - type: nauc_precision_at_100_std value: -174.07785375176616 - type: nauc_precision_at_10_diff1 value: 87.43064119412082 - type: nauc_precision_at_10_max value: 90.60785783417448 - type: nauc_precision_at_10_std value: 15.378710059645906 - type: nauc_precision_at_1_diff1 value: 89.35436544117584 - type: nauc_precision_at_1_max value: 70.35936815057701 - type: nauc_precision_at_1_std value: -13.598996360976903 - type: nauc_precision_at_20_diff1 value: 78.78206037685919 - type: nauc_precision_at_20_max value: 82.52264166455923 - type: nauc_precision_at_20_std value: -5.95806599216658 - type: nauc_precision_at_3_diff1 value: 90.12709256456401 - type: nauc_precision_at_3_max value: 90.72678805838154 - type: nauc_precision_at_3_std value: -11.047599315631993 - type: nauc_precision_at_5_diff1 value: 89.9066873566561 - type: nauc_precision_at_5_max value: 93.51571626543664 - type: nauc_precision_at_5_std value: 22.632403279126162 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: 86.93074003793416 - type: nauc_recall_at_100_max value: 100.0 - type: nauc_recall_at_100_std value: -174.07785375175723 - type: nauc_recall_at_10_diff1 value: 87.43064119411991 - type: nauc_recall_at_10_max value: 90.60785783417579 - type: nauc_recall_at_10_std value: 15.378710059643607 - type: nauc_recall_at_1_diff1 value: 89.35436544117584 - type: nauc_recall_at_1_max value: 70.35936815057701 - type: nauc_recall_at_1_std value: -13.598996360976903 - type: nauc_recall_at_20_diff1 value: 78.78206037685645 - type: nauc_recall_at_20_max value: 82.52264166455791 - type: nauc_recall_at_20_std value: -5.958065992168697 - type: nauc_recall_at_3_diff1 value: 90.12709256456463 - type: nauc_recall_at_3_max value: 90.7267880583832 - type: nauc_recall_at_3_std value: -11.047599315631881 - type: nauc_recall_at_5_diff1 value: 89.90668735665676 - type: nauc_recall_at_5_max value: 93.51571626543753 - type: nauc_recall_at_5_std value: 22.632403279126112 - type: ndcg_at_1 value: 90.789 - type: ndcg_at_10 value: 95.46 - type: ndcg_at_100 value: 95.652 - type: ndcg_at_1000 value: 95.659 - type: ndcg_at_20 value: 95.575 - type: ndcg_at_3 value: 94.82000000000001 - type: ndcg_at_5 value: 95.26400000000001 - type: precision_at_1 value: 90.789 - type: precision_at_10 value: 9.908999999999999 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.977 - type: precision_at_3 value: 32.471 - type: precision_at_5 value: 19.701 - type: recall_at_1 value: 90.789 - type: recall_at_10 value: 99.093 - type: recall_at_100 value: 99.955 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 99.546 - type: recall_at_3 value: 97.414 - type: recall_at_5 value: 98.503 - task: type: STS dataset: name: MTEB GermanSTSBenchmark (default) type: jinaai/german-STSbenchmark config: default split: test revision: e36907544d44c3a247898ed81540310442329e20 metrics: - type: cosine_pearson value: 86.55319003300265 - type: cosine_spearman value: 87.50267373081324 - type: euclidean_pearson value: 87.41630636501863 - type: euclidean_spearman value: 88.02170803409365 - type: main_score value: 87.50267373081324 - type: manhattan_pearson value: 87.33703179056744 - type: manhattan_spearman value: 87.99192826922514 - type: pearson value: 86.55319003300265 - type: spearman value: 87.50267373081324 - task: type: Clustering dataset: name: MTEB HALClusteringS2S 
(default) type: lyon-nlp/clustering-hal-s2s config: default split: test revision: e06ebbbb123f8144bef1a5d18796f3dec9ae2915 metrics: - type: main_score value: 27.477557517301303 - type: v_measure value: 27.477557517301303 - type: v_measure_std value: 3.3525736581861336 - task: type: Classification dataset: name: MTEB HeadlineClassification (default) type: ai-forever/headline-classification config: default split: test revision: 2fe05ee6b5832cda29f2ef7aaad7b7fe6a3609eb metrics: - type: accuracy value: 75.0830078125 - type: f1 value: 75.08863209267814 - type: f1_weighted value: 75.08895979060917 - type: main_score value: 75.0830078125 - task: type: Retrieval dataset: name: MTEB HotpotQA (default) type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: map_at_1 value: 38.143 - type: map_at_10 value: 55.916999999999994 - type: map_at_100 value: 56.706 - type: map_at_1000 value: 56.77100000000001 - type: map_at_20 value: 56.367 - type: map_at_3 value: 53.111 - type: map_at_5 value: 54.839000000000006 - type: mrr_at_1 value: 76.286 - type: mrr_at_10 value: 81.879 - type: mrr_at_100 value: 82.09100000000001 - type: mrr_at_1000 value: 82.101 - type: mrr_at_20 value: 82.01 - type: mrr_at_3 value: 80.972 - type: mrr_at_5 value: 81.537 - type: ndcg_at_1 value: 76.286 - type: ndcg_at_10 value: 64.673 - type: ndcg_at_100 value: 67.527 - type: ndcg_at_1000 value: 68.857 - type: ndcg_at_20 value: 65.822 - type: ndcg_at_3 value: 60.616 - type: ndcg_at_5 value: 62.827999999999996 - type: precision_at_1 value: 76.286 - type: precision_at_10 value: 13.196 - type: precision_at_100 value: 1.544 - type: precision_at_1000 value: 0.172 - type: precision_at_20 value: 6.968000000000001 - type: precision_at_3 value: 37.992 - type: precision_at_5 value: 24.54 - type: recall_at_1 value: 38.143 - type: recall_at_10 value: 65.982 - type: recall_at_100 value: 77.225 - type: recall_at_1000 value: 86.077 - type: recall_at_20 value: 69.68299999999999 - type: recall_at_3 value: 56.989000000000004 - type: recall_at_5 value: 61.35 - type: main_score value: 64.673 - task: type: Classification dataset: name: MTEB IFlyTek (default) type: C-MTEB/IFlyTek-classification config: default split: validation revision: 421605374b29664c5fc098418fe20ada9bd55f8a metrics: - type: accuracy value: 41.67756829549827 - type: f1 value: 33.929325579581636 - type: f1_weighted value: 43.03952025643197 - type: main_score value: 41.67756829549827 - task: type: Classification dataset: name: MTEB ImdbClassification (default) type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 91.90440000000001 - type: ap value: 88.78663714603425 - type: ap_weighted value: 88.78663714603425 - type: f1 value: 91.89564361975891 - type: f1_weighted value: 91.89564361975891 - type: main_score value: 91.90440000000001 - task: type: Classification dataset: name: MTEB InappropriatenessClassification (default) type: ai-forever/inappropriateness-classification config: default split: test revision: 601651fdc45ef243751676e62dd7a19f491c0285 metrics: - type: accuracy value: 61.0498046875 - type: ap value: 57.04240566648215 - type: ap_weighted value: 57.04240566648215 - type: f1 value: 60.867630038606954 - type: f1_weighted value: 60.867630038606954 - type: main_score value: 61.0498046875 - task: type: Classification dataset: name: MTEB JDReview (default) type: C-MTEB/JDReview-classification config: default split: test revision: 
b7c64bd89eb87f8ded463478346f76731f07bf8b metrics: - type: accuracy value: 83.50844277673546 - type: ap value: 48.46732380712268 - type: ap_weighted value: 48.46732380712268 - type: f1 value: 77.43967451387445 - type: f1_weighted value: 84.78462929014114 - type: main_score value: 83.50844277673546 - task: type: Classification dataset: name: MTEB KinopoiskClassification (default) type: ai-forever/kinopoisk-sentiment-classification config: default split: test revision: 5911f26666ac11af46cb9c6849d0dc80a378af24 metrics: - type: accuracy value: 62.393333333333324 - type: f1 value: 61.35940129568015 - type: f1_weighted value: 61.35940129568015 - type: main_score value: 62.393333333333324 - task: type: STS dataset: name: MTEB LCQMC (default) type: C-MTEB/LCQMC config: default split: test revision: 17f9b096f80380fce5ed12a9be8be7784b337daf metrics: - type: cosine_pearson value: 67.74375505907872 - type: cosine_spearman value: 75.94582231399434 - type: euclidean_pearson value: 74.52501692443582 - type: euclidean_spearman value: 75.88428434746646 - type: main_score value: 75.94582231399434 - type: manhattan_pearson value: 74.55015441749529 - type: manhattan_spearman value: 75.83288262176175 - type: pearson value: 67.74375505907872 - type: spearman value: 75.94582231399434 - task: type: Retrieval dataset: name: MTEB LEMBNarrativeQARetrieval (default) type: dwzhu/LongEmbed config: default split: test revision: 6e346642246bfb4928c560ee08640dc84d074e8c metrics: - type: map_at_1 value: 23.093 - type: map_at_10 value: 30.227999999999998 - type: map_at_100 value: 31.423000000000002 - type: map_at_1000 value: 31.533 - type: map_at_20 value: 30.835 - type: map_at_3 value: 27.983999999999998 - type: map_at_5 value: 29.253 - type: mrr_at_1 value: 23.093 - type: mrr_at_10 value: 30.227999999999998 - type: mrr_at_100 value: 31.423000000000002 - type: mrr_at_1000 value: 31.533 - type: mrr_at_20 value: 30.835 - type: mrr_at_3 value: 27.983999999999998 - type: mrr_at_5 value: 29.253 - type: ndcg_at_1 value: 23.093 - type: ndcg_at_10 value: 34.297 - type: ndcg_at_100 value: 41.049 - type: ndcg_at_1000 value: 43.566 - type: ndcg_at_20 value: 36.52 - type: ndcg_at_3 value: 29.629 - type: ndcg_at_5 value: 31.926 - type: precision_at_1 value: 23.093 - type: precision_at_10 value: 4.735 - type: precision_at_100 value: 0.8109999999999999 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 2.8080000000000003 - type: precision_at_3 value: 11.468 - type: precision_at_5 value: 8.001 - type: recall_at_1 value: 23.093 - type: recall_at_10 value: 47.354 - type: recall_at_100 value: 81.147 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 56.16799999999999 - type: recall_at_3 value: 34.405 - type: recall_at_5 value: 40.004 - type: main_score value: 34.297 - type: map_at_1 value: 24.361 - type: map_at_10 value: 33.641 - type: map_at_100 value: 35.104 - type: map_at_1000 value: 35.127 - type: map_at_20 value: 34.388999999999996 - type: map_at_3 value: 30.255 - type: map_at_5 value: 32.079 - type: mrr_at_1 value: 24.361 - type: mrr_at_10 value: 33.641 - type: mrr_at_100 value: 35.104 - type: mrr_at_1000 value: 35.127 - type: mrr_at_20 value: 34.388999999999996 - type: mrr_at_3 value: 30.255 - type: mrr_at_5 value: 32.079 - type: ndcg_at_1 value: 24.361 - type: ndcg_at_10 value: 39.337 - type: ndcg_at_100 value: 47.384 - type: ndcg_at_1000 value: 47.75 - type: ndcg_at_20 value: 42.077999999999996 - type: ndcg_at_3 value: 32.235 - type: ndcg_at_5 value: 35.524 - type: precision_at_1 value: 24.361 - type: 
precision_at_10 value: 5.783 - type: precision_at_100 value: 0.975 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 3.435 - type: precision_at_3 value: 12.661 - type: precision_at_5 value: 9.193999999999999 - type: recall_at_1 value: 24.361 - type: recall_at_10 value: 57.826 - type: recall_at_100 value: 97.51100000000001 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 68.697 - type: recall_at_3 value: 37.983 - type: recall_at_5 value: 45.972 - type: main_score value: 39.337 - type: map_at_1 value: 53.667 - type: map_at_10 value: 61.719 - type: map_at_100 value: 62.471 - type: map_at_1000 value: 62.492000000000004 - type: map_at_20 value: 62.153000000000006 - type: map_at_3 value: 59.167 - type: map_at_5 value: 60.95 - type: mrr_at_1 value: 53.667 - type: mrr_at_10 value: 61.719 - type: mrr_at_100 value: 62.471 - type: mrr_at_1000 value: 62.492000000000004 - type: mrr_at_20 value: 62.153000000000006 - type: mrr_at_3 value: 59.167 - type: mrr_at_5 value: 60.95 - type: ndcg_at_1 value: 53.667 - type: ndcg_at_10 value: 66.018 - type: ndcg_at_100 value: 69.726 - type: ndcg_at_1000 value: 70.143 - type: ndcg_at_20 value: 67.61399999999999 - type: ndcg_at_3 value: 60.924 - type: ndcg_at_5 value: 64.10900000000001 - type: precision_at_1 value: 53.667 - type: precision_at_10 value: 7.9670000000000005 - type: precision_at_100 value: 0.97 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.3 - type: precision_at_3 value: 22.0 - type: precision_at_5 value: 14.732999999999999 - type: recall_at_1 value: 53.667 - type: recall_at_10 value: 79.667 - type: recall_at_100 value: 97.0 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 86.0 - type: recall_at_3 value: 66.0 - type: recall_at_5 value: 73.667 - type: main_score value: 66.018 - task: type: Retrieval dataset: name: MTEB LEMBNeedleRetrieval (default) type: dwzhu/LongEmbed config: default split: test_256 revision: 6e346642246bfb4928c560ee08640dc84d074e8c metrics: - type: map_at_1 value: 64.0 - type: map_at_10 value: 77.083 - type: map_at_100 value: 77.265 - type: map_at_1000 value: 77.265 - type: map_at_20 value: 77.265 - type: map_at_3 value: 76.333 - type: map_at_5 value: 76.833 - type: mrr_at_1 value: 64.0 - type: mrr_at_10 value: 77.083 - type: mrr_at_100 value: 77.265 - type: mrr_at_1000 value: 77.265 - type: mrr_at_20 value: 77.265 - type: mrr_at_3 value: 76.333 - type: mrr_at_5 value: 76.833 - type: ndcg_at_1 value: 64.0 - type: ndcg_at_10 value: 82.325 - type: ndcg_at_100 value: 82.883 - type: ndcg_at_1000 value: 82.883 - type: ndcg_at_20 value: 82.883 - type: ndcg_at_3 value: 80.833 - type: ndcg_at_5 value: 81.694 - type: precision_at_1 value: 64.0 - type: precision_at_10 value: 9.8 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 5.0 - type: precision_at_3 value: 31.333 - type: precision_at_5 value: 19.2 - type: recall_at_1 value: 64.0 - type: recall_at_10 value: 98.0 - type: recall_at_100 value: 100.0 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 100.0 - type: recall_at_3 value: 94.0 - type: recall_at_5 value: 96.0 - type: main_score value: 64.0 - type: map_at_1 value: 100.0 - type: map_at_10 value: 100.0 - type: map_at_100 value: 100.0 - type: map_at_1000 value: 100.0 - type: map_at_20 value: 100.0 - type: map_at_3 value: 100.0 - type: map_at_5 value: 100.0 - type: mrr_at_1 value: 100.0 - type: mrr_at_10 value: 100.0 - type: mrr_at_100 value: 100.0 - type: mrr_at_1000 value: 100.0 - type: mrr_at_20 
value: 100.0 - type: mrr_at_3 value: 100.0 - type: mrr_at_5 value: 100.0 - type: ndcg_at_1 value: 100.0 - type: ndcg_at_10 value: 100.0 - type: ndcg_at_100 value: 100.0 - type: ndcg_at_1000 value: 100.0 - type: ndcg_at_20 value: 100.0 - type: ndcg_at_3 value: 100.0 - type: ndcg_at_5 value: 100.0 - type: precision_at_1 value: 100.0 - type: precision_at_10 value: 10.0 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 5.0 - type: precision_at_3 value: 33.333 - type: precision_at_5 value: 20.0 - type: recall_at_1 value: 100.0 - type: recall_at_10 value: 100.0 - type: recall_at_100 value: 100.0 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 100.0 - type: recall_at_3 value: 100.0 - type: recall_at_5 value: 100.0 - type: main_score value: 100.0 - task: type: Retrieval dataset: name: MTEB LEMBSummScreenFDRetrieval (default) type: dwzhu/LongEmbed config: default split: validation revision: 6e346642246bfb4928c560ee08640dc84d074e8c metrics: - type: map_at_1 value: 84.821 - type: map_at_10 value: 90.11200000000001 - type: map_at_100 value: 90.158 - type: map_at_1000 value: 90.158 - type: map_at_20 value: 90.137 - type: map_at_3 value: 89.385 - type: map_at_5 value: 89.876 - type: mrr_at_1 value: 84.821 - type: mrr_at_10 value: 90.11200000000001 - type: mrr_at_100 value: 90.158 - type: mrr_at_1000 value: 90.158 - type: mrr_at_20 value: 90.137 - type: mrr_at_3 value: 89.385 - type: mrr_at_5 value: 89.876 - type: ndcg_at_1 value: 84.821 - type: ndcg_at_10 value: 92.334 - type: ndcg_at_100 value: 92.535 - type: ndcg_at_1000 value: 92.535 - type: ndcg_at_20 value: 92.414 - type: ndcg_at_3 value: 90.887 - type: ndcg_at_5 value: 91.758 - type: precision_at_1 value: 84.821 - type: precision_at_10 value: 9.911 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.97 - type: precision_at_3 value: 31.746000000000002 - type: precision_at_5 value: 19.464000000000002 - type: recall_at_1 value: 84.821 - type: recall_at_10 value: 99.107 - type: recall_at_100 value: 100.0 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 99.405 - type: recall_at_3 value: 95.238 - type: recall_at_5 value: 97.321 - type: main_score value: 92.334 - task: type: Retrieval dataset: name: MTEB MLQARetrieval (deu-deu) type: facebook/mlqa config: deu-deu split: test revision: 397ed406c1a7902140303e7faf60fff35b58d285 metrics: - type: main_score value: 67.548 - type: map_at_1 value: 56.559000000000005 - type: map_at_10 value: 63.867 - type: map_at_100 value: 64.429 - type: map_at_1000 value: 64.457 - type: map_at_20 value: 64.215 - type: map_at_3 value: 62.109 - type: map_at_5 value: 63.101 - type: mrr_at_1 value: 56.56990915134057 - type: mrr_at_10 value: 63.86820789324668 - type: mrr_at_100 value: 64.42973602152581 - type: mrr_at_1000 value: 64.45818598090155 - type: mrr_at_20 value: 64.2163052263868 - type: mrr_at_3 value: 62.10946155550634 - type: mrr_at_5 value: 63.10104143585199 - type: nauc_map_at_1000_diff1 value: 73.78440163370111 - type: nauc_map_at_1000_max value: 66.37875518052162 - type: nauc_map_at_1000_std value: -17.063915098135396 - type: nauc_map_at_100_diff1 value: 73.77180802985815 - type: nauc_map_at_100_max value: 66.38365998362033 - type: nauc_map_at_100_std value: -17.053345109661972 - type: nauc_map_at_10_diff1 value: 73.70041876696037 - type: nauc_map_at_10_max value: 66.33213342705997 - type: nauc_map_at_10_std value: -17.40657791273925 - type: nauc_map_at_1_diff1 value: 
76.8784374396948 - type: nauc_map_at_1_max value: 64.07170606935357 - type: nauc_map_at_1_std value: -18.464213686790654 - type: nauc_map_at_20_diff1 value: 73.72371377231813 - type: nauc_map_at_20_max value: 66.42108121059451 - type: nauc_map_at_20_std value: -17.05384923889036 - type: nauc_map_at_3_diff1 value: 74.08287018839246 - type: nauc_map_at_3_max value: 66.42422337760333 - type: nauc_map_at_3_std value: -17.79503404131652 - type: nauc_map_at_5_diff1 value: 73.9294779027339 - type: nauc_map_at_5_max value: 66.51752041065726 - type: nauc_map_at_5_std value: -17.67309805113804 - type: nauc_mrr_at_1000_diff1 value: 73.78389736923545 - type: nauc_mrr_at_1000_max value: 66.37929720858341 - type: nauc_mrr_at_1000_std value: -17.058591711291278 - type: nauc_mrr_at_100_diff1 value: 73.77126451253136 - type: nauc_mrr_at_100_max value: 66.38405917246607 - type: nauc_mrr_at_100_std value: -17.047251035212863 - type: nauc_mrr_at_10_diff1 value: 73.69960470665124 - type: nauc_mrr_at_10_max value: 66.33265194210313 - type: nauc_mrr_at_10_std value: -17.399659076827998 - type: nauc_mrr_at_1_diff1 value: 76.8689850260726 - type: nauc_mrr_at_1_max value: 64.09858188287487 - type: nauc_mrr_at_1_std value: -18.46064784201847 - type: nauc_mrr_at_20_diff1 value: 73.72312682063128 - type: nauc_mrr_at_20_max value: 66.42181932858745 - type: nauc_mrr_at_20_std value: -17.04690257511092 - type: nauc_mrr_at_3_diff1 value: 74.08287018839246 - type: nauc_mrr_at_3_max value: 66.42422337760333 - type: nauc_mrr_at_3_std value: -17.79503404131652 - type: nauc_mrr_at_5_diff1 value: 73.9294779027339 - type: nauc_mrr_at_5_max value: 66.51752041065726 - type: nauc_mrr_at_5_std value: -17.67309805113804 - type: nauc_ndcg_at_1000_diff1 value: 72.97825548342801 - type: nauc_ndcg_at_1000_max value: 66.96275437178257 - type: nauc_ndcg_at_1000_std value: -15.611902299641587 - type: nauc_ndcg_at_100_diff1 value: 72.58724738936613 - type: nauc_ndcg_at_100_max value: 67.16774012704182 - type: nauc_ndcg_at_100_std value: -14.945088654796812 - type: nauc_ndcg_at_10_diff1 value: 72.16253640477947 - type: nauc_ndcg_at_10_max value: 67.01746849484621 - type: nauc_ndcg_at_10_std value: -16.46102507270809 - type: nauc_ndcg_at_1_diff1 value: 76.8689850260726 - type: nauc_ndcg_at_1_max value: 64.09858188287487 - type: nauc_ndcg_at_1_std value: -18.46064784201847 - type: nauc_ndcg_at_20_diff1 value: 72.19995325129975 - type: nauc_ndcg_at_20_max value: 67.39639713797962 - type: nauc_ndcg_at_20_std value: -15.091689370748531 - type: nauc_ndcg_at_3_diff1 value: 73.13123604206514 - type: nauc_ndcg_at_3_max value: 67.23123167871547 - type: nauc_ndcg_at_3_std value: -17.492755234009156 - type: nauc_ndcg_at_5_diff1 value: 72.8154718929895 - type: nauc_ndcg_at_5_max value: 67.44578008373777 - type: nauc_ndcg_at_5_std value: -17.251840358751362 - type: nauc_precision_at_1000_diff1 value: 47.89748325983604 - type: nauc_precision_at_1000_max value: 70.47466197804906 - type: nauc_precision_at_1000_std value: 72.66193512114775 - type: nauc_precision_at_100_diff1 value: 59.493743734005356 - type: nauc_precision_at_100_max value: 74.02140147220713 - type: nauc_precision_at_100_std value: 17.26664098026236 - type: nauc_precision_at_10_diff1 value: 64.94415011040277 - type: nauc_precision_at_10_max value: 69.6963814950747 - type: nauc_precision_at_10_std value: -11.663043657012954 - type: nauc_precision_at_1_diff1 value: 76.8689850260726 - type: nauc_precision_at_1_max value: 64.09858188287487 - type: nauc_precision_at_1_std value: -18.46064784201847 
- type: nauc_precision_at_20_diff1 value: 63.145886909986416 - type: nauc_precision_at_20_max value: 72.95708033630744 - type: nauc_precision_at_20_std value: -1.5039593629280323 - type: nauc_precision_at_3_diff1 value: 69.88902201644449 - type: nauc_precision_at_3_max value: 69.80499971089935 - type: nauc_precision_at_3_std value: -16.444680766676647 - type: nauc_precision_at_5_diff1 value: 68.60869967062919 - type: nauc_precision_at_5_max value: 70.75998207564281 - type: nauc_precision_at_5_std value: -15.62613396998262 - type: nauc_recall_at_1000_diff1 value: 62.6646436338833 - type: nauc_recall_at_1000_max value: 86.17801636476078 - type: nauc_recall_at_1000_std value: 71.84718775540334 - type: nauc_recall_at_100_diff1 value: 61.110492191439505 - type: nauc_recall_at_100_max value: 75.45730686603042 - type: nauc_recall_at_100_std value: 16.202465011589428 - type: nauc_recall_at_10_diff1 value: 65.1522196516815 - type: nauc_recall_at_10_max value: 69.7626435962161 - type: nauc_recall_at_10_std value: -11.801178474770449 - type: nauc_recall_at_1_diff1 value: 76.8784374396948 - type: nauc_recall_at_1_max value: 64.07170606935357 - type: nauc_recall_at_1_std value: -18.464213686790654 - type: nauc_recall_at_20_diff1 value: 63.40332739504143 - type: nauc_recall_at_20_max value: 73.04113661090965 - type: nauc_recall_at_20_std value: -1.6609741140266947 - type: nauc_recall_at_3_diff1 value: 70.03728086098866 - type: nauc_recall_at_3_max value: 69.85953774320521 - type: nauc_recall_at_3_std value: -16.482993123411706 - type: nauc_recall_at_5_diff1 value: 68.77396121765933 - type: nauc_recall_at_5_max value: 70.8231205493519 - type: nauc_recall_at_5_std value: -15.668037770700863 - type: ndcg_at_1 value: 56.57 - type: ndcg_at_10 value: 67.548 - type: ndcg_at_100 value: 70.421 - type: ndcg_at_1000 value: 71.198 - type: ndcg_at_20 value: 68.829 - type: ndcg_at_3 value: 63.88700000000001 - type: ndcg_at_5 value: 65.689 - type: precision_at_1 value: 56.57 - type: precision_at_10 value: 7.922 - type: precision_at_100 value: 0.9299999999999999 - type: precision_at_1000 value: 0.099 - type: precision_at_20 value: 4.216 - type: precision_at_3 value: 23.015 - type: precision_at_5 value: 14.691 - type: recall_at_1 value: 56.559000000000005 - type: recall_at_10 value: 79.182 - type: recall_at_100 value: 92.946 - type: recall_at_1000 value: 99.092 - type: recall_at_20 value: 84.27900000000001 - type: recall_at_3 value: 69.023 - type: recall_at_5 value: 73.432 - task: type: Retrieval dataset: name: MTEB MLQARetrieval (deu-spa) type: facebook/mlqa config: deu-spa split: test revision: 397ed406c1a7902140303e7faf60fff35b58d285 metrics: - type: main_score value: 70.645 - type: map_at_1 value: 58.423 - type: map_at_10 value: 66.613 - type: map_at_100 value: 67.14099999999999 - type: map_at_1000 value: 67.161 - type: map_at_20 value: 66.965 - type: map_at_3 value: 64.714 - type: map_at_5 value: 65.835 - type: mrr_at_1 value: 58.4225352112676 - type: mrr_at_10 value: 66.61321260898735 - type: mrr_at_100 value: 67.13991570812132 - type: mrr_at_1000 value: 67.1598532168174 - type: mrr_at_20 value: 66.96384710024888 - type: mrr_at_3 value: 64.71361502347425 - type: mrr_at_5 value: 65.83474178403769 - type: nauc_map_at_1000_diff1 value: 73.9485117118935 - type: nauc_map_at_1000_max value: 65.74479869396299 - type: nauc_map_at_1000_std value: -20.300269749495563 - type: nauc_map_at_100_diff1 value: 73.93900406302829 - type: nauc_map_at_100_max value: 65.75508449194885 - type: nauc_map_at_100_std value: 
-20.265330791570175 - type: nauc_map_at_10_diff1 value: 73.84863233472605 - type: nauc_map_at_10_max value: 65.89377317378211 - type: nauc_map_at_10_std value: -20.404123131964695 - type: nauc_map_at_1_diff1 value: 76.73627284218519 - type: nauc_map_at_1_max value: 62.94957512510876 - type: nauc_map_at_1_std value: -20.99649749330682 - type: nauc_map_at_20_diff1 value: 73.88712006109598 - type: nauc_map_at_20_max value: 65.82057018162664 - type: nauc_map_at_20_std value: -20.269476512431915 - type: nauc_map_at_3_diff1 value: 74.21419190161502 - type: nauc_map_at_3_max value: 65.64993368062119 - type: nauc_map_at_3_std value: -21.34641749007071 - type: nauc_map_at_5_diff1 value: 74.0119419385777 - type: nauc_map_at_5_max value: 65.69809416369732 - type: nauc_map_at_5_std value: -21.16901556082261 - type: nauc_mrr_at_1000_diff1 value: 73.94915184134923 - type: nauc_mrr_at_1000_max value: 65.74522469633418 - type: nauc_mrr_at_1000_std value: -20.303028367132246 - type: nauc_mrr_at_100_diff1 value: 73.93964394728808 - type: nauc_mrr_at_100_max value: 65.75550992323707 - type: nauc_mrr_at_100_std value: -20.26808820438918 - type: nauc_mrr_at_10_diff1 value: 73.84863233472605 - type: nauc_mrr_at_10_max value: 65.89377317378211 - type: nauc_mrr_at_10_std value: -20.404123131964695 - type: nauc_mrr_at_1_diff1 value: 76.73627284218519 - type: nauc_mrr_at_1_max value: 62.94957512510876 - type: nauc_mrr_at_1_std value: -20.99649749330682 - type: nauc_mrr_at_20_diff1 value: 73.88775721128745 - type: nauc_mrr_at_20_max value: 65.820991355628 - type: nauc_mrr_at_20_std value: -20.272216587019734 - type: nauc_mrr_at_3_diff1 value: 74.21419190161502 - type: nauc_mrr_at_3_max value: 65.64993368062119 - type: nauc_mrr_at_3_std value: -21.34641749007071 - type: nauc_mrr_at_5_diff1 value: 74.0119419385777 - type: nauc_mrr_at_5_max value: 65.69809416369732 - type: nauc_mrr_at_5_std value: -21.16901556082261 - type: nauc_ndcg_at_1000_diff1 value: 73.29396365944277 - type: nauc_ndcg_at_1000_max value: 66.44879592109541 - type: nauc_ndcg_at_1000_std value: -19.285991058788195 - type: nauc_ndcg_at_100_diff1 value: 73.0159172721162 - type: nauc_ndcg_at_100_max value: 66.76216389231388 - type: nauc_ndcg_at_100_std value: -18.27931368094887 - type: nauc_ndcg_at_10_diff1 value: 72.42096650774693 - type: nauc_ndcg_at_10_max value: 67.48592688463306 - type: nauc_ndcg_at_10_std value: -18.91453756077581 - type: nauc_ndcg_at_1_diff1 value: 76.73627284218519 - type: nauc_ndcg_at_1_max value: 62.94957512510876 - type: nauc_ndcg_at_1_std value: -20.99649749330682 - type: nauc_ndcg_at_20_diff1 value: 72.53699362385684 - type: nauc_ndcg_at_20_max value: 67.22763976357872 - type: nauc_ndcg_at_20_std value: -18.299910635008338 - type: nauc_ndcg_at_3_diff1 value: 73.3698453761989 - type: nauc_ndcg_at_3_max value: 66.71056987289383 - type: nauc_ndcg_at_3_std value: -21.405154376652803 - type: nauc_ndcg_at_5_diff1 value: 72.9491030712935 - type: nauc_ndcg_at_5_max value: 66.85786103137077 - type: nauc_ndcg_at_5_std value: -21.04005053344073 - type: nauc_precision_at_1000_diff1 value: 17.02462370967451 - type: nauc_precision_at_1000_max value: 48.03260752496052 - type: nauc_precision_at_1000_std value: 87.56077915079334 - type: nauc_precision_at_100_diff1 value: 58.590352501194985 - type: nauc_precision_at_100_max value: 78.2649015433222 - type: nauc_precision_at_100_std value: 28.05030453158992 - type: nauc_precision_at_10_diff1 value: 64.89497928764766 - type: nauc_precision_at_10_max value: 75.93257124951242 - type: 
nauc_precision_at_10_std value: -9.825306994117462 - type: nauc_precision_at_1_diff1 value: 76.73627284218519 - type: nauc_precision_at_1_max value: 62.94957512510876 - type: nauc_precision_at_1_std value: -20.99649749330682 - type: nauc_precision_at_20_diff1 value: 62.11366204321558 - type: nauc_precision_at_20_max value: 75.9571427846493 - type: nauc_precision_at_20_std value: -0.94585212808191 - type: nauc_precision_at_3_diff1 value: 70.52940972112398 - type: nauc_precision_at_3_max value: 70.3402053170779 - type: nauc_precision_at_3_std value: -21.579778424241304 - type: nauc_precision_at_5_diff1 value: 68.78962580223575 - type: nauc_precision_at_5_max value: 71.41410894398376 - type: nauc_precision_at_5_std value: -20.415603405161956 - type: nauc_recall_at_1000_diff1 value: 55.88625447348128 - type: nauc_recall_at_1000_max value: 100.0 - type: nauc_recall_at_1000_std value: 100.0 - type: nauc_recall_at_100_diff1 value: 61.17942268389525 - type: nauc_recall_at_100_max value: 81.12207841563487 - type: nauc_recall_at_100_std value: 27.141215257528113 - type: nauc_recall_at_10_diff1 value: 64.8949792876478 - type: nauc_recall_at_10_max value: 75.93257124951249 - type: nauc_recall_at_10_std value: -9.825306994117323 - type: nauc_recall_at_1_diff1 value: 76.73627284218519 - type: nauc_recall_at_1_max value: 62.94957512510876 - type: nauc_recall_at_1_std value: -20.99649749330682 - type: nauc_recall_at_20_diff1 value: 63.07808719241162 - type: nauc_recall_at_20_max value: 76.96808746317542 - type: nauc_recall_at_20_std value: -1.5235053258631275 - type: nauc_recall_at_3_diff1 value: 70.52940972112405 - type: nauc_recall_at_3_max value: 70.3402053170779 - type: nauc_recall_at_3_std value: -21.57977842424124 - type: nauc_recall_at_5_diff1 value: 68.78962580223575 - type: nauc_recall_at_5_max value: 71.41410894398392 - type: nauc_recall_at_5_std value: -20.415603405161793 - type: ndcg_at_1 value: 58.423 - type: ndcg_at_10 value: 70.645 - type: ndcg_at_100 value: 73.277 - type: ndcg_at_1000 value: 73.785 - type: ndcg_at_20 value: 71.918 - type: ndcg_at_3 value: 66.679 - type: ndcg_at_5 value: 68.72200000000001 - type: precision_at_1 value: 58.423 - type: precision_at_10 value: 8.338 - type: precision_at_100 value: 0.959 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.423 - type: precision_at_3 value: 24.113 - type: precision_at_5 value: 15.47 - type: recall_at_1 value: 58.423 - type: recall_at_10 value: 83.38 - type: recall_at_100 value: 95.887 - type: recall_at_1000 value: 99.831 - type: recall_at_20 value: 88.39399999999999 - type: recall_at_3 value: 72.33800000000001 - type: recall_at_5 value: 77.352 - task: type: Retrieval dataset: name: MTEB MLQARetrieval (deu-eng) type: facebook/mlqa config: deu-eng split: test revision: 397ed406c1a7902140303e7faf60fff35b58d285 metrics: - type: main_score value: 67.067 - type: map_at_1 value: 55.861000000000004 - type: map_at_10 value: 63.42100000000001 - type: map_at_100 value: 64.03 - type: map_at_1000 value: 64.05999999999999 - type: map_at_20 value: 63.819 - type: map_at_3 value: 61.773 - type: map_at_5 value: 62.736999999999995 - type: mrr_at_1 value: 55.88300465322402 - type: mrr_at_10 value: 63.43111082973707 - type: mrr_at_100 value: 64.03962373590272 - type: mrr_at_1000 value: 64.0698259866376 - type: mrr_at_20 value: 63.82871766489112 - type: mrr_at_3 value: 61.78447448112865 - type: mrr_at_5 value: 62.74835659945346 - type: nauc_map_at_1000_diff1 value: 74.58505763417352 - type: nauc_map_at_1000_max value: 66.26060764852198 
- type: nauc_map_at_1000_std value: -16.896178230873897 - type: nauc_map_at_100_diff1 value: 74.57057487892857 - type: nauc_map_at_100_max value: 66.26600433283826 - type: nauc_map_at_100_std value: -16.87596113104189 - type: nauc_map_at_10_diff1 value: 74.53453636322749 - type: nauc_map_at_10_max value: 66.27501737773804 - type: nauc_map_at_10_std value: -17.178743257781775 - type: nauc_map_at_1_diff1 value: 77.63067209375254 - type: nauc_map_at_1_max value: 64.17718675702672 - type: nauc_map_at_1_std value: -17.639521106853717 - type: nauc_map_at_20_diff1 value: 74.52007402431164 - type: nauc_map_at_20_max value: 66.28276291359268 - type: nauc_map_at_20_std value: -16.939292897754758 - type: nauc_map_at_3_diff1 value: 74.79187974631951 - type: nauc_map_at_3_max value: 66.23256568210611 - type: nauc_map_at_3_std value: -17.894889918934112 - type: nauc_map_at_5_diff1 value: 74.63011328882517 - type: nauc_map_at_5_max value: 66.35411054978499 - type: nauc_map_at_5_std value: -17.50140342194211 - type: nauc_mrr_at_1000_diff1 value: 74.57520089771667 - type: nauc_mrr_at_1000_max value: 66.27270912845914 - type: nauc_mrr_at_1000_std value: -16.84012675362397 - type: nauc_mrr_at_100_diff1 value: 74.56070964572156 - type: nauc_mrr_at_100_max value: 66.2780701126926 - type: nauc_mrr_at_100_std value: -16.820035083069865 - type: nauc_mrr_at_10_diff1 value: 74.52455978435117 - type: nauc_mrr_at_10_max value: 66.28697244023137 - type: nauc_mrr_at_10_std value: -17.122477723330523 - type: nauc_mrr_at_1_diff1 value: 77.60643512422061 - type: nauc_mrr_at_1_max value: 64.21736966061896 - type: nauc_mrr_at_1_std value: -17.56627338275146 - type: nauc_mrr_at_20_diff1 value: 74.5099814266373 - type: nauc_mrr_at_20_max value: 66.29485560556576 - type: nauc_mrr_at_20_std value: -16.882350027335306 - type: nauc_mrr_at_3_diff1 value: 74.78132817375507 - type: nauc_mrr_at_3_max value: 66.24761860047623 - type: nauc_mrr_at_3_std value: -17.833128575678998 - type: nauc_mrr_at_5_diff1 value: 74.6193031207433 - type: nauc_mrr_at_5_max value: 66.36951764432901 - type: nauc_mrr_at_5_std value: -17.438203106324227 - type: nauc_ndcg_at_1000_diff1 value: 73.79386161629151 - type: nauc_ndcg_at_1000_max value: 66.84013038018082 - type: nauc_ndcg_at_1000_std value: -15.387358822700667 - type: nauc_ndcg_at_100_diff1 value: 73.36132885277745 - type: nauc_ndcg_at_100_max value: 67.04416926901568 - type: nauc_ndcg_at_100_std value: -14.503256942521972 - type: nauc_ndcg_at_10_diff1 value: 73.11847332785027 - type: nauc_ndcg_at_10_max value: 67.02149621303091 - type: nauc_ndcg_at_10_std value: -16.142234662067782 - type: nauc_ndcg_at_1_diff1 value: 77.60643512422061 - type: nauc_ndcg_at_1_max value: 64.21736966061896 - type: nauc_ndcg_at_1_std value: -17.56627338275146 - type: nauc_ndcg_at_20_diff1 value: 72.97961452569768 - type: nauc_ndcg_at_20_max value: 67.12369127081152 - type: nauc_ndcg_at_20_std value: -15.11921773223936 - type: nauc_ndcg_at_3_diff1 value: 73.77769312598772 - type: nauc_ndcg_at_3_max value: 66.94438755852309 - type: nauc_ndcg_at_3_std value: -17.75960443830741 - type: nauc_ndcg_at_5_diff1 value: 73.43991209562891 - type: nauc_ndcg_at_5_max value: 67.21682951737418 - type: nauc_ndcg_at_5_std value: -17.013510008231805 - type: nauc_precision_at_1000_diff1 value: 51.30633281948362 - type: nauc_precision_at_1000_max value: 76.78675288883846 - type: nauc_precision_at_1000_std value: 71.70041985304397 - type: nauc_precision_at_100_diff1 value: 59.86656455853326 - type: nauc_precision_at_100_max value: 
74.41958422732161 - type: nauc_precision_at_100_std value: 22.098920296069124 - type: nauc_precision_at_10_diff1 value: 66.4696166928741 - type: nauc_precision_at_10_max value: 69.88463108697104 - type: nauc_precision_at_10_std value: -10.707950954702742 - type: nauc_precision_at_1_diff1 value: 77.60643512422061 - type: nauc_precision_at_1_max value: 64.21736966061896 - type: nauc_precision_at_1_std value: -17.56627338275146 - type: nauc_precision_at_20_diff1 value: 63.45094585276983 - type: nauc_precision_at_20_max value: 71.57741245347195 - type: nauc_precision_at_20_std value: -2.2211545419051744 - type: nauc_precision_at_3_diff1 value: 70.28060818081384 - type: nauc_precision_at_3_max value: 69.22652927816439 - type: nauc_precision_at_3_std value: -17.158576243559434 - type: nauc_precision_at_5_diff1 value: 68.90765418427162 - type: nauc_precision_at_5_max value: 70.32585273389111 - type: nauc_precision_at_5_std value: -14.950363729664524 - type: nauc_recall_at_1000_diff1 value: 65.11255117927331 - type: nauc_recall_at_1000_max value: 88.35641213283338 - type: nauc_recall_at_1000_std value: 69.89792573640547 - type: nauc_recall_at_100_diff1 value: 61.46376457272238 - type: nauc_recall_at_100_max value: 75.48265142243015 - type: nauc_recall_at_100_std value: 21.223182712042178 - type: nauc_recall_at_10_diff1 value: 66.89353375308997 - type: nauc_recall_at_10_max value: 70.06655416883785 - type: nauc_recall_at_10_std value: -11.100871879439435 - type: nauc_recall_at_1_diff1 value: 77.63067209375254 - type: nauc_recall_at_1_max value: 64.17718675702672 - type: nauc_recall_at_1_std value: -17.639521106853717 - type: nauc_recall_at_20_diff1 value: 63.98532276331878 - type: nauc_recall_at_20_max value: 71.81562599791899 - type: nauc_recall_at_20_std value: -2.696537977147695 - type: nauc_recall_at_3_diff1 value: 70.4507655865698 - type: nauc_recall_at_3_max value: 69.25705030141037 - type: nauc_recall_at_3_std value: -17.299948348202836 - type: nauc_recall_at_5_diff1 value: 69.09152857901888 - type: nauc_recall_at_5_max value: 70.35609636026405 - type: nauc_recall_at_5_std value: -15.105012139255896 - type: ndcg_at_1 value: 55.883 - type: ndcg_at_10 value: 67.067 - type: ndcg_at_100 value: 70.07 - type: ndcg_at_1000 value: 70.875 - type: ndcg_at_20 value: 68.498 - type: ndcg_at_3 value: 63.666 - type: ndcg_at_5 value: 65.40599999999999 - type: precision_at_1 value: 55.883 - type: precision_at_10 value: 7.8549999999999995 - type: precision_at_100 value: 0.928 - type: precision_at_1000 value: 0.099 - type: precision_at_20 value: 4.2090000000000005 - type: precision_at_3 value: 23.052 - type: precision_at_5 value: 14.677999999999999 - type: recall_at_1 value: 55.861000000000004 - type: recall_at_10 value: 78.495 - type: recall_at_100 value: 92.688 - type: recall_at_1000 value: 99.02499999999999 - type: recall_at_20 value: 84.124 - type: recall_at_3 value: 69.123 - type: recall_at_5 value: 73.355 - task: type: Retrieval dataset: name: MTEB MLQARetrieval (spa-deu) type: facebook/mlqa config: spa-deu split: test revision: 397ed406c1a7902140303e7faf60fff35b58d285 metrics: - type: main_score value: 73.90299999999999 - type: map_at_1 value: 61.236000000000004 - type: map_at_10 value: 69.88799999999999 - type: map_at_100 value: 70.319 - type: map_at_1000 value: 70.341 - type: map_at_20 value: 70.16799999999999 - type: map_at_3 value: 68.104 - type: map_at_5 value: 69.164 - type: mrr_at_1 value: 61.2739571589628 - type: mrr_at_10 value: 69.92589162684993 - type: mrr_at_100 value: 70.35245455509234 - 
type: mrr_at_1000 value: 70.37438351396742 - type: mrr_at_20 value: 70.20247469915404 - type: mrr_at_3 value: 68.14167606163099 - type: mrr_at_5 value: 69.20142803457354 - type: nauc_map_at_1000_diff1 value: 74.70416754842327 - type: nauc_map_at_1000_max value: 65.86915994583384 - type: nauc_map_at_1000_std value: -19.04437483534443 - type: nauc_map_at_100_diff1 value: 74.70011798058674 - type: nauc_map_at_100_max value: 65.88507779167188 - type: nauc_map_at_100_std value: -19.018670970643786 - type: nauc_map_at_10_diff1 value: 74.6362126804427 - type: nauc_map_at_10_max value: 66.05733054427198 - type: nauc_map_at_10_std value: -19.034317737897354 - type: nauc_map_at_1_diff1 value: 77.24970536833601 - type: nauc_map_at_1_max value: 62.07820573048406 - type: nauc_map_at_1_std value: -20.917086586335078 - type: nauc_map_at_20_diff1 value: 74.64113920401083 - type: nauc_map_at_20_max value: 65.89991740166793 - type: nauc_map_at_20_std value: -19.09987515041243 - type: nauc_map_at_3_diff1 value: 74.6518162332119 - type: nauc_map_at_3_max value: 66.10312348194024 - type: nauc_map_at_3_std value: -18.95881457716116 - type: nauc_map_at_5_diff1 value: 74.55141020670321 - type: nauc_map_at_5_max value: 65.94345752979342 - type: nauc_map_at_5_std value: -19.453976877992304 - type: nauc_mrr_at_1000_diff1 value: 74.64458488344088 - type: nauc_mrr_at_1000_max value: 65.84575328456057 - type: nauc_mrr_at_1000_std value: -18.901614615119904 - type: nauc_mrr_at_100_diff1 value: 74.64058497924627 - type: nauc_mrr_at_100_max value: 65.86170461767928 - type: nauc_mrr_at_100_std value: -18.87601697091505 - type: nauc_mrr_at_10_diff1 value: 74.57266634464752 - type: nauc_mrr_at_10_max value: 66.03331587645152 - type: nauc_mrr_at_10_std value: -18.87888060105393 - type: nauc_mrr_at_1_diff1 value: 77.19578272647183 - type: nauc_mrr_at_1_max value: 62.05252035478773 - type: nauc_mrr_at_1_std value: -20.790530940625267 - type: nauc_mrr_at_20_diff1 value: 74.5808171250021 - type: nauc_mrr_at_20_max value: 65.87643606587798 - type: nauc_mrr_at_20_std value: -18.95476583474199 - type: nauc_mrr_at_3_diff1 value: 74.5917053289191 - type: nauc_mrr_at_3_max value: 66.08044079438714 - type: nauc_mrr_at_3_std value: -18.81168463163586 - type: nauc_mrr_at_5_diff1 value: 74.48934579694608 - type: nauc_mrr_at_5_max value: 65.91993162383771 - type: nauc_mrr_at_5_std value: -19.302710791338797 - type: nauc_ndcg_at_1000_diff1 value: 74.20191283992186 - type: nauc_ndcg_at_1000_max value: 66.60831175771229 - type: nauc_ndcg_at_1000_std value: -18.175208725175484 - type: nauc_ndcg_at_100_diff1 value: 74.07713451642955 - type: nauc_ndcg_at_100_max value: 67.02028626335476 - type: nauc_ndcg_at_100_std value: -17.36560972181693 - type: nauc_ndcg_at_10_diff1 value: 73.63235521598476 - type: nauc_ndcg_at_10_max value: 67.8118473312638 - type: nauc_ndcg_at_10_std value: -17.647560577355915 - type: nauc_ndcg_at_1_diff1 value: 77.19578272647183 - type: nauc_ndcg_at_1_max value: 62.05252035478773 - type: nauc_ndcg_at_1_std value: -20.790530940625267 - type: nauc_ndcg_at_20_diff1 value: 73.65300308228291 - type: nauc_ndcg_at_20_max value: 67.18353402731985 - type: nauc_ndcg_at_20_std value: -17.9240756389792 - type: nauc_ndcg_at_3_diff1 value: 73.73764900202292 - type: nauc_ndcg_at_3_max value: 67.60840957876889 - type: nauc_ndcg_at_3_std value: -17.962667543518933 - type: nauc_ndcg_at_5_diff1 value: 73.49040500302092 - type: nauc_ndcg_at_5_max value: 67.41251918514402 - type: nauc_ndcg_at_5_std value: -18.851877225955523 - type: 
nauc_precision_at_1000_diff1 value: -18.652906102973922 - type: nauc_precision_at_1000_max value: 2.1701672475574885 - type: nauc_precision_at_1000_std value: 61.713411950188835 - type: nauc_precision_at_100_diff1 value: 62.37565302288498 - type: nauc_precision_at_100_max value: 76.96921843049006 - type: nauc_precision_at_100_std value: 19.152009040219678 - type: nauc_precision_at_10_diff1 value: 68.14047344105212 - type: nauc_precision_at_10_max value: 77.7177273849099 - type: nauc_precision_at_10_std value: -9.124325941493698 - type: nauc_precision_at_1_diff1 value: 77.19578272647183 - type: nauc_precision_at_1_max value: 62.05252035478773 - type: nauc_precision_at_1_std value: -20.790530940625267 - type: nauc_precision_at_20_diff1 value: 65.38487456362745 - type: nauc_precision_at_20_max value: 74.61122933443669 - type: nauc_precision_at_20_std value: -8.129775929648341 - type: nauc_precision_at_3_diff1 value: 70.45937744142297 - type: nauc_precision_at_3_max value: 73.03004233073901 - type: nauc_precision_at_3_std value: -14.246554579025158 - type: nauc_precision_at_5_diff1 value: 69.02821772428955 - type: nauc_precision_at_5_max value: 73.52949774726446 - type: nauc_precision_at_5_std value: -16.355747231517757 - type: nauc_recall_at_1000_diff1 value: 35.804192824985755 - type: nauc_recall_at_1000_max value: 61.367785756485894 - type: nauc_recall_at_1000_std value: 54.01380822466869 - type: nauc_recall_at_100_diff1 value: 67.96210883597479 - type: nauc_recall_at_100_max value: 82.38124823732169 - type: nauc_recall_at_100_std value: 16.814922595309966 - type: nauc_recall_at_10_diff1 value: 68.21964459634341 - type: nauc_recall_at_10_max value: 77.68301934858845 - type: nauc_recall_at_10_std value: -9.430792913885066 - type: nauc_recall_at_1_diff1 value: 77.24970536833601 - type: nauc_recall_at_1_max value: 62.07820573048406 - type: nauc_recall_at_1_std value: -20.917086586335078 - type: nauc_recall_at_20_diff1 value: 66.60569906579487 - type: nauc_recall_at_20_max value: 75.66163186604354 - type: nauc_recall_at_20_std value: -9.09826205489828 - type: nauc_recall_at_3_diff1 value: 70.52323701841641 - type: nauc_recall_at_3_max value: 73.03478107411232 - type: nauc_recall_at_3_std value: -14.432325989967962 - type: nauc_recall_at_5_diff1 value: 69.08521261524373 - type: nauc_recall_at_5_max value: 73.51150270382094 - type: nauc_recall_at_5_std value: -16.569387503524368 - type: ndcg_at_1 value: 61.273999999999994 - type: ndcg_at_10 value: 73.90299999999999 - type: ndcg_at_100 value: 75.983 - type: ndcg_at_1000 value: 76.488 - type: ndcg_at_20 value: 74.921 - type: ndcg_at_3 value: 70.277 - type: ndcg_at_5 value: 72.172 - type: precision_at_1 value: 61.273999999999994 - type: precision_at_10 value: 8.641 - type: precision_at_100 value: 0.962 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.524 - type: precision_at_3 value: 25.517 - type: precision_at_5 value: 16.223000000000003 - type: recall_at_1 value: 61.236000000000004 - type: recall_at_10 value: 86.37700000000001 - type: recall_at_100 value: 96.054 - type: recall_at_1000 value: 99.887 - type: recall_at_20 value: 90.398 - type: recall_at_3 value: 76.51299999999999 - type: recall_at_5 value: 81.07900000000001 - task: type: Retrieval dataset: name: MTEB MLQARetrieval (spa-spa) type: facebook/mlqa config: spa-spa split: test revision: 397ed406c1a7902140303e7faf60fff35b58d285 metrics: - type: main_score value: 68.632 - type: map_at_1 value: 57.046 - type: map_at_10 value: 64.869 - type: map_at_100 value: 65.384 - 
type: map_at_1000 value: 65.413 - type: map_at_20 value: 65.185 - type: map_at_3 value: 63.178 - type: map_at_5 value: 64.12 - type: mrr_at_1 value: 57.05579889544848 - type: mrr_at_10 value: 64.8806425382317 - type: mrr_at_100 value: 65.39469233244084 - type: mrr_at_1000 value: 65.42342199403159 - type: mrr_at_20 value: 65.19634815919534 - type: mrr_at_3 value: 63.18796419729591 - type: mrr_at_5 value: 64.13159398209874 - type: nauc_map_at_1000_diff1 value: 73.23803038674018 - type: nauc_map_at_1000_max value: 67.44156201421714 - type: nauc_map_at_1000_std value: -8.60143026450049 - type: nauc_map_at_100_diff1 value: 73.22575613034235 - type: nauc_map_at_100_max value: 67.44735143420195 - type: nauc_map_at_100_std value: -8.576905069492895 - type: nauc_map_at_10_diff1 value: 73.11950129610865 - type: nauc_map_at_10_max value: 67.45107232305055 - type: nauc_map_at_10_std value: -8.799837857015392 - type: nauc_map_at_1_diff1 value: 76.18354072047988 - type: nauc_map_at_1_max value: 65.03342186728786 - type: nauc_map_at_1_std value: -10.867650288695796 - type: nauc_map_at_20_diff1 value: 73.21570748770948 - type: nauc_map_at_20_max value: 67.50340321088724 - type: nauc_map_at_20_std value: -8.594057184944676 - type: nauc_map_at_3_diff1 value: 73.17239276163892 - type: nauc_map_at_3_max value: 67.06319504819103 - type: nauc_map_at_3_std value: -9.883216310270528 - type: nauc_map_at_5_diff1 value: 73.11913507367727 - type: nauc_map_at_5_max value: 67.27497019567078 - type: nauc_map_at_5_std value: -9.497714822103118 - type: nauc_mrr_at_1000_diff1 value: 73.22971233311306 - type: nauc_mrr_at_1000_max value: 67.42977229057223 - type: nauc_mrr_at_1000_std value: -8.550068702273297 - type: nauc_mrr_at_100_diff1 value: 73.21744467317815 - type: nauc_mrr_at_100_max value: 67.43557491068093 - type: nauc_mrr_at_100_std value: -8.52559275190607 - type: nauc_mrr_at_10_diff1 value: 73.11075619726137 - type: nauc_mrr_at_10_max value: 67.43889760205286 - type: nauc_mrr_at_10_std value: -8.74617232559183 - type: nauc_mrr_at_1_diff1 value: 76.17529975949547 - type: nauc_mrr_at_1_max value: 65.02401127001608 - type: nauc_mrr_at_1_std value: -10.817814457633952 - type: nauc_mrr_at_20_diff1 value: 73.20689275225138 - type: nauc_mrr_at_20_max value: 67.49111752272192 - type: nauc_mrr_at_20_std value: -8.539827528410353 - type: nauc_mrr_at_3_diff1 value: 73.16291729623958 - type: nauc_mrr_at_3_max value: 67.05300993427998 - type: nauc_mrr_at_3_std value: -9.827915885680811 - type: nauc_mrr_at_5_diff1 value: 73.11055686484109 - type: nauc_mrr_at_5_max value: 67.26299851089122 - type: nauc_mrr_at_5_std value: -9.445190276650903 - type: nauc_ndcg_at_1000_diff1 value: 72.58833638407177 - type: nauc_ndcg_at_1000_max value: 68.10447506371374 - type: nauc_ndcg_at_1000_std value: -6.910306241546282 - type: nauc_ndcg_at_100_diff1 value: 72.24524849631476 - type: nauc_ndcg_at_100_max value: 68.30659210081238 - type: nauc_ndcg_at_100_std value: -6.04305364268931 - type: nauc_ndcg_at_10_diff1 value: 71.87363502582961 - type: nauc_ndcg_at_10_max value: 68.5010009653693 - type: nauc_ndcg_at_10_std value: -7.021281296450588 - type: nauc_ndcg_at_1_diff1 value: 76.17529975949547 - type: nauc_ndcg_at_1_max value: 65.02401127001608 - type: nauc_ndcg_at_1_std value: -10.817814457633952 - type: nauc_ndcg_at_20_diff1 value: 72.21241010439327 - type: nauc_ndcg_at_20_max value: 68.71743274030551 - type: nauc_ndcg_at_20_std value: -6.186629577195946 - type: nauc_ndcg_at_3_diff1 value: 72.08204674794459 - type: nauc_ndcg_at_3_max value: 
67.5958365046156 - type: nauc_ndcg_at_3_std value: -9.576418336610345 - type: nauc_ndcg_at_5_diff1 value: 71.93179095844508 - type: nauc_ndcg_at_5_max value: 68.01914639754217 - type: nauc_ndcg_at_5_std value: -8.833768332910777 - type: nauc_precision_at_1000_diff1 value: 63.0051360227489 - type: nauc_precision_at_1000_max value: 79.93532442313229 - type: nauc_precision_at_1000_std value: 52.869517607133254 - type: nauc_precision_at_100_diff1 value: 62.43301501857154 - type: nauc_precision_at_100_max value: 75.57280416668183 - type: nauc_precision_at_100_std value: 26.758300486132747 - type: nauc_precision_at_10_diff1 value: 66.29806375971134 - type: nauc_precision_at_10_max value: 73.40301413754797 - type: nauc_precision_at_10_std value: 1.9858547295235462 - type: nauc_precision_at_1_diff1 value: 76.17529975949547 - type: nauc_precision_at_1_max value: 65.02401127001608 - type: nauc_precision_at_1_std value: -10.817814457633952 - type: nauc_precision_at_20_diff1 value: 67.05111836051105 - type: nauc_precision_at_20_max value: 76.09783190824155 - type: nauc_precision_at_20_std value: 9.906010659515564 - type: nauc_precision_at_3_diff1 value: 68.44186679250453 - type: nauc_precision_at_3_max value: 69.30301351119388 - type: nauc_precision_at_3_std value: -8.566522518882348 - type: nauc_precision_at_5_diff1 value: 67.51737199297388 - type: nauc_precision_at_5_max value: 70.75887601590472 - type: nauc_precision_at_5_std value: -6.278983102710238 - type: nauc_recall_at_1000_diff1 value: 65.12360093170948 - type: nauc_recall_at_1000_max value: 82.60209843191132 - type: nauc_recall_at_1000_std value: 51.740179583368636 - type: nauc_recall_at_100_diff1 value: 62.82007697326819 - type: nauc_recall_at_100_max value: 76.04844844677562 - type: nauc_recall_at_100_std value: 26.4678415019248 - type: nauc_recall_at_10_diff1 value: 66.28557566848767 - type: nauc_recall_at_10_max value: 73.40302709828738 - type: nauc_recall_at_10_std value: 1.9224272854613582 - type: nauc_recall_at_1_diff1 value: 76.18354072047988 - type: nauc_recall_at_1_max value: 65.03342186728786 - type: nauc_recall_at_1_std value: -10.867650288695796 - type: nauc_recall_at_20_diff1 value: 67.03430451094992 - type: nauc_recall_at_20_max value: 76.09474005171319 - type: nauc_recall_at_20_std value: 9.815888637851074 - type: nauc_recall_at_3_diff1 value: 68.44411411344718 - type: nauc_recall_at_3_max value: 69.30502737137265 - type: nauc_recall_at_3_std value: -8.629526329714132 - type: nauc_recall_at_5_diff1 value: 67.51469265953514 - type: nauc_recall_at_5_max value: 70.76969893818111 - type: nauc_recall_at_5_std value: -6.325600167105444 - type: ndcg_at_1 value: 57.056 - type: ndcg_at_10 value: 68.632 - type: ndcg_at_100 value: 71.202 - type: ndcg_at_1000 value: 71.97099999999999 - type: ndcg_at_20 value: 69.785 - type: ndcg_at_3 value: 65.131 - type: ndcg_at_5 value: 66.834 - type: precision_at_1 value: 57.056 - type: precision_at_10 value: 8.044 - type: precision_at_100 value: 0.9259999999999999 - type: precision_at_1000 value: 0.099 - type: precision_at_20 value: 4.251 - type: precision_at_3 value: 23.589 - type: precision_at_5 value: 14.984 - type: recall_at_1 value: 57.046 - type: recall_at_10 value: 80.423 - type: recall_at_100 value: 92.582 - type: recall_at_1000 value: 98.638 - type: recall_at_20 value: 84.993 - type: recall_at_3 value: 70.758 - type: recall_at_5 value: 74.9 - task: type: Retrieval dataset: name: MTEB MLQARetrieval (spa-eng) type: facebook/mlqa config: spa-eng split: test revision: 
397ed406c1a7902140303e7faf60fff35b58d285 metrics: - type: main_score value: 68.765 - type: map_at_1 value: 56.538999999999994 - type: map_at_10 value: 64.816 - type: map_at_100 value: 65.325 - type: map_at_1000 value: 65.352 - type: map_at_20 value: 65.113 - type: map_at_3 value: 62.934999999999995 - type: map_at_5 value: 64.063 - type: mrr_at_1 value: 56.539120502569965 - type: mrr_at_10 value: 64.81561556661505 - type: mrr_at_100 value: 65.32464238613954 - type: mrr_at_1000 value: 65.35206516602133 - type: mrr_at_20 value: 65.11270445292227 - type: mrr_at_3 value: 62.935465448315384 - type: mrr_at_5 value: 64.06339234723022 - type: nauc_map_at_1000_diff1 value: 73.20701050428072 - type: nauc_map_at_1000_max value: 67.32797480614404 - type: nauc_map_at_1000_std value: -6.211540626528362 - type: nauc_map_at_100_diff1 value: 73.19497683923063 - type: nauc_map_at_100_max value: 67.33392646467817 - type: nauc_map_at_100_std value: -6.196671563900051 - type: nauc_map_at_10_diff1 value: 73.16010547612956 - type: nauc_map_at_10_max value: 67.37793741307372 - type: nauc_map_at_10_std value: -6.3443240322521675 - type: nauc_map_at_1_diff1 value: 76.63696578575964 - type: nauc_map_at_1_max value: 65.08189618178105 - type: nauc_map_at_1_std value: -8.594195451782733 - type: nauc_map_at_20_diff1 value: 73.15233479381568 - type: nauc_map_at_20_max value: 67.3679607256072 - type: nauc_map_at_20_std value: -6.175928265286352 - type: nauc_map_at_3_diff1 value: 73.14853380980746 - type: nauc_map_at_3_max value: 67.10354198073468 - type: nauc_map_at_3_std value: -7.409679815529866 - type: nauc_map_at_5_diff1 value: 73.13425961877715 - type: nauc_map_at_5_max value: 67.22452899371224 - type: nauc_map_at_5_std value: -6.895257774506354 - type: nauc_mrr_at_1000_diff1 value: 73.20701050428072 - type: nauc_mrr_at_1000_max value: 67.32797480614404 - type: nauc_mrr_at_1000_std value: -6.211540626528362 - type: nauc_mrr_at_100_diff1 value: 73.19497683923063 - type: nauc_mrr_at_100_max value: 67.33392646467817 - type: nauc_mrr_at_100_std value: -6.196671563900051 - type: nauc_mrr_at_10_diff1 value: 73.16010547612956 - type: nauc_mrr_at_10_max value: 67.37793741307372 - type: nauc_mrr_at_10_std value: -6.3443240322521675 - type: nauc_mrr_at_1_diff1 value: 76.63696578575964 - type: nauc_mrr_at_1_max value: 65.08189618178105 - type: nauc_mrr_at_1_std value: -8.594195451782733 - type: nauc_mrr_at_20_diff1 value: 73.15233479381568 - type: nauc_mrr_at_20_max value: 67.3679607256072 - type: nauc_mrr_at_20_std value: -6.175928265286352 - type: nauc_mrr_at_3_diff1 value: 73.14853380980746 - type: nauc_mrr_at_3_max value: 67.10354198073468 - type: nauc_mrr_at_3_std value: -7.409679815529866 - type: nauc_mrr_at_5_diff1 value: 73.13425961877715 - type: nauc_mrr_at_5_max value: 67.22452899371224 - type: nauc_mrr_at_5_std value: -6.895257774506354 - type: nauc_ndcg_at_1000_diff1 value: 72.44364625096874 - type: nauc_ndcg_at_1000_max value: 67.93635761141552 - type: nauc_ndcg_at_1000_std value: -4.616429464350954 - type: nauc_ndcg_at_100_diff1 value: 72.11352383758482 - type: nauc_ndcg_at_100_max value: 68.1627312575955 - type: nauc_ndcg_at_100_std value: -3.894213672131282 - type: nauc_ndcg_at_10_diff1 value: 71.8526850770812 - type: nauc_ndcg_at_10_max value: 68.41366561888562 - type: nauc_ndcg_at_10_std value: -4.472146861145989 - type: nauc_ndcg_at_1_diff1 value: 76.63696578575964 - type: nauc_ndcg_at_1_max value: 65.08189618178105 - type: nauc_ndcg_at_1_std value: -8.594195451782733 - type: nauc_ndcg_at_20_diff1 value: 
71.76464418138866 - type: nauc_ndcg_at_20_max value: 68.41174963313698 - type: nauc_ndcg_at_20_std value: -3.7449762037540157 - type: nauc_ndcg_at_3_diff1 value: 71.93808990683131 - type: nauc_ndcg_at_3_max value: 67.7010029507334 - type: nauc_ndcg_at_3_std value: -6.971858419379321 - type: nauc_ndcg_at_5_diff1 value: 71.8505224811326 - type: nauc_ndcg_at_5_max value: 67.97139549500251 - type: nauc_ndcg_at_5_std value: -5.958491308070017 - type: nauc_precision_at_1000_diff1 value: 62.20956180320043 - type: nauc_precision_at_1000_max value: 82.53412670611299 - type: nauc_precision_at_1000_std value: 55.57278124999575 - type: nauc_precision_at_100_diff1 value: 62.03792857023201 - type: nauc_precision_at_100_max value: 76.77130713424538 - type: nauc_precision_at_100_std value: 26.674102719959564 - type: nauc_precision_at_10_diff1 value: 65.89798055049931 - type: nauc_precision_at_10_max value: 73.41908620140674 - type: nauc_precision_at_10_std value: 5.21818573283179 - type: nauc_precision_at_1_diff1 value: 76.63696578575964 - type: nauc_precision_at_1_max value: 65.08189618178105 - type: nauc_precision_at_1_std value: -8.594195451782733 - type: nauc_precision_at_20_diff1 value: 63.734308542647355 - type: nauc_precision_at_20_max value: 74.69578825096144 - type: nauc_precision_at_20_std value: 12.627842502659162 - type: nauc_precision_at_3_diff1 value: 67.91189666671904 - type: nauc_precision_at_3_max value: 69.64986036783209 - type: nauc_precision_at_3_std value: -5.505669087429055 - type: nauc_precision_at_5_diff1 value: 67.01880006360248 - type: nauc_precision_at_5_max value: 70.78916423358686 - type: nauc_precision_at_5_std value: -2.2273742736401045 - type: nauc_recall_at_1000_diff1 value: 62.20956180319936 - type: nauc_recall_at_1000_max value: 82.53412670611287 - type: nauc_recall_at_1000_std value: 55.57278124999549 - type: nauc_recall_at_100_diff1 value: 62.03792857023208 - type: nauc_recall_at_100_max value: 76.77130713424577 - type: nauc_recall_at_100_std value: 26.67410271995973 - type: nauc_recall_at_10_diff1 value: 65.8979805504994 - type: nauc_recall_at_10_max value: 73.41908620140678 - type: nauc_recall_at_10_std value: 5.2181857328318655 - type: nauc_recall_at_1_diff1 value: 76.63696578575964 - type: nauc_recall_at_1_max value: 65.08189618178105 - type: nauc_recall_at_1_std value: -8.594195451782733 - type: nauc_recall_at_20_diff1 value: 63.734308542647334 - type: nauc_recall_at_20_max value: 74.69578825096123 - type: nauc_recall_at_20_std value: 12.627842502658982 - type: nauc_recall_at_3_diff1 value: 67.91189666671897 - type: nauc_recall_at_3_max value: 69.64986036783203 - type: nauc_recall_at_3_std value: -5.505669087428989 - type: nauc_recall_at_5_diff1 value: 67.01880006360243 - type: nauc_recall_at_5_max value: 70.78916423358686 - type: nauc_recall_at_5_std value: -2.227374273640135 - type: ndcg_at_1 value: 56.538999999999994 - type: ndcg_at_10 value: 68.765 - type: ndcg_at_100 value: 71.314 - type: ndcg_at_1000 value: 72.038 - type: ndcg_at_20 value: 69.828 - type: ndcg_at_3 value: 64.937 - type: ndcg_at_5 value: 66.956 - type: precision_at_1 value: 56.538999999999994 - type: precision_at_10 value: 8.113 - type: precision_at_100 value: 0.932 - type: precision_at_1000 value: 0.099 - type: precision_at_20 value: 4.265 - type: precision_at_3 value: 23.567 - type: precision_at_5 value: 15.115 - type: recall_at_1 value: 56.538999999999994 - type: recall_at_10 value: 81.135 - type: recall_at_100 value: 93.223 - type: recall_at_1000 value: 98.896 - type: recall_at_20 value: 
85.304 - type: recall_at_3 value: 70.702 - type: recall_at_5 value: 75.576 - task: type: Retrieval dataset: name: MTEB MLQARetrieval (eng-deu) type: facebook/mlqa config: eng-deu split: test revision: 397ed406c1a7902140303e7faf60fff35b58d285 metrics: - type: main_score value: 69.298 - type: map_at_1 value: 58.553 - type: map_at_10 value: 65.769 - type: map_at_100 value: 66.298 - type: map_at_1000 value: 66.328 - type: map_at_20 value: 66.101 - type: map_at_3 value: 64.048 - type: map_at_5 value: 65.09 - type: mrr_at_1 value: 58.564148016840235 - type: mrr_at_10 value: 65.7685997066675 - type: mrr_at_100 value: 66.29874034432214 - type: mrr_at_1000 value: 66.32844979939088 - type: mrr_at_20 value: 66.10120513957821 - type: mrr_at_3 value: 64.04830489696437 - type: mrr_at_5 value: 65.08974074894746 - type: nauc_map_at_1000_diff1 value: 76.8409650183994 - type: nauc_map_at_1000_max value: 71.86367015521367 - type: nauc_map_at_1000_std value: -14.464881539957256 - type: nauc_map_at_100_diff1 value: 76.82536521842064 - type: nauc_map_at_100_max value: 71.86811127965429 - type: nauc_map_at_100_std value: -14.441105539722244 - type: nauc_map_at_10_diff1 value: 76.75522453447859 - type: nauc_map_at_10_max value: 71.87677500176706 - type: nauc_map_at_10_std value: -14.741331625103559 - type: nauc_map_at_1_diff1 value: 79.64060747740989 - type: nauc_map_at_1_max value: 69.84278563569617 - type: nauc_map_at_1_std value: -15.936904929655832 - type: nauc_map_at_20_diff1 value: 76.78894776059715 - type: nauc_map_at_20_max value: 71.89637938044827 - type: nauc_map_at_20_std value: -14.500564106990769 - type: nauc_map_at_3_diff1 value: 77.20562577450342 - type: nauc_map_at_3_max value: 71.80578229361525 - type: nauc_map_at_3_std value: -15.344134588512201 - type: nauc_map_at_5_diff1 value: 77.00480147367867 - type: nauc_map_at_5_max value: 71.98335924076163 - type: nauc_map_at_5_std value: -15.16537653041026 - type: nauc_mrr_at_1000_diff1 value: 76.84165367691193 - type: nauc_mrr_at_1000_max value: 71.8642679499795 - type: nauc_mrr_at_1000_std value: -14.461717954593158 - type: nauc_mrr_at_100_diff1 value: 76.8263363557998 - type: nauc_mrr_at_100_max value: 71.86874522368626 - type: nauc_mrr_at_100_std value: -14.437105168707426 - type: nauc_mrr_at_10_diff1 value: 76.75522453447859 - type: nauc_mrr_at_10_max value: 71.87677500176706 - type: nauc_mrr_at_10_std value: -14.741331625103559 - type: nauc_mrr_at_1_diff1 value: 79.65642669321981 - type: nauc_mrr_at_1_max value: 69.89135358784799 - type: nauc_mrr_at_1_std value: -15.919357002229589 - type: nauc_mrr_at_20_diff1 value: 76.78883171270601 - type: nauc_mrr_at_20_max value: 71.89806887245291 - type: nauc_mrr_at_20_std value: -14.497139746907905 - type: nauc_mrr_at_3_diff1 value: 77.20562577450342 - type: nauc_mrr_at_3_max value: 71.80578229361525 - type: nauc_mrr_at_3_std value: -15.344134588512201 - type: nauc_mrr_at_5_diff1 value: 77.00480147367867 - type: nauc_mrr_at_5_max value: 71.98335924076163 - type: nauc_mrr_at_5_std value: -15.16537653041026 - type: nauc_ndcg_at_1000_diff1 value: 76.07802417817047 - type: nauc_ndcg_at_1000_max value: 72.31792804426776 - type: nauc_ndcg_at_1000_std value: -13.049160715132244 - type: nauc_ndcg_at_100_diff1 value: 75.63343849116544 - type: nauc_ndcg_at_100_max value: 72.48362076101817 - type: nauc_ndcg_at_100_std value: -12.089600993516777 - type: nauc_ndcg_at_10_diff1 value: 75.23387929929208 - type: nauc_ndcg_at_10_max value: 72.51436288271807 - type: nauc_ndcg_at_10_std value: -13.624132103038104 - type: 
nauc_ndcg_at_1_diff1 value: 79.65642669321981 - type: nauc_ndcg_at_1_max value: 69.89135358784799 - type: nauc_ndcg_at_1_std value: -15.919357002229589 - type: nauc_ndcg_at_20_diff1 value: 75.32926047656296 - type: nauc_ndcg_at_20_max value: 72.61254165918145 - type: nauc_ndcg_at_20_std value: -12.683157599238701 - type: nauc_ndcg_at_3_diff1 value: 76.3089337665469 - type: nauc_ndcg_at_3_max value: 72.40014674426054 - type: nauc_ndcg_at_3_std value: -15.08624226353458 - type: nauc_ndcg_at_5_diff1 value: 75.88857331641834 - type: nauc_ndcg_at_5_max value: 72.7719386827224 - type: nauc_ndcg_at_5_std value: -14.70546521089236 - type: nauc_precision_at_1000_diff1 value: 59.66563879069911 - type: nauc_precision_at_1000_max value: 74.57123562956772 - type: nauc_precision_at_1000_std value: 58.61396866718965 - type: nauc_precision_at_100_diff1 value: 62.8695896550042 - type: nauc_precision_at_100_max value: 77.81408796785 - type: nauc_precision_at_100_std value: 23.819735672317826 - type: nauc_precision_at_10_diff1 value: 68.08051625224569 - type: nauc_precision_at_10_max value: 75.14432336036869 - type: nauc_precision_at_10_std value: -7.97602345252735 - type: nauc_precision_at_1_diff1 value: 79.65642669321981 - type: nauc_precision_at_1_max value: 69.89135358784799 - type: nauc_precision_at_1_std value: -15.919357002229589 - type: nauc_precision_at_20_diff1 value: 66.7168005185165 - type: nauc_precision_at_20_max value: 76.58522761697147 - type: nauc_precision_at_20_std value: -0.17923428317323292 - type: nauc_precision_at_3_diff1 value: 73.23394851561207 - type: nauc_precision_at_3_max value: 74.32517846819215 - type: nauc_precision_at_3_std value: -14.142301336188348 - type: nauc_precision_at_5_diff1 value: 71.5666882547012 - type: nauc_precision_at_5_max value: 75.71098205440033 - type: nauc_precision_at_5_std value: -12.808362513638052 - type: nauc_recall_at_1000_diff1 value: 71.73736112325805 - type: nauc_recall_at_1000_max value: 86.70743436225898 - type: nauc_recall_at_1000_std value: 54.45802578371167 - type: nauc_recall_at_100_diff1 value: 64.07053861428128 - type: nauc_recall_at_100_max value: 78.8348308099261 - type: nauc_recall_at_100_std value: 22.72263677785103 - type: nauc_recall_at_10_diff1 value: 68.20272901407903 - type: nauc_recall_at_10_max value: 75.16315335381938 - type: nauc_recall_at_10_std value: -8.060716748913386 - type: nauc_recall_at_1_diff1 value: 79.64060747740989 - type: nauc_recall_at_1_max value: 69.84278563569617 - type: nauc_recall_at_1_std value: -15.936904929655832 - type: nauc_recall_at_20_diff1 value: 66.88206981973654 - type: nauc_recall_at_20_max value: 76.54824917595687 - type: nauc_recall_at_20_std value: -0.40294589316962287 - type: nauc_recall_at_3_diff1 value: 73.33076087258938 - type: nauc_recall_at_3_max value: 74.33763112508771 - type: nauc_recall_at_3_std value: -14.213355414905399 - type: nauc_recall_at_5_diff1 value: 71.67487623469464 - type: nauc_recall_at_5_max value: 75.72770292516316 - type: nauc_recall_at_5_std value: -12.887572274644818 - type: ndcg_at_1 value: 58.56400000000001 - type: ndcg_at_10 value: 69.298 - type: ndcg_at_100 value: 71.95899999999999 - type: ndcg_at_1000 value: 72.735 - type: ndcg_at_20 value: 70.50699999999999 - type: ndcg_at_3 value: 65.81700000000001 - type: ndcg_at_5 value: 67.681 - type: precision_at_1 value: 58.56400000000001 - type: precision_at_10 value: 8.039 - type: precision_at_100 value: 0.931 - type: precision_at_1000 value: 0.099 - type: precision_at_20 value: 4.259 - type: precision_at_3 value: 23.65 
- type: precision_at_5 value: 15.09 - type: recall_at_1 value: 58.553 - type: recall_at_10 value: 80.368 - type: recall_at_100 value: 93.013 - type: recall_at_1000 value: 99.092 - type: recall_at_20 value: 85.143 - type: recall_at_3 value: 70.928 - type: recall_at_5 value: 75.42699999999999 - task: type: Retrieval dataset: name: MTEB MLQARetrieval (eng-spa) type: facebook/mlqa config: eng-spa split: test revision: 397ed406c1a7902140303e7faf60fff35b58d285 metrics: - type: main_score value: 66.374 - type: map_at_1 value: 55.494 - type: map_at_10 value: 62.763999999999996 - type: map_at_100 value: 63.33 - type: map_at_1000 value: 63.36000000000001 - type: map_at_20 value: 63.104000000000006 - type: map_at_3 value: 61.065000000000005 - type: map_at_5 value: 62.053000000000004 - type: mrr_at_1 value: 55.49419158255571 - type: mrr_at_10 value: 62.765195140457095 - type: mrr_at_100 value: 63.33083349354529 - type: mrr_at_1000 value: 63.3611897014839 - type: mrr_at_20 value: 63.10543590095977 - type: mrr_at_3 value: 61.06455913159412 - type: mrr_at_5 value: 62.052942296705474 - type: nauc_map_at_1000_diff1 value: 75.04200018088618 - type: nauc_map_at_1000_max value: 70.49937782771909 - type: nauc_map_at_1000_std value: -5.257206317083184 - type: nauc_map_at_100_diff1 value: 75.02786834256312 - type: nauc_map_at_100_max value: 70.5016476500189 - type: nauc_map_at_100_std value: -5.228770832077681 - type: nauc_map_at_10_diff1 value: 74.9626552701647 - type: nauc_map_at_10_max value: 70.56253732243214 - type: nauc_map_at_10_std value: -5.359037281768563 - type: nauc_map_at_1_diff1 value: 78.46858307815857 - type: nauc_map_at_1_max value: 69.03908373759435 - type: nauc_map_at_1_std value: -7.479412070736642 - type: nauc_map_at_20_diff1 value: 74.98121458084796 - type: nauc_map_at_20_max value: 70.51885366822565 - type: nauc_map_at_20_std value: -5.286051287133815 - type: nauc_map_at_3_diff1 value: 75.36078454383373 - type: nauc_map_at_3_max value: 70.34997144546014 - type: nauc_map_at_3_std value: -6.663517224039184 - type: nauc_map_at_5_diff1 value: 75.0274512828238 - type: nauc_map_at_5_max value: 70.45292551591874 - type: nauc_map_at_5_std value: -6.029224488640147 - type: nauc_mrr_at_1000_diff1 value: 75.04018768469983 - type: nauc_mrr_at_1000_max value: 70.49855509132635 - type: nauc_mrr_at_1000_std value: -5.258929961409948 - type: nauc_mrr_at_100_diff1 value: 75.02605732810112 - type: nauc_mrr_at_100_max value: 70.50082584929103 - type: nauc_mrr_at_100_std value: -5.2304917988542154 - type: nauc_mrr_at_10_diff1 value: 74.96079080525713 - type: nauc_mrr_at_10_max value: 70.56167294920391 - type: nauc_mrr_at_10_std value: -5.360650630655072 - type: nauc_mrr_at_1_diff1 value: 78.46858307815857 - type: nauc_mrr_at_1_max value: 69.03908373759435 - type: nauc_mrr_at_1_std value: -7.479412070736642 - type: nauc_mrr_at_20_diff1 value: 74.97939804960517 - type: nauc_mrr_at_20_max value: 70.51804078965411 - type: nauc_mrr_at_20_std value: -5.287681954889177 - type: nauc_mrr_at_3_diff1 value: 75.36078454383373 - type: nauc_mrr_at_3_max value: 70.34997144546014 - type: nauc_mrr_at_3_std value: -6.663517224039184 - type: nauc_mrr_at_5_diff1 value: 75.0274512828238 - type: nauc_mrr_at_5_max value: 70.45292551591874 - type: nauc_mrr_at_5_std value: -6.029224488640147 - type: nauc_ndcg_at_1000_diff1 value: 74.22106834748942 - type: nauc_ndcg_at_1000_max value: 70.93625922934912 - type: nauc_ndcg_at_1000_std value: -3.4878399005946017 - type: nauc_ndcg_at_100_diff1 value: 73.74068883646733 - type: 
nauc_ndcg_at_100_max value: 71.02357018347472 - type: nauc_ndcg_at_100_std value: -2.462293184201324 - type: nauc_ndcg_at_10_diff1 value: 73.40967965536565 - type: nauc_ndcg_at_10_max value: 71.29379828672067 - type: nauc_ndcg_at_10_std value: -3.295547756383108 - type: nauc_ndcg_at_1_diff1 value: 78.46858307815857 - type: nauc_ndcg_at_1_max value: 69.03908373759435 - type: nauc_ndcg_at_1_std value: -7.479412070736642 - type: nauc_ndcg_at_20_diff1 value: 73.45790057693699 - type: nauc_ndcg_at_20_max value: 71.16598432419126 - type: nauc_ndcg_at_20_std value: -2.962877157646097 - type: nauc_ndcg_at_3_diff1 value: 74.30696173964847 - type: nauc_ndcg_at_3_max value: 70.79878978459556 - type: nauc_ndcg_at_3_std value: -6.297286578628299 - type: nauc_ndcg_at_5_diff1 value: 73.65858211199816 - type: nauc_ndcg_at_5_max value: 71.01122417463776 - type: nauc_ndcg_at_5_std value: -5.075990882646765 - type: nauc_precision_at_1000_diff1 value: 68.71065091972568 - type: nauc_precision_at_1000_max value: 81.38173585624777 - type: nauc_precision_at_1000_std value: 58.035497889797895 - type: nauc_precision_at_100_diff1 value: 61.93634256957017 - type: nauc_precision_at_100_max value: 74.84191770203093 - type: nauc_precision_at_100_std value: 31.3325983123831 - type: nauc_precision_at_10_diff1 value: 66.68247010944937 - type: nauc_precision_at_10_max value: 74.48773524654571 - type: nauc_precision_at_10_std value: 6.560421880785153 - type: nauc_precision_at_1_diff1 value: 78.46858307815857 - type: nauc_precision_at_1_max value: 69.03908373759435 - type: nauc_precision_at_1_std value: -7.479412070736642 - type: nauc_precision_at_20_diff1 value: 65.51592872758067 - type: nauc_precision_at_20_max value: 74.50684066823096 - type: nauc_precision_at_20_std value: 10.830479877698208 - type: nauc_precision_at_3_diff1 value: 70.89587884861588 - type: nauc_precision_at_3_max value: 72.25310558370424 - type: nauc_precision_at_3_std value: -5.0796100900749765 - type: nauc_precision_at_5_diff1 value: 68.71885719845497 - type: nauc_precision_at_5_max value: 73.02601751485672 - type: nauc_precision_at_5_std value: -1.4382681421626857 - type: nauc_recall_at_1000_diff1 value: 71.95510299834734 - type: nauc_recall_at_1000_max value: 84.03647166092985 - type: nauc_recall_at_1000_std value: 56.87490604776847 - type: nauc_recall_at_100_diff1 value: 62.446624924715955 - type: nauc_recall_at_100_max value: 75.25666892464507 - type: nauc_recall_at_100_std value: 31.068789794554686 - type: nauc_recall_at_10_diff1 value: 66.70676336328988 - type: nauc_recall_at_10_max value: 74.4963699656397 - type: nauc_recall_at_10_std value: 6.57498399706916 - type: nauc_recall_at_1_diff1 value: 78.46858307815857 - type: nauc_recall_at_1_max value: 69.03908373759435 - type: nauc_recall_at_1_std value: -7.479412070736642 - type: nauc_recall_at_20_diff1 value: 65.54082767974772 - type: nauc_recall_at_20_max value: 74.5111529838772 - type: nauc_recall_at_20_std value: 10.84574829707354 - type: nauc_recall_at_3_diff1 value: 70.89587884861584 - type: nauc_recall_at_3_max value: 72.25310558370421 - type: nauc_recall_at_3_std value: -5.07961009007491 - type: nauc_recall_at_5_diff1 value: 68.71885719845501 - type: nauc_recall_at_5_max value: 73.02601751485666 - type: nauc_recall_at_5_std value: -1.4382681421626995 - type: ndcg_at_1 value: 55.494 - type: ndcg_at_10 value: 66.374 - type: ndcg_at_100 value: 69.254 - type: ndcg_at_1000 value: 70.136 - type: ndcg_at_20 value: 67.599 - type: ndcg_at_3 value: 62.863 - type: ndcg_at_5 value: 64.644 - type: 
precision_at_1 value: 55.494 - type: precision_at_10 value: 7.776 - type: precision_at_100 value: 0.9159999999999999 - type: precision_at_1000 value: 0.099 - type: precision_at_20 value: 4.1290000000000004 - type: precision_at_3 value: 22.688 - type: precision_at_5 value: 14.477 - type: recall_at_1 value: 55.494 - type: recall_at_10 value: 77.747 - type: recall_at_100 value: 91.535 - type: recall_at_1000 value: 98.619 - type: recall_at_20 value: 82.565 - type: recall_at_3 value: 68.063 - type: recall_at_5 value: 72.386 - task: type: Retrieval dataset: name: MTEB MLQARetrieval (eng-eng) type: facebook/mlqa config: eng-eng split: test revision: 397ed406c1a7902140303e7faf60fff35b58d285 metrics: - type: main_score value: 64.723 - type: map_at_1 value: 54.308 - type: map_at_10 value: 61.26200000000001 - type: map_at_100 value: 61.82299999999999 - type: map_at_1000 value: 61.856 - type: map_at_20 value: 61.575 - type: map_at_3 value: 59.565 - type: map_at_5 value: 60.561 - type: mrr_at_1 value: 54.31704368848212 - type: mrr_at_10 value: 61.26520216098834 - type: mrr_at_100 value: 61.82588321127103 - type: mrr_at_1000 value: 61.859333030574334 - type: mrr_at_20 value: 61.57780339921337 - type: mrr_at_3 value: 59.569446842801646 - type: mrr_at_5 value: 60.56323029989004 - type: nauc_map_at_1000_diff1 value: 74.21413722468635 - type: nauc_map_at_1000_max value: 70.41741227882316 - type: nauc_map_at_1000_std value: -2.5438707209848506 - type: nauc_map_at_100_diff1 value: 74.19812315947975 - type: nauc_map_at_100_max value: 70.41589146728445 - type: nauc_map_at_100_std value: -2.5336117059429553 - type: nauc_map_at_10_diff1 value: 74.21810561152937 - type: nauc_map_at_10_max value: 70.48816115200171 - type: nauc_map_at_10_std value: -2.7443834681406734 - type: nauc_map_at_1_diff1 value: 77.69378738778958 - type: nauc_map_at_1_max value: 68.64652310701173 - type: nauc_map_at_1_std value: -4.667071946448379 - type: nauc_map_at_20_diff1 value: 74.16105697562438 - type: nauc_map_at_20_max value: 70.42491994631179 - type: nauc_map_at_20_std value: -2.6070416022440472 - type: nauc_map_at_3_diff1 value: 74.60449392878863 - type: nauc_map_at_3_max value: 70.39888609914269 - type: nauc_map_at_3_std value: -3.5401151125723986 - type: nauc_map_at_5_diff1 value: 74.2423420992663 - type: nauc_map_at_5_max value: 70.36574501826757 - type: nauc_map_at_5_std value: -3.2707393116898964 - type: nauc_mrr_at_1000_diff1 value: 74.21029843731323 - type: nauc_mrr_at_1000_max value: 70.43020492688913 - type: nauc_mrr_at_1000_std value: -2.526895582202081 - type: nauc_mrr_at_100_diff1 value: 74.19440960479243 - type: nauc_mrr_at_100_max value: 70.4288998824232 - type: nauc_mrr_at_100_std value: -2.5160929945118107 - type: nauc_mrr_at_10_diff1 value: 74.2141357266166 - type: nauc_mrr_at_10_max value: 70.5005683347807 - type: nauc_mrr_at_10_std value: -2.727154557882168 - type: nauc_mrr_at_1_diff1 value: 77.69891248239793 - type: nauc_mrr_at_1_max value: 68.68255231164922 - type: nauc_mrr_at_1_std value: -4.630226727154317 - type: nauc_mrr_at_20_diff1 value: 74.15705434409723 - type: nauc_mrr_at_20_max value: 70.43741835972747 - type: nauc_mrr_at_20_std value: -2.5896756472464495 - type: nauc_mrr_at_3_diff1 value: 74.5981844349412 - type: nauc_mrr_at_3_max value: 70.41834937080564 - type: nauc_mrr_at_3_std value: -3.5161656408031163 - type: nauc_mrr_at_5_diff1 value: 74.23847535424844 - type: nauc_mrr_at_5_max value: 70.37763810013656 - type: nauc_mrr_at_5_std value: -3.2560955164581733 - type: nauc_ndcg_at_1000_diff1 value: 
73.20994496725493 - type: nauc_ndcg_at_1000_max value: 70.8903016277125 - type: nauc_ndcg_at_1000_std value: -0.625772298462309 - type: nauc_ndcg_at_100_diff1 value: 72.6847141682645 - type: nauc_ndcg_at_100_max value: 70.86564422034162 - type: nauc_ndcg_at_100_std value: -0.07195786766326141 - type: nauc_ndcg_at_10_diff1 value: 72.78806493754281 - type: nauc_ndcg_at_10_max value: 71.21957067926769 - type: nauc_ndcg_at_10_std value: -1.2760418313382227 - type: nauc_ndcg_at_1_diff1 value: 77.69891248239793 - type: nauc_ndcg_at_1_max value: 68.68255231164922 - type: nauc_ndcg_at_1_std value: -4.630226727154317 - type: nauc_ndcg_at_20_diff1 value: 72.52082440882546 - type: nauc_ndcg_at_20_max value: 70.98185004796734 - type: nauc_ndcg_at_20_std value: -0.6908280874815464 - type: nauc_ndcg_at_3_diff1 value: 73.59870660843939 - type: nauc_ndcg_at_3_max value: 70.94391957288654 - type: nauc_ndcg_at_3_std value: -3.147723179140428 - type: nauc_ndcg_at_5_diff1 value: 72.90122868193457 - type: nauc_ndcg_at_5_max value: 70.89376368965165 - type: nauc_ndcg_at_5_std value: -2.6451807385626744 - type: nauc_precision_at_1000_diff1 value: 58.14737201864067 - type: nauc_precision_at_1000_max value: 78.79011251144826 - type: nauc_precision_at_1000_std value: 59.98985420476577 - type: nauc_precision_at_100_diff1 value: 59.21069121644552 - type: nauc_precision_at_100_max value: 73.00557835912306 - type: nauc_precision_at_100_std value: 26.85027406282173 - type: nauc_precision_at_10_diff1 value: 66.8760831023675 - type: nauc_precision_at_10_max value: 74.21167950452596 - type: nauc_precision_at_10_std value: 5.453652499335947 - type: nauc_precision_at_1_diff1 value: 77.69891248239793 - type: nauc_precision_at_1_max value: 68.68255231164922 - type: nauc_precision_at_1_std value: -4.630226727154317 - type: nauc_precision_at_20_diff1 value: 64.3118559132602 - type: nauc_precision_at_20_max value: 73.33078184673825 - type: nauc_precision_at_20_std value: 9.993299523049402 - type: nauc_precision_at_3_diff1 value: 70.38667185155593 - type: nauc_precision_at_3_max value: 72.66495006030951 - type: nauc_precision_at_3_std value: -1.8532839591326276 - type: nauc_precision_at_5_diff1 value: 68.12161337583686 - type: nauc_precision_at_5_max value: 72.65644960375046 - type: nauc_precision_at_5_std value: -0.33317164167012875 - type: nauc_recall_at_1000_diff1 value: 61.63204394739985 - type: nauc_recall_at_1000_max value: 81.77241537319897 - type: nauc_recall_at_1000_std value: 58.44841544062308 - type: nauc_recall_at_100_diff1 value: 59.72072697224705 - type: nauc_recall_at_100_max value: 73.28519507061553 - type: nauc_recall_at_100_std value: 26.27318390763456 - type: nauc_recall_at_10_diff1 value: 66.9757135465418 - type: nauc_recall_at_10_max value: 74.21919493374149 - type: nauc_recall_at_10_std value: 5.323369605377166 - type: nauc_recall_at_1_diff1 value: 77.69378738778958 - type: nauc_recall_at_1_max value: 68.64652310701173 - type: nauc_recall_at_1_std value: -4.667071946448379 - type: nauc_recall_at_20_diff1 value: 64.42290081731899 - type: nauc_recall_at_20_max value: 73.3358289439033 - type: nauc_recall_at_20_std value: 9.846598361586073 - type: nauc_recall_at_3_diff1 value: 70.41211290964785 - type: nauc_recall_at_3_max value: 72.64451776775402 - type: nauc_recall_at_3_std value: -1.916280959835826 - type: nauc_recall_at_5_diff1 value: 68.20695272727916 - type: nauc_recall_at_5_max value: 72.66404224006101 - type: nauc_recall_at_5_std value: -0.431125323007886 - type: ndcg_at_1 value: 54.31700000000001 - type: 
ndcg_at_10 value: 64.723 - type: ndcg_at_100 value: 67.648 - type: ndcg_at_1000 value: 68.619 - type: ndcg_at_20 value: 65.85499999999999 - type: ndcg_at_3 value: 61.244 - type: ndcg_at_5 value: 63.038000000000004 - type: precision_at_1 value: 54.31700000000001 - type: precision_at_10 value: 7.564 - type: precision_at_100 value: 0.898 - type: precision_at_1000 value: 0.098 - type: precision_at_20 value: 4.005 - type: precision_at_3 value: 22.034000000000002 - type: precision_at_5 value: 14.093 - type: recall_at_1 value: 54.308 - type: recall_at_10 value: 75.622 - type: recall_at_100 value: 89.744 - type: recall_at_1000 value: 97.539 - type: recall_at_20 value: 80.085 - type: recall_at_3 value: 66.09 - type: recall_at_5 value: 70.446 - task: type: Clustering dataset: name: MTEB MLSUMClusteringP2P (de) type: reciTAL/mlsum config: de split: test revision: b5d54f8f3b61ae17845046286940f03c6bc79bc7 metrics: - type: main_score value: 41.267647761702854 - type: v_measure value: 41.267647761702854 - type: v_measure_std value: 10.93390895077248 - type: main_score value: 40.07927325071353 - type: v_measure value: 40.07927325071353 - type: v_measure_std value: 9.296680835266145 - task: type: Clustering dataset: name: MTEB MLSUMClusteringP2P (fr) type: reciTAL/mlsum config: fr split: test revision: b5d54f8f3b61ae17845046286940f03c6bc79bc7 metrics: - type: main_score value: 44.68714862333979 - type: v_measure value: 44.68714862333979 - type: v_measure_std value: 1.811036989797814 - type: main_score value: 44.88484854069901 - type: v_measure value: 44.88484854069901 - type: v_measure_std value: 2.3704247819781843 - task: type: Clustering dataset: name: MTEB MLSUMClusteringP2P (ru) type: reciTAL/mlsum config: ru split: test revision: b5d54f8f3b61ae17845046286940f03c6bc79bc7 metrics: - type: main_score value: 41.92518785753813 - type: v_measure value: 41.92518785753813 - type: v_measure_std value: 5.9356661900220775 - type: main_score value: 43.97657450929179 - type: v_measure value: 43.97657450929179 - type: v_measure_std value: 6.087547931333613 - task: type: Clustering dataset: name: MTEB MLSUMClusteringP2P (es) type: reciTAL/mlsum config: es split: test revision: b5d54f8f3b61ae17845046286940f03c6bc79bc7 metrics: - type: main_score value: 48.69875719812033 - type: v_measure value: 48.69875719812033 - type: v_measure_std value: 1.204253881950113 - type: main_score value: 48.41108671948728 - type: v_measure value: 48.41108671948728 - type: v_measure_std value: 1.3848320630151243 - task: type: Reranking dataset: name: MTEB MMarcoReranking (default) type: C-MTEB/Mmarco-reranking config: default split: dev revision: 8e0c766dbe9e16e1d221116a3f36795fbade07f6 metrics: - type: map value: 21.050447576170395 - type: mrr value: 20.201984126984126 - type: main_score value: 21.050447576170395 - task: type: Retrieval dataset: name: MTEB MMarcoRetrieval (default) type: C-MTEB/MMarcoRetrieval config: default split: dev revision: 539bbde593d947e2a124ba72651aafc09eb33fc2 metrics: - type: main_score value: 79.687 - type: map_at_1 value: 66.872 - type: map_at_10 value: 75.949 - type: map_at_100 value: 76.25 - type: map_at_1000 value: 76.259 - type: map_at_20 value: 76.145 - type: map_at_3 value: 74.01299999999999 - type: map_at_5 value: 75.232 - type: mrr_at_1 value: 69.18338108882521 - type: mrr_at_10 value: 76.5424227952881 - type: mrr_at_100 value: 76.8019342792628 - type: mrr_at_1000 value: 76.81002278342808 - type: mrr_at_20 value: 76.7115234815896 - type: mrr_at_3 value: 74.83046800382044 - type: mrr_at_5 value: 
75.88490926456515 - type: nauc_map_at_1000_diff1 value: 78.06933310424179 - type: nauc_map_at_1000_max value: 49.392948209665896 - type: nauc_map_at_1000_std value: -15.126109322591166 - type: nauc_map_at_100_diff1 value: 78.06612779298378 - type: nauc_map_at_100_max value: 49.40761618630397 - type: nauc_map_at_100_std value: -15.099282408159349 - type: nauc_map_at_10_diff1 value: 77.94565685470538 - type: nauc_map_at_10_max value: 49.50559610363201 - type: nauc_map_at_10_std value: -15.182130695916355 - type: nauc_map_at_1_diff1 value: 79.84814509858211 - type: nauc_map_at_1_max value: 40.78978466656547 - type: nauc_map_at_1_std value: -19.96189264026715 - type: nauc_map_at_20_diff1 value: 78.03597839981245 - type: nauc_map_at_20_max value: 49.49477427223376 - type: nauc_map_at_20_std value: -15.084990000838378 - type: nauc_map_at_3_diff1 value: 78.0637014655507 - type: nauc_map_at_3_max value: 48.63214001973341 - type: nauc_map_at_3_std value: -17.093950563306596 - type: nauc_map_at_5_diff1 value: 77.94068229240348 - type: nauc_map_at_5_max value: 49.38930719689204 - type: nauc_map_at_5_std value: -15.9919454201954 - type: nauc_mrr_at_1000_diff1 value: 78.34582398092816 - type: nauc_mrr_at_1000_max value: 49.623566992784156 - type: nauc_mrr_at_1000_std value: -14.381347765493265 - type: nauc_mrr_at_100_diff1 value: 78.3429966714221 - type: nauc_mrr_at_100_max value: 49.63684922240546 - type: nauc_mrr_at_100_std value: -14.354914066301236 - type: nauc_mrr_at_10_diff1 value: 78.2208070219624 - type: nauc_mrr_at_10_max value: 49.77720536573364 - type: nauc_mrr_at_10_std value: -14.316233764741812 - type: nauc_mrr_at_1_diff1 value: 80.22305496572142 - type: nauc_mrr_at_1_max value: 44.30231210192536 - type: nauc_mrr_at_1_std value: -18.942549914934492 - type: nauc_mrr_at_20_diff1 value: 78.31006724240147 - type: nauc_mrr_at_20_max value: 49.72338465276142 - type: nauc_mrr_at_20_std value: -14.30722621948953 - type: nauc_mrr_at_3_diff1 value: 78.39832634634523 - type: nauc_mrr_at_3_max value: 49.24985961036677 - type: nauc_mrr_at_3_std value: -15.966286866763191 - type: nauc_mrr_at_5_diff1 value: 78.2406507247798 - type: nauc_mrr_at_5_max value: 49.71276359754787 - type: nauc_mrr_at_5_std value: -14.979526226149698 - type: nauc_ndcg_at_1000_diff1 value: 77.74892471071016 - type: nauc_ndcg_at_1000_max value: 51.11543344053061 - type: nauc_ndcg_at_1000_std value: -12.208878737005096 - type: nauc_ndcg_at_100_diff1 value: 77.67462502211228 - type: nauc_ndcg_at_100_max value: 51.593977338939034 - type: nauc_ndcg_at_100_std value: -11.312126179513802 - type: nauc_ndcg_at_10_diff1 value: 77.0571291760012 - type: nauc_ndcg_at_10_max value: 52.35435572808972 - type: nauc_ndcg_at_10_std value: -11.33242546164059 - type: nauc_ndcg_at_1_diff1 value: 80.22305496572142 - type: nauc_ndcg_at_1_max value: 44.30231210192536 - type: nauc_ndcg_at_1_std value: -18.942549914934492 - type: nauc_ndcg_at_20_diff1 value: 77.4141216117471 - type: nauc_ndcg_at_20_max value: 52.340600871365375 - type: nauc_ndcg_at_20_std value: -10.989010161550912 - type: nauc_ndcg_at_3_diff1 value: 77.43971989259062 - type: nauc_ndcg_at_3_max value: 50.59251358320663 - type: nauc_ndcg_at_3_std value: -15.59337960636058 - type: nauc_ndcg_at_5_diff1 value: 77.12174287031847 - type: nauc_ndcg_at_5_max value: 51.97108510288907 - type: nauc_ndcg_at_5_std value: -13.474902612427167 - type: nauc_precision_at_1000_diff1 value: -19.36793534929367 - type: nauc_precision_at_1000_max value: 11.803383262344036 - type: nauc_precision_at_1000_std 
value: 24.304436015177046 - type: nauc_precision_at_100_diff1 value: -6.273790806909921 - type: nauc_precision_at_100_max value: 23.372606271300747 - type: nauc_precision_at_100_std value: 29.085768971612342 - type: nauc_precision_at_10_diff1 value: 21.67045907336595 - type: nauc_precision_at_10_max value: 41.68948432407223 - type: nauc_precision_at_10_std value: 17.837055074458092 - type: nauc_precision_at_1_diff1 value: 80.22305496572142 - type: nauc_precision_at_1_max value: 44.30231210192536 - type: nauc_precision_at_1_std value: -18.942549914934492 - type: nauc_precision_at_20_diff1 value: 12.577671896684803 - type: nauc_precision_at_20_max value: 37.44944702246691 - type: nauc_precision_at_20_std value: 23.635897665206087 - type: nauc_precision_at_3_diff1 value: 47.165335112814056 - type: nauc_precision_at_3_max value: 47.0458691263379 - type: nauc_precision_at_3_std value: -3.3181861146890217 - type: nauc_precision_at_5_diff1 value: 35.406205343514806 - type: nauc_precision_at_5_max value: 45.56549449285401 - type: nauc_precision_at_5_std value: 5.612378074562386 - type: nauc_recall_at_1000_diff1 value: 72.32762520815842 - type: nauc_recall_at_1000_max value: 85.64979256307343 - type: nauc_recall_at_1000_std value: 73.61925297037476 - type: nauc_recall_at_100_diff1 value: 72.31946328709962 - type: nauc_recall_at_100_max value: 83.76576070068353 - type: nauc_recall_at_100_std value: 57.39376538662535 - type: nauc_recall_at_10_diff1 value: 69.51307788072499 - type: nauc_recall_at_10_max value: 69.60124733654142 - type: nauc_recall_at_10_std value: 13.483540424716892 - type: nauc_recall_at_1_diff1 value: 79.84814509858211 - type: nauc_recall_at_1_max value: 40.78978466656547 - type: nauc_recall_at_1_std value: -19.96189264026715 - type: nauc_recall_at_20_diff1 value: 70.92168324710599 - type: nauc_recall_at_20_max value: 76.09106252420084 - type: nauc_recall_at_20_std value: 25.406842300761447 - type: nauc_recall_at_3_diff1 value: 74.1212680517145 - type: nauc_recall_at_3_max value: 56.24921832879403 - type: nauc_recall_at_3_std value: -11.55542913578436 - type: nauc_recall_at_5_diff1 value: 72.31262959872993 - type: nauc_recall_at_5_max value: 62.761214896697915 - type: nauc_recall_at_5_std value: -3.280167584070396 - type: ndcg_at_1 value: 69.18299999999999 - type: ndcg_at_10 value: 79.687 - type: ndcg_at_100 value: 81.062 - type: ndcg_at_1000 value: 81.312 - type: ndcg_at_20 value: 80.34599999999999 - type: ndcg_at_3 value: 75.98700000000001 - type: ndcg_at_5 value: 78.039 - type: precision_at_1 value: 69.18299999999999 - type: precision_at_10 value: 9.636 - type: precision_at_100 value: 1.0330000000000001 - type: precision_at_1000 value: 0.105 - type: precision_at_20 value: 4.958 - type: precision_at_3 value: 28.515 - type: precision_at_5 value: 18.201 - type: recall_at_1 value: 66.872 - type: recall_at_10 value: 90.688 - type: recall_at_100 value: 96.99 - type: recall_at_1000 value: 98.958 - type: recall_at_20 value: 93.21199999999999 - type: recall_at_3 value: 80.84599999999999 - type: recall_at_5 value: 85.732 - task: type: Retrieval dataset: name: MTEB MSMARCO (default) type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: map_at_1 value: 21.861 - type: map_at_10 value: 34.008 - type: map_at_100 value: 35.174 - type: map_at_1000 value: 35.224 - type: map_at_20 value: 34.705999999999996 - type: map_at_3 value: 30.209000000000003 - type: map_at_5 value: 32.351 - type: mrr_at_1 value: 22.493 - type: mrr_at_10 value: 
34.583999999999996 - type: mrr_at_100 value: 35.691 - type: mrr_at_1000 value: 35.736000000000004 - type: mrr_at_20 value: 35.257 - type: mrr_at_3 value: 30.85 - type: mrr_at_5 value: 32.962 - type: ndcg_at_1 value: 22.493 - type: ndcg_at_10 value: 40.815 - type: ndcg_at_100 value: 46.483999999999995 - type: ndcg_at_1000 value: 47.73 - type: ndcg_at_20 value: 43.302 - type: ndcg_at_3 value: 33.056000000000004 - type: ndcg_at_5 value: 36.879 - type: precision_at_1 value: 22.493 - type: precision_at_10 value: 6.465999999999999 - type: precision_at_100 value: 0.932 - type: precision_at_1000 value: 0.104 - type: precision_at_20 value: 3.752 - type: precision_at_3 value: 14.069 - type: precision_at_5 value: 10.384 - type: recall_at_1 value: 21.861 - type: recall_at_10 value: 61.781 - type: recall_at_100 value: 88.095 - type: recall_at_1000 value: 97.625 - type: recall_at_20 value: 71.44500000000001 - type: recall_at_3 value: 40.653 - type: recall_at_5 value: 49.841 - type: main_score value: 40.815 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 97.4874601003192 - type: f1 value: 97.19067544931094 - type: f1_weighted value: 97.49331776181019 - type: main_score value: 97.4874601003192 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (de) type: mteb/mtop_domain config: de split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 96.89489997182305 - type: f1 value: 96.51138586512977 - type: f1_weighted value: 96.89723065967186 - type: main_score value: 96.89489997182305 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (es) type: mteb/mtop_domain config: es split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 97.17144763175452 - type: f1 value: 96.81785681878274 - type: f1_weighted value: 97.1778974586874 - type: main_score value: 97.17144763175452 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 96.30128405887879 - type: f1 value: 95.94555923088487 - type: f1_weighted value: 96.30399416794926 - type: main_score value: 96.30128405887879 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 84.53488372093022 - type: f1 value: 61.77995074251401 - type: f1_weighted value: 86.8005170485101 - type: main_score value: 84.53488372093022 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (de) type: mteb/mtop_intent config: de split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 80.79459002535924 - type: f1 value: 56.08938302001448 - type: f1_weighted value: 83.66582131948252 - type: main_score value: 80.79459002535924 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (es) type: mteb/mtop_intent config: es split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 84.7765176784523 - type: f1 value: 61.39860057885528 - type: f1_weighted value: 86.94881745670745 - type: main_score value: 84.7765176784523 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: 
mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 82.2079549013467 - type: f1 value: 59.90260478749016 - type: f1_weighted value: 84.36861708593257 - type: main_score value: 82.2079549013467 - task: type: Classification dataset: name: MTEB MasakhaNEWSClassification (eng) type: mteb/masakhanews config: eng split: test revision: 18193f187b92da67168c655c9973a165ed9593dd metrics: - type: accuracy value: 74.98945147679325 - type: f1 value: 74.3157483560261 - type: f1_weighted value: 75.01179008904884 - type: main_score value: 74.98945147679325 - task: type: Classification dataset: name: MTEB MasakhaNEWSClassification (fra) type: mteb/masakhanews config: fra split: test revision: 18193f187b92da67168c655c9973a165ed9593dd metrics: - type: accuracy value: 74.02843601895735 - type: f1 value: 70.40326349620732 - type: f1_weighted value: 74.6596277063484 - type: main_score value: 74.02843601895735 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (amh) type: masakhane/masakhanews config: amh split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 69.45780291725053 - type: v_measure value: 69.45780291725053 - type: v_measure_std value: 36.54340055904091 - type: main_score value: 60.95132147787602 - type: v_measure value: 60.95132147787602 - type: v_measure_std value: 37.330148394033365 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (eng) type: masakhane/masakhanews config: eng split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 64.88996119332239 - type: v_measure value: 64.88996119332239 - type: v_measure_std value: 30.017223408197268 - type: main_score value: 60.974810831426595 - type: v_measure value: 60.974810831426595 - type: v_measure_std value: 24.934675467507827 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (fra) type: masakhane/masakhanews config: fra split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 42.362383958691666 - type: v_measure value: 42.362383958691666 - type: v_measure_std value: 37.61076788039063 - type: main_score value: 44.479206673553335 - type: v_measure value: 44.479206673553335 - type: v_measure_std value: 32.58254804499339 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (hau) type: masakhane/masakhanews config: hau split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 43.29201252405562 - type: v_measure value: 43.29201252405562 - type: v_measure_std value: 34.31987945146255 - type: main_score value: 26.4742082741682 - type: v_measure value: 26.4742082741682 - type: v_measure_std value: 22.344929192323097 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (ibo) type: masakhane/masakhanews config: ibo split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 33.59926542995238 - type: v_measure value: 33.59926542995238 - type: v_measure_std value: 35.70048601084112 - type: main_score value: 38.906129911741985 - type: v_measure value: 38.906129911741985 - type: v_measure_std value: 34.785601792668444 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (lin) type: masakhane/masakhanews config: lin split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 67.58487601893106 - type: v_measure value: 67.58487601893106 - type: 
v_measure_std value: 35.16784970777931 - type: main_score value: 62.60982020876592 - type: v_measure value: 62.60982020876592 - type: v_measure_std value: 40.7368955715045 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (lug) type: masakhane/masakhanews config: lug split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 50.01220872023533 - type: v_measure value: 50.01220872023533 - type: v_measure_std value: 41.87411574676182 - type: main_score value: 42.70424106365967 - type: v_measure value: 42.70424106365967 - type: v_measure_std value: 46.80946241135087 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (orm) type: masakhane/masakhanews config: orm split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 29.007847502598317 - type: v_measure value: 29.007847502598317 - type: v_measure_std value: 38.374997395079994 - type: main_score value: 28.609942199922322 - type: v_measure value: 28.609942199922322 - type: v_measure_std value: 38.46685040191088 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (pcm) type: masakhane/masakhanews config: pcm split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 79.13520228554611 - type: v_measure value: 79.13520228554611 - type: v_measure_std value: 18.501843848275183 - type: main_score value: 76.83901348810822 - type: v_measure value: 76.83901348810822 - type: v_measure_std value: 17.57617141269189 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (run) type: masakhane/masakhanews config: run split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 60.317213909746656 - type: v_measure value: 60.317213909746656 - type: v_measure_std value: 36.500281823747386 - type: main_score value: 46.89757547846193 - type: v_measure value: 46.89757547846193 - type: v_measure_std value: 44.58903590203438 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (sna) type: masakhane/masakhanews config: sna split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 59.395277358240946 - type: v_measure value: 59.395277358240946 - type: v_measure_std value: 37.500916816164654 - type: main_score value: 55.37185207068829 - type: v_measure value: 55.37185207068829 - type: v_measure_std value: 36.944574863543004 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (som) type: masakhane/masakhanews config: som split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 38.18638688704302 - type: v_measure value: 38.18638688704302 - type: v_measure_std value: 35.453681137564466 - type: main_score value: 37.44211021681754 - type: v_measure value: 37.44211021681754 - type: v_measure_std value: 33.41469994463241 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (swa) type: masakhane/masakhanews config: swa split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 29.49230755729658 - type: v_measure value: 29.49230755729658 - type: v_measure_std value: 28.284313285264645 - type: main_score value: 26.020680621216062 - type: v_measure value: 26.020680621216062 - type: v_measure_std value: 25.480037522570413 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (tir) type: masakhane/masakhanews config: tir split: test revision: 
8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 60.632258622750115 - type: v_measure value: 60.632258622750115 - type: v_measure_std value: 34.429711214740564 - type: main_score value: 63.74306846771303 - type: v_measure value: 63.74306846771303 - type: v_measure_std value: 32.19119631078685 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (xho) type: masakhane/masakhanews config: xho split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 41.76322918806381 - type: v_measure value: 41.76322918806381 - type: v_measure_std value: 36.43245296200775 - type: main_score value: 24.580890519243777 - type: v_measure value: 24.580890519243777 - type: v_measure_std value: 37.941836363967106 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (yor) type: masakhane/masakhanews config: yor split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 33.17083910808645 - type: v_measure value: 33.17083910808645 - type: v_measure_std value: 34.87547994284835 - type: main_score value: 43.63458888828314 - type: v_measure value: 43.63458888828314 - type: v_measure_std value: 31.28169350649098 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 75.37323470073974 - type: f1 value: 71.1836877753734 - type: f1_weighted value: 75.72073213955457 - type: main_score value: 75.37323470073974 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (de) type: mteb/amazon_massive_intent config: de split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 74.83523873570948 - type: f1 value: 70.72375821116886 - type: f1_weighted value: 75.20800490010755 - type: main_score value: 74.83523873570948 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (es) type: mteb/amazon_massive_intent config: es split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 75.31607262945528 - type: f1 value: 72.06063554897662 - type: f1_weighted value: 75.72438161355252 - type: main_score value: 75.31607262945528 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ru) type: mteb/amazon_massive_intent config: ru split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 76.7955615332885 - type: f1 value: 73.08099648499756 - type: f1_weighted value: 77.18482068239668 - type: main_score value: 76.7955615332885 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 77.60591795561534 - type: f1 value: 74.46676705370395 - type: f1_weighted value: 77.69888062336614 - type: main_score value: 77.60591795561534 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 76.32145258910558 - type: f1 value: 72.89824154178328 - type: f1_weighted value: 76.6539327979472 - type: main_score value: 76.32145258910558 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN 
split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 73.21788836583724 - type: f1 value: 70.45594512246377 - type: f1_weighted value: 73.67862536499393 - type: main_score value: 73.21788836583724 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 80.82044384667114 - type: f1 value: 80.53217664465089 - type: f1_weighted value: 80.94535087010512 - type: main_score value: 80.82044384667114 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 82.1049092131809 - type: f1 value: 81.55343463694733 - type: f1_weighted value: 82.33509098770782 - type: main_score value: 82.1049092131809 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (es) type: mteb/amazon_massive_scenario config: es split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 82.58238063214526 - type: f1 value: 82.27974449333072 - type: f1_weighted value: 82.81337569618209 - type: main_score value: 82.58238063214526 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (de) type: mteb/amazon_massive_scenario config: de split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 83.97108271687962 - type: f1 value: 83.56285606936076 - type: f1_weighted value: 84.10198745390771 - type: main_score value: 83.97108271687962 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 84.71082716879623 - type: f1 value: 84.09447062371402 - type: f1_weighted value: 84.73765765551342 - type: main_score value: 84.71082716879623 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 83.093476798924 - type: f1 value: 82.72656900752943 - type: f1_weighted value: 83.26606516503364 - type: main_score value: 83.093476798924 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ru) type: mteb/amazon_massive_scenario config: ru split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 84.05850706119705 - type: f1 value: 83.64234048881222 - type: f1_weighted value: 84.17315768381876 - type: main_score value: 84.05850706119705 - task: type: Retrieval dataset: name: MTEB MedicalRetrieval (default) type: C-MTEB/MedicalRetrieval config: default split: dev revision: 2039188fb5800a9803ba5048df7b76e6fb151fc6 metrics: - type: main_score value: 56.635999999999996 - type: map_at_1 value: 48.699999999999996 - type: map_at_10 value: 53.991 - type: map_at_100 value: 54.449999999999996 - type: map_at_1000 value: 54.515 - type: map_at_20 value: 54.212 - type: map_at_3 value: 52.833 - type: map_at_5 value: 53.503 - type: mrr_at_1 value: 48.699999999999996 - type: mrr_at_10 value: 53.991309523809505 - type: mrr_at_100 value: 54.45008993448266 - type: mrr_at_1000 value: 54.515253990549795 - type: mrr_at_20 value: 54.21201762247036 - type: mrr_at_3 value: 
52.8333333333333 - type: mrr_at_5 value: 53.50333333333328 - type: nauc_map_at_1000_diff1 value: 79.96867989401643 - type: nauc_map_at_1000_max value: 69.75230895599029 - type: nauc_map_at_1000_std value: 2.6418738289740213 - type: nauc_map_at_100_diff1 value: 79.95343709599133 - type: nauc_map_at_100_max value: 69.751282671507 - type: nauc_map_at_100_std value: 2.621719966106279 - type: nauc_map_at_10_diff1 value: 80.02875864565634 - type: nauc_map_at_10_max value: 69.80948662290187 - type: nauc_map_at_10_std value: 2.329151604733765 - type: nauc_map_at_1_diff1 value: 83.616940281383 - type: nauc_map_at_1_max value: 69.08142651929452 - type: nauc_map_at_1_std value: 1.9687791394035643 - type: nauc_map_at_20_diff1 value: 79.95555601275339 - type: nauc_map_at_20_max value: 69.76604695002925 - type: nauc_map_at_20_std value: 2.556184141901367 - type: nauc_map_at_3_diff1 value: 80.74790131023668 - type: nauc_map_at_3_max value: 70.57797991892402 - type: nauc_map_at_3_std value: 2.7115149849964117 - type: nauc_map_at_5_diff1 value: 80.31796539878381 - type: nauc_map_at_5_max value: 69.93573796420061 - type: nauc_map_at_5_std value: 2.0731614029506606 - type: nauc_mrr_at_1000_diff1 value: 79.96867999907981 - type: nauc_mrr_at_1000_max value: 69.57395578976896 - type: nauc_mrr_at_1000_std value: 2.46351945887829 - type: nauc_mrr_at_100_diff1 value: 79.95343709599133 - type: nauc_mrr_at_100_max value: 69.57322054130803 - type: nauc_mrr_at_100_std value: 2.4436578359073433 - type: nauc_mrr_at_10_diff1 value: 80.02875864565634 - type: nauc_mrr_at_10_max value: 69.63292630937411 - type: nauc_mrr_at_10_std value: 2.1525912912060012 - type: nauc_mrr_at_1_diff1 value: 83.616940281383 - type: nauc_mrr_at_1_max value: 68.74717310480305 - type: nauc_mrr_at_1_std value: 1.6345257249120868 - type: nauc_mrr_at_20_diff1 value: 79.95555601275339 - type: nauc_mrr_at_20_max value: 69.58883608470444 - type: nauc_mrr_at_20_std value: 2.378973276576547 - type: nauc_mrr_at_3_diff1 value: 80.74790131023668 - type: nauc_mrr_at_3_max value: 70.40430475488604 - type: nauc_mrr_at_3_std value: 2.5378398209583817 - type: nauc_mrr_at_5_diff1 value: 80.31796539878381 - type: nauc_mrr_at_5_max value: 69.7605991748183 - type: nauc_mrr_at_5_std value: 1.898022613568352 - type: nauc_ndcg_at_1000_diff1 value: 78.35504059321225 - type: nauc_ndcg_at_1000_max value: 69.06752522437093 - type: nauc_ndcg_at_1000_std value: 3.9624036886099265 - type: nauc_ndcg_at_100_diff1 value: 77.79729140249833 - type: nauc_ndcg_at_100_max value: 68.93113791506029 - type: nauc_ndcg_at_100_std value: 3.642178826886181 - type: nauc_ndcg_at_10_diff1 value: 78.160158293918 - type: nauc_ndcg_at_10_max value: 69.28122202281361 - type: nauc_ndcg_at_10_std value: 2.438976810940962 - type: nauc_ndcg_at_1_diff1 value: 83.616940281383 - type: nauc_ndcg_at_1_max value: 69.08142651929452 - type: nauc_ndcg_at_1_std value: 1.9687791394035643 - type: nauc_ndcg_at_20_diff1 value: 77.88514432874997 - type: nauc_ndcg_at_20_max value: 69.06148818508873 - type: nauc_ndcg_at_20_std value: 3.1800249272363676 - type: nauc_ndcg_at_3_diff1 value: 79.73510384405803 - type: nauc_ndcg_at_3_max value: 70.78000695123832 - type: nauc_ndcg_at_3_std value: 2.9041415468363274 - type: nauc_ndcg_at_5_diff1 value: 78.91872808866195 - type: nauc_ndcg_at_5_max value: 69.61478429620091 - type: nauc_ndcg_at_5_std value: 1.734699636301054 - type: nauc_precision_at_1000_diff1 value: 66.37858395390673 - type: nauc_precision_at_1000_max value: 60.651659037598534 - type: 
nauc_precision_at_1000_std value: 27.388353715469798 - type: nauc_precision_at_100_diff1 value: 66.34325807776025 - type: nauc_precision_at_100_max value: 63.63855305621111 - type: nauc_precision_at_100_std value: 10.641748149575351 - type: nauc_precision_at_10_diff1 value: 71.3784685491089 - type: nauc_precision_at_10_max value: 67.05313695174542 - type: nauc_precision_at_10_std value: 3.000406867930561 - type: nauc_precision_at_1_diff1 value: 83.616940281383 - type: nauc_precision_at_1_max value: 69.08142651929452 - type: nauc_precision_at_1_std value: 1.9687791394035643 - type: nauc_precision_at_20_diff1 value: 69.73407910977694 - type: nauc_precision_at_20_max value: 65.77426240320742 - type: nauc_precision_at_20_std value: 6.204416838482586 - type: nauc_precision_at_3_diff1 value: 76.63737537643107 - type: nauc_precision_at_3_max value: 71.29710200719668 - type: nauc_precision_at_3_std value: 3.47180961484546 - type: nauc_precision_at_5_diff1 value: 74.36945983536717 - type: nauc_precision_at_5_max value: 68.33292218003061 - type: nauc_precision_at_5_std value: 0.47128762620258075 - type: nauc_recall_at_1000_diff1 value: 66.37858395390681 - type: nauc_recall_at_1000_max value: 60.65165903759889 - type: nauc_recall_at_1000_std value: 27.388353715469822 - type: nauc_recall_at_100_diff1 value: 66.34325807776025 - type: nauc_recall_at_100_max value: 63.63855305621116 - type: nauc_recall_at_100_std value: 10.641748149575351 - type: nauc_recall_at_10_diff1 value: 71.37846854910892 - type: nauc_recall_at_10_max value: 67.05313695174546 - type: nauc_recall_at_10_std value: 3.000406867930663 - type: nauc_recall_at_1_diff1 value: 83.616940281383 - type: nauc_recall_at_1_max value: 69.08142651929452 - type: nauc_recall_at_1_std value: 1.9687791394035643 - type: nauc_recall_at_20_diff1 value: 69.73407910977691 - type: nauc_recall_at_20_max value: 65.77426240320746 - type: nauc_recall_at_20_std value: 6.204416838482536 - type: nauc_recall_at_3_diff1 value: 76.63737537643112 - type: nauc_recall_at_3_max value: 71.29710200719668 - type: nauc_recall_at_3_std value: 3.471809614845442 - type: nauc_recall_at_5_diff1 value: 74.36945983536715 - type: nauc_recall_at_5_max value: 68.33292218003065 - type: nauc_recall_at_5_std value: 0.4712876262026442 - type: ndcg_at_1 value: 48.699999999999996 - type: ndcg_at_10 value: 56.635999999999996 - type: ndcg_at_100 value: 59.193 - type: ndcg_at_1000 value: 60.97 - type: ndcg_at_20 value: 57.426 - type: ndcg_at_3 value: 54.186 - type: ndcg_at_5 value: 55.407 - type: precision_at_1 value: 48.699999999999996 - type: precision_at_10 value: 6.5 - type: precision_at_100 value: 0.777 - type: precision_at_1000 value: 0.092 - type: precision_at_20 value: 3.405 - type: precision_at_3 value: 19.367 - type: precision_at_5 value: 12.22 - type: recall_at_1 value: 48.699999999999996 - type: recall_at_10 value: 65.0 - type: recall_at_100 value: 77.7 - type: recall_at_1000 value: 91.8 - type: recall_at_20 value: 68.10000000000001 - type: recall_at_3 value: 58.099999999999994 - type: recall_at_5 value: 61.1 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P (default) type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: main_score value: 34.80188561439236 - type: v_measure value: 34.80188561439236 - type: v_measure_std value: 1.5703148841573102 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S (default) type: mteb/medrxiv-clustering-s2s config: default split: test revision: 
35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: main_score value: 32.42285513996236 - type: v_measure value: 32.42285513996236 - type: v_measure_std value: 1.3769867487457566 - task: type: Retrieval dataset: name: MTEB MintakaRetrieval (de) type: jinaai/mintakaqa config: de split: test revision: efa78cc2f74bbcd21eff2261f9e13aebe40b814e metrics: - type: main_score value: 27.025 - type: map_at_1 value: 14.532 - type: map_at_10 value: 22.612 - type: map_at_100 value: 23.802 - type: map_at_1000 value: 23.9 - type: map_at_20 value: 23.275000000000002 - type: map_at_3 value: 20.226 - type: map_at_5 value: 21.490000000000002 - type: mrr_at_1 value: 14.532434709351305 - type: mrr_at_10 value: 22.612077265615575 - type: mrr_at_100 value: 23.801523356874675 - type: mrr_at_1000 value: 23.900118499340238 - type: mrr_at_20 value: 23.275466430108995 - type: mrr_at_3 value: 20.22606009547877 - type: mrr_at_5 value: 21.489750070204945 - type: nauc_map_at_1000_diff1 value: 14.148987799763596 - type: nauc_map_at_1000_max value: 44.70338461387784 - type: nauc_map_at_1000_std value: 15.868006767707637 - type: nauc_map_at_100_diff1 value: 14.11371769080442 - type: nauc_map_at_100_max value: 44.67995540936296 - type: nauc_map_at_100_std value: 15.890796502029076 - type: nauc_map_at_10_diff1 value: 14.29066834165688 - type: nauc_map_at_10_max value: 45.10997111765282 - type: nauc_map_at_10_std value: 15.508568918629864 - type: nauc_map_at_1_diff1 value: 23.473291302576396 - type: nauc_map_at_1_max value: 44.68942599764586 - type: nauc_map_at_1_std value: 12.424377262427253 - type: nauc_map_at_20_diff1 value: 14.112652046087831 - type: nauc_map_at_20_max value: 44.82014861413682 - type: nauc_map_at_20_std value: 15.739350613646385 - type: nauc_map_at_3_diff1 value: 16.119659221396347 - type: nauc_map_at_3_max value: 46.04766378953525 - type: nauc_map_at_3_std value: 13.969878046315925 - type: nauc_map_at_5_diff1 value: 15.095453434076184 - type: nauc_map_at_5_max value: 45.802128149314406 - type: nauc_map_at_5_std value: 14.957442173319949 - type: nauc_mrr_at_1000_diff1 value: 14.148987799763596 - type: nauc_mrr_at_1000_max value: 44.70338461387784 - type: nauc_mrr_at_1000_std value: 15.868006767707637 - type: nauc_mrr_at_100_diff1 value: 14.11371769080442 - type: nauc_mrr_at_100_max value: 44.67995540936296 - type: nauc_mrr_at_100_std value: 15.890796502029076 - type: nauc_mrr_at_10_diff1 value: 14.29066834165688 - type: nauc_mrr_at_10_max value: 45.10997111765282 - type: nauc_mrr_at_10_std value: 15.508568918629864 - type: nauc_mrr_at_1_diff1 value: 23.473291302576396 - type: nauc_mrr_at_1_max value: 44.68942599764586 - type: nauc_mrr_at_1_std value: 12.424377262427253 - type: nauc_mrr_at_20_diff1 value: 14.112652046087831 - type: nauc_mrr_at_20_max value: 44.82014861413682 - type: nauc_mrr_at_20_std value: 15.739350613646385 - type: nauc_mrr_at_3_diff1 value: 16.119659221396347 - type: nauc_mrr_at_3_max value: 46.04766378953525 - type: nauc_mrr_at_3_std value: 13.969878046315925 - type: nauc_mrr_at_5_diff1 value: 15.095453434076184 - type: nauc_mrr_at_5_max value: 45.802128149314406 - type: nauc_mrr_at_5_std value: 14.957442173319949 - type: nauc_ndcg_at_1000_diff1 value: 11.626606894574028 - type: nauc_ndcg_at_1000_max value: 43.328592841065536 - type: nauc_ndcg_at_1000_std value: 18.049446272245547 - type: nauc_ndcg_at_100_diff1 value: 10.485720606660239 - type: nauc_ndcg_at_100_max value: 42.405317674170966 - type: nauc_ndcg_at_100_std value: 19.107151641936987 - type: nauc_ndcg_at_10_diff1 value: 
11.029351078162982 - type: nauc_ndcg_at_10_max value: 44.36855031964681 - type: nauc_ndcg_at_10_std value: 17.302796171409305 - type: nauc_ndcg_at_1_diff1 value: 23.473291302576396 - type: nauc_ndcg_at_1_max value: 44.68942599764586 - type: nauc_ndcg_at_1_std value: 12.424377262427253 - type: nauc_ndcg_at_20_diff1 value: 10.356662718168412 - type: nauc_ndcg_at_20_max value: 43.31602680430083 - type: nauc_ndcg_at_20_std value: 18.162891267850316 - type: nauc_ndcg_at_3_diff1 value: 14.42844952297869 - type: nauc_ndcg_at_3_max value: 46.26603339466543 - type: nauc_ndcg_at_3_std value: 14.449362723887857 - type: nauc_ndcg_at_5_diff1 value: 12.783416563486396 - type: nauc_ndcg_at_5_max value: 45.852176479124424 - type: nauc_ndcg_at_5_std value: 16.11775016428085 - type: nauc_precision_at_1000_diff1 value: -8.045361059399795 - type: nauc_precision_at_1000_max value: 21.970273281738777 - type: nauc_precision_at_1000_std value: 49.564650488193266 - type: nauc_precision_at_100_diff1 value: -2.118628861593353 - type: nauc_precision_at_100_max value: 31.32498977104778 - type: nauc_precision_at_100_std value: 32.96087731883451 - type: nauc_precision_at_10_diff1 value: 3.0335517475367615 - type: nauc_precision_at_10_max value: 42.21620215030219 - type: nauc_precision_at_10_std value: 21.90159732315962 - type: nauc_precision_at_1_diff1 value: 23.473291302576396 - type: nauc_precision_at_1_max value: 44.68942599764586 - type: nauc_precision_at_1_std value: 12.424377262427253 - type: nauc_precision_at_20_diff1 value: 0.4087201843719047 - type: nauc_precision_at_20_max value: 38.485034773895734 - type: nauc_precision_at_20_std value: 25.077397979916682 - type: nauc_precision_at_3_diff1 value: 10.408327736589833 - type: nauc_precision_at_3_max value: 46.757216289175076 - type: nauc_precision_at_3_std value: 15.62594354926867 - type: nauc_precision_at_5_diff1 value: 7.326752744229544 - type: nauc_precision_at_5_max value: 45.89190518573553 - type: nauc_precision_at_5_std value: 19.01717163438957 - type: nauc_recall_at_1000_diff1 value: -8.045361059400387 - type: nauc_recall_at_1000_max value: 21.97027328173812 - type: nauc_recall_at_1000_std value: 49.56465048819266 - type: nauc_recall_at_100_diff1 value: -2.118628861593277 - type: nauc_recall_at_100_max value: 31.324989771047818 - type: nauc_recall_at_100_std value: 32.96087731883457 - type: nauc_recall_at_10_diff1 value: 3.0335517475367166 - type: nauc_recall_at_10_max value: 42.21620215030217 - type: nauc_recall_at_10_std value: 21.901597323159606 - type: nauc_recall_at_1_diff1 value: 23.473291302576396 - type: nauc_recall_at_1_max value: 44.68942599764586 - type: nauc_recall_at_1_std value: 12.424377262427253 - type: nauc_recall_at_20_diff1 value: 0.40872018437190905 - type: nauc_recall_at_20_max value: 38.485034773895734 - type: nauc_recall_at_20_std value: 25.077397979916693 - type: nauc_recall_at_3_diff1 value: 10.408327736589843 - type: nauc_recall_at_3_max value: 46.75721628917507 - type: nauc_recall_at_3_std value: 15.625943549268664 - type: nauc_recall_at_5_diff1 value: 7.326752744229548 - type: nauc_recall_at_5_max value: 45.89190518573557 - type: nauc_recall_at_5_std value: 19.01717163438958 - type: ndcg_at_1 value: 14.532 - type: ndcg_at_10 value: 27.025 - type: ndcg_at_100 value: 33.305 - type: ndcg_at_1000 value: 36.38 - type: ndcg_at_20 value: 29.443 - type: ndcg_at_3 value: 22.035 - type: ndcg_at_5 value: 24.319 - type: precision_at_1 value: 14.532 - type: precision_at_10 value: 4.115 - type: precision_at_100 value: 0.717 - type: 
precision_at_1000 value: 0.097 - type: precision_at_20 value: 2.536 - type: precision_at_3 value: 9.085 - type: precision_at_5 value: 6.563 - type: recall_at_1 value: 14.532 - type: recall_at_10 value: 41.154 - type: recall_at_100 value: 71.651 - type: recall_at_1000 value: 96.841 - type: recall_at_20 value: 50.71600000000001 - type: recall_at_3 value: 27.254 - type: recall_at_5 value: 32.814 - task: type: Retrieval dataset: name: MTEB MintakaRetrieval (es) type: jinaai/mintakaqa config: es split: test revision: efa78cc2f74bbcd21eff2261f9e13aebe40b814e metrics: - type: main_score value: 26.912000000000003 - type: map_at_1 value: 14.686 - type: map_at_10 value: 22.569 - type: map_at_100 value: 23.679 - type: map_at_1000 value: 23.777 - type: map_at_20 value: 23.169 - type: map_at_3 value: 20.201 - type: map_at_5 value: 21.566 - type: mrr_at_1 value: 14.686468646864686 - type: mrr_at_10 value: 22.569346220336296 - type: mrr_at_100 value: 23.678819125817146 - type: mrr_at_1000 value: 23.77713511338264 - type: mrr_at_20 value: 23.16850858443442 - type: mrr_at_3 value: 20.200770077007665 - type: mrr_at_5 value: 21.56628162816276 - type: nauc_map_at_1000_diff1 value: 14.129007578838381 - type: nauc_map_at_1000_max value: 44.4255501141499 - type: nauc_map_at_1000_std value: 19.95906154868176 - type: nauc_map_at_100_diff1 value: 14.09071870575231 - type: nauc_map_at_100_max value: 44.403179928955566 - type: nauc_map_at_100_std value: 20.00413657519976 - type: nauc_map_at_10_diff1 value: 14.149535953153688 - type: nauc_map_at_10_max value: 44.66529917634685 - type: nauc_map_at_10_std value: 19.580235989479394 - type: nauc_map_at_1_diff1 value: 23.489813522176636 - type: nauc_map_at_1_max value: 46.54578639925787 - type: nauc_map_at_1_std value: 16.39083721709994 - type: nauc_map_at_20_diff1 value: 14.021560420656181 - type: nauc_map_at_20_max value: 44.4825455452467 - type: nauc_map_at_20_std value: 19.886927750826878 - type: nauc_map_at_3_diff1 value: 16.182977890477723 - type: nauc_map_at_3_max value: 46.1840554029258 - type: nauc_map_at_3_std value: 18.735671900228958 - type: nauc_map_at_5_diff1 value: 14.779126395472833 - type: nauc_map_at_5_max value: 45.23237213817556 - type: nauc_map_at_5_std value: 19.348508580412872 - type: nauc_mrr_at_1000_diff1 value: 14.129007578838381 - type: nauc_mrr_at_1000_max value: 44.4255501141499 - type: nauc_mrr_at_1000_std value: 19.95906154868176 - type: nauc_mrr_at_100_diff1 value: 14.09071870575231 - type: nauc_mrr_at_100_max value: 44.403179928955566 - type: nauc_mrr_at_100_std value: 20.00413657519976 - type: nauc_mrr_at_10_diff1 value: 14.149535953153688 - type: nauc_mrr_at_10_max value: 44.66529917634685 - type: nauc_mrr_at_10_std value: 19.580235989479394 - type: nauc_mrr_at_1_diff1 value: 23.489813522176636 - type: nauc_mrr_at_1_max value: 46.54578639925787 - type: nauc_mrr_at_1_std value: 16.39083721709994 - type: nauc_mrr_at_20_diff1 value: 14.021560420656181 - type: nauc_mrr_at_20_max value: 44.4825455452467 - type: nauc_mrr_at_20_std value: 19.886927750826878 - type: nauc_mrr_at_3_diff1 value: 16.182977890477723 - type: nauc_mrr_at_3_max value: 46.1840554029258 - type: nauc_mrr_at_3_std value: 18.735671900228958 - type: nauc_mrr_at_5_diff1 value: 14.779126395472833 - type: nauc_mrr_at_5_max value: 45.23237213817556 - type: nauc_mrr_at_5_std value: 19.348508580412872 - type: nauc_ndcg_at_1000_diff1 value: 11.762470380481101 - type: nauc_ndcg_at_1000_max value: 42.8233203033089 - type: nauc_ndcg_at_1000_std value: 21.78503705117719 - type: 
nauc_ndcg_at_100_diff1 value: 10.45886076220022 - type: nauc_ndcg_at_100_max value: 41.85472899256818 - type: nauc_ndcg_at_100_std value: 23.20955486335138 - type: nauc_ndcg_at_10_diff1 value: 10.605912468659469 - type: nauc_ndcg_at_10_max value: 43.150942448104715 - type: nauc_ndcg_at_10_std value: 21.120035764826085 - type: nauc_ndcg_at_1_diff1 value: 23.489813522176636 - type: nauc_ndcg_at_1_max value: 46.54578639925787 - type: nauc_ndcg_at_1_std value: 16.39083721709994 - type: nauc_ndcg_at_20_diff1 value: 10.11291783888644 - type: nauc_ndcg_at_20_max value: 42.51260678842788 - type: nauc_ndcg_at_20_std value: 22.1744949382252 - type: nauc_ndcg_at_3_diff1 value: 14.25625326760802 - type: nauc_ndcg_at_3_max value: 45.96162916377383 - type: nauc_ndcg_at_3_std value: 19.557832728215523 - type: nauc_ndcg_at_5_diff1 value: 11.956317653823053 - type: nauc_ndcg_at_5_max value: 44.35971268886807 - type: nauc_ndcg_at_5_std value: 20.581696730374233 - type: nauc_precision_at_1000_diff1 value: 5.132291843566577 - type: nauc_precision_at_1000_max value: 25.293354576835263 - type: nauc_precision_at_1000_std value: 40.36005126087624 - type: nauc_precision_at_100_diff1 value: -1.5252854375008238 - type: nauc_precision_at_100_max value: 31.007586474495984 - type: nauc_precision_at_100_std value: 37.297552993548386 - type: nauc_precision_at_10_diff1 value: 1.9663657370770737 - type: nauc_precision_at_10_max value: 39.194092293625125 - type: nauc_precision_at_10_std value: 24.956542621999542 - type: nauc_precision_at_1_diff1 value: 23.489813522176636 - type: nauc_precision_at_1_max value: 46.54578639925787 - type: nauc_precision_at_1_std value: 16.39083721709994 - type: nauc_precision_at_20_diff1 value: 0.011112090390932373 - type: nauc_precision_at_20_max value: 36.9357074392519 - type: nauc_precision_at_20_std value: 28.611387115093876 - type: nauc_precision_at_3_diff1 value: 9.596831091013703 - type: nauc_precision_at_3_max value: 45.3905541893809 - type: nauc_precision_at_3_std value: 21.599314388526945 - type: nauc_precision_at_5_diff1 value: 5.175887949900142 - type: nauc_precision_at_5_max value: 42.129467510414464 - type: nauc_precision_at_5_std value: 23.607251548776677 - type: nauc_recall_at_1000_diff1 value: 5.132291843566257 - type: nauc_recall_at_1000_max value: 25.29335457683396 - type: nauc_recall_at_1000_std value: 40.36005126087638 - type: nauc_recall_at_100_diff1 value: -1.5252854375008988 - type: nauc_recall_at_100_max value: 31.00758647449594 - type: nauc_recall_at_100_std value: 37.29755299354834 - type: nauc_recall_at_10_diff1 value: 1.9663657370770793 - type: nauc_recall_at_10_max value: 39.19409229362512 - type: nauc_recall_at_10_std value: 24.956542621999546 - type: nauc_recall_at_1_diff1 value: 23.489813522176636 - type: nauc_recall_at_1_max value: 46.54578639925787 - type: nauc_recall_at_1_std value: 16.39083721709994 - type: nauc_recall_at_20_diff1 value: 0.011112090390923075 - type: nauc_recall_at_20_max value: 36.93570743925189 - type: nauc_recall_at_20_std value: 28.611387115093883 - type: nauc_recall_at_3_diff1 value: 9.596831091013714 - type: nauc_recall_at_3_max value: 45.39055418938087 - type: nauc_recall_at_3_std value: 21.599314388526956 - type: nauc_recall_at_5_diff1 value: 5.17588794990012 - type: nauc_recall_at_5_max value: 42.12946751041448 - type: nauc_recall_at_5_std value: 23.607251548776695 - type: ndcg_at_1 value: 14.686 - type: ndcg_at_10 value: 26.912000000000003 - type: ndcg_at_100 value: 32.919 - type: ndcg_at_1000 value: 36.119 - type: ndcg_at_20 value: 
29.079 - type: ndcg_at_3 value: 21.995 - type: ndcg_at_5 value: 24.474999999999998 - type: precision_at_1 value: 14.686 - type: precision_at_10 value: 4.08 - type: precision_at_100 value: 0.703 - type: precision_at_1000 value: 0.097 - type: precision_at_20 value: 2.467 - type: precision_at_3 value: 9.062000000000001 - type: precision_at_5 value: 6.65 - type: recall_at_1 value: 14.686 - type: recall_at_10 value: 40.8 - type: recall_at_100 value: 70.338 - type: recall_at_1000 value: 96.82300000000001 - type: recall_at_20 value: 49.34 - type: recall_at_3 value: 27.186 - type: recall_at_5 value: 33.251 - task: type: Retrieval dataset: name: MTEB MintakaRetrieval (fr) type: jinaai/mintakaqa config: fr split: test revision: efa78cc2f74bbcd21eff2261f9e13aebe40b814e metrics: - type: main_score value: 26.909 - type: map_at_1 value: 14.701 - type: map_at_10 value: 22.613 - type: map_at_100 value: 23.729 - type: map_at_1000 value: 23.837 - type: map_at_20 value: 23.262 - type: map_at_3 value: 20.236 - type: map_at_5 value: 21.673000000000002 - type: mrr_at_1 value: 14.7010647010647 - type: mrr_at_10 value: 22.613165113165113 - type: mrr_at_100 value: 23.72877605989423 - type: mrr_at_1000 value: 23.837150802746805 - type: mrr_at_20 value: 23.261627081110596 - type: mrr_at_3 value: 20.2361452361452 - type: mrr_at_5 value: 21.673491673491625 - type: nauc_map_at_1000_diff1 value: 17.08927788889635 - type: nauc_map_at_1000_max value: 47.240929150603336 - type: nauc_map_at_1000_std value: 20.559244258100275 - type: nauc_map_at_100_diff1 value: 17.029461792796777 - type: nauc_map_at_100_max value: 47.207381115550696 - type: nauc_map_at_100_std value: 20.581498156895265 - type: nauc_map_at_10_diff1 value: 17.351456007804536 - type: nauc_map_at_10_max value: 47.815880040221344 - type: nauc_map_at_10_std value: 20.292999107555794 - type: nauc_map_at_1_diff1 value: 27.297525357600776 - type: nauc_map_at_1_max value: 47.18835074959486 - type: nauc_map_at_1_std value: 18.304203168281834 - type: nauc_map_at_20_diff1 value: 17.157460199542136 - type: nauc_map_at_20_max value: 47.4776610667456 - type: nauc_map_at_20_std value: 20.499186342964478 - type: nauc_map_at_3_diff1 value: 19.393119961356277 - type: nauc_map_at_3_max value: 49.02841822452882 - type: nauc_map_at_3_std value: 19.293122796321292 - type: nauc_map_at_5_diff1 value: 17.76275044752008 - type: nauc_map_at_5_max value: 48.01292548040298 - type: nauc_map_at_5_std value: 19.928449977400504 - type: nauc_mrr_at_1000_diff1 value: 17.08927788889635 - type: nauc_mrr_at_1000_max value: 47.240929150603336 - type: nauc_mrr_at_1000_std value: 20.559244258100275 - type: nauc_mrr_at_100_diff1 value: 17.029461792796777 - type: nauc_mrr_at_100_max value: 47.207381115550696 - type: nauc_mrr_at_100_std value: 20.581498156895265 - type: nauc_mrr_at_10_diff1 value: 17.351456007804536 - type: nauc_mrr_at_10_max value: 47.815880040221344 - type: nauc_mrr_at_10_std value: 20.292999107555794 - type: nauc_mrr_at_1_diff1 value: 27.297525357600776 - type: nauc_mrr_at_1_max value: 47.18835074959486 - type: nauc_mrr_at_1_std value: 18.304203168281834 - type: nauc_mrr_at_20_diff1 value: 17.157460199542136 - type: nauc_mrr_at_20_max value: 47.4776610667456 - type: nauc_mrr_at_20_std value: 20.499186342964478 - type: nauc_mrr_at_3_diff1 value: 19.393119961356277 - type: nauc_mrr_at_3_max value: 49.02841822452882 - type: nauc_mrr_at_3_std value: 19.293122796321292 - type: nauc_mrr_at_5_diff1 value: 17.76275044752008 - type: nauc_mrr_at_5_max value: 48.01292548040298 - type: 
nauc_mrr_at_5_std value: 19.928449977400504 - type: nauc_ndcg_at_1000_diff1 value: 13.989496006047975 - type: nauc_ndcg_at_1000_max value: 45.626323944336114 - type: nauc_ndcg_at_1000_std value: 22.125600410796515 - type: nauc_ndcg_at_100_diff1 value: 12.302204843705244 - type: nauc_ndcg_at_100_max value: 44.46856314559079 - type: nauc_ndcg_at_100_std value: 23.084984546328677 - type: nauc_ndcg_at_10_diff1 value: 14.001226213368275 - type: nauc_ndcg_at_10_max value: 47.37780636546918 - type: nauc_ndcg_at_10_std value: 21.702709032840637 - type: nauc_ndcg_at_1_diff1 value: 27.297525357600776 - type: nauc_ndcg_at_1_max value: 47.18835074959486 - type: nauc_ndcg_at_1_std value: 18.304203168281834 - type: nauc_ndcg_at_20_diff1 value: 13.317759910171056 - type: nauc_ndcg_at_20_max value: 46.25171251043813 - type: nauc_ndcg_at_20_std value: 22.309331575402595 - type: nauc_ndcg_at_3_diff1 value: 17.555381234893872 - type: nauc_ndcg_at_3_max value: 49.48635590260059 - type: nauc_ndcg_at_3_std value: 19.734570962933674 - type: nauc_ndcg_at_5_diff1 value: 14.844841165765061 - type: nauc_ndcg_at_5_max value: 47.76437065028708 - type: nauc_ndcg_at_5_std value: 20.816034479453954 - type: nauc_precision_at_1000_diff1 value: -15.591898698252546 - type: nauc_precision_at_1000_max value: 20.545984285353892 - type: nauc_precision_at_1000_std value: 38.9013414992826 - type: nauc_precision_at_100_diff1 value: -5.290395978742176 - type: nauc_precision_at_100_max value: 31.340480360546845 - type: nauc_precision_at_100_std value: 33.6897935720505 - type: nauc_precision_at_10_diff1 value: 5.965001997926562 - type: nauc_precision_at_10_max value: 46.12515296162247 - type: nauc_precision_at_10_std value: 25.409433135253558 - type: nauc_precision_at_1_diff1 value: 27.297525357600776 - type: nauc_precision_at_1_max value: 47.18835074959486 - type: nauc_precision_at_1_std value: 18.304203168281834 - type: nauc_precision_at_20_diff1 value: 3.4438127279827744 - type: nauc_precision_at_20_max value: 42.36095587714494 - type: nauc_precision_at_20_std value: 27.367900512797906 - type: nauc_precision_at_3_diff1 value: 13.165017224718916 - type: nauc_precision_at_3_max value: 50.58931825484506 - type: nauc_precision_at_3_std value: 20.852009214609442 - type: nauc_precision_at_5_diff1 value: 7.840087177549876 - type: nauc_precision_at_5_max value: 46.99388755575109 - type: nauc_precision_at_5_std value: 23.048702393099834 - type: nauc_recall_at_1000_diff1 value: -15.591898698252932 - type: nauc_recall_at_1000_max value: 20.5459842853537 - type: nauc_recall_at_1000_std value: 38.901341499282395 - type: nauc_recall_at_100_diff1 value: -5.290395978742165 - type: nauc_recall_at_100_max value: 31.340480360546863 - type: nauc_recall_at_100_std value: 33.68979357205046 - type: nauc_recall_at_10_diff1 value: 5.96500199792656 - type: nauc_recall_at_10_max value: 46.1251529616225 - type: nauc_recall_at_10_std value: 25.409433135253543 - type: nauc_recall_at_1_diff1 value: 27.297525357600776 - type: nauc_recall_at_1_max value: 47.18835074959486 - type: nauc_recall_at_1_std value: 18.304203168281834 - type: nauc_recall_at_20_diff1 value: 3.4438127279827833 - type: nauc_recall_at_20_max value: 42.36095587714498 - type: nauc_recall_at_20_std value: 27.36790051279787 - type: nauc_recall_at_3_diff1 value: 13.165017224718916 - type: nauc_recall_at_3_max value: 50.589318254845054 - type: nauc_recall_at_3_std value: 20.852009214609435 - type: nauc_recall_at_5_diff1 value: 7.840087177549891 - type: nauc_recall_at_5_max value: 46.99388755575112 - 
type: nauc_recall_at_5_std value: 23.048702393099845 - type: ndcg_at_1 value: 14.701 - type: ndcg_at_10 value: 26.909 - type: ndcg_at_100 value: 32.727000000000004 - type: ndcg_at_1000 value: 36.086 - type: ndcg_at_20 value: 29.236 - type: ndcg_at_3 value: 22.004 - type: ndcg_at_5 value: 24.615000000000002 - type: precision_at_1 value: 14.701 - type: precision_at_10 value: 4.062 - type: precision_at_100 value: 0.688 - type: precision_at_1000 value: 0.096 - type: precision_at_20 value: 2.488 - type: precision_at_3 value: 9.036 - type: precision_at_5 value: 6.699 - type: recall_at_1 value: 14.701 - type: recall_at_10 value: 40.622 - type: recall_at_100 value: 68.796 - type: recall_at_1000 value: 96.314 - type: recall_at_20 value: 49.754 - type: recall_at_3 value: 27.108999999999998 - type: recall_at_5 value: 33.497 - task: type: Classification dataset: name: MTEB MultilingualSentiment (default) type: C-MTEB/MultilingualSentiment-classification config: default split: test revision: 46958b007a63fdbf239b7672c25d0bea67b5ea1a metrics: - type: accuracy value: 73.20999999999998 - type: f1 value: 73.18755986777474 - type: f1_weighted value: 73.18755986777475 - type: main_score value: 73.20999999999998 - task: type: Retrieval dataset: name: MTEB NFCorpus (default) type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: map_at_1 value: 4.822 - type: map_at_10 value: 13.144 - type: map_at_100 value: 17.254 - type: map_at_1000 value: 18.931 - type: map_at_20 value: 14.834 - type: map_at_3 value: 8.975 - type: map_at_5 value: 10.922 - type: mrr_at_1 value: 47.059 - type: mrr_at_10 value: 55.806999999999995 - type: mrr_at_100 value: 56.286 - type: mrr_at_1000 value: 56.327000000000005 - type: mrr_at_20 value: 56.00000000000001 - type: mrr_at_3 value: 54.17999999999999 - type: mrr_at_5 value: 55.155 - type: ndcg_at_1 value: 44.427 - type: ndcg_at_10 value: 36.623 - type: ndcg_at_100 value: 33.664 - type: ndcg_at_1000 value: 42.538 - type: ndcg_at_20 value: 34.066 - type: ndcg_at_3 value: 41.118 - type: ndcg_at_5 value: 39.455 - type: precision_at_1 value: 46.44 - type: precision_at_10 value: 28.607 - type: precision_at_100 value: 9.189 - type: precision_at_1000 value: 2.261 - type: precision_at_20 value: 21.238 - type: precision_at_3 value: 39.628 - type: precision_at_5 value: 35.604 - type: recall_at_1 value: 4.822 - type: recall_at_10 value: 17.488999999999997 - type: recall_at_100 value: 35.052 - type: recall_at_1000 value: 66.67999999999999 - type: recall_at_20 value: 21.343999999999998 - type: recall_at_3 value: 10.259 - type: recall_at_5 value: 13.406 - type: main_score value: 36.623 - task: type: Retrieval dataset: name: MTEB NQ (default) type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: map_at_1 value: 41.411 - type: map_at_10 value: 57.179 - type: map_at_100 value: 57.945 - type: map_at_1000 value: 57.967999999999996 - type: map_at_20 value: 57.687 - type: map_at_3 value: 53.46300000000001 - type: map_at_5 value: 55.696999999999996 - type: mrr_at_1 value: 46.233999999999995 - type: mrr_at_10 value: 59.831999999999994 - type: mrr_at_100 value: 60.33500000000001 - type: mrr_at_1000 value: 60.348 - type: mrr_at_20 value: 60.167 - type: mrr_at_3 value: 56.972 - type: mrr_at_5 value: 58.74 - type: ndcg_at_1 value: 46.205 - type: ndcg_at_10 value: 64.23100000000001 - type: ndcg_at_100 value: 67.242 - type: ndcg_at_1000 value: 67.72500000000001 - type: ndcg_at_20 value: 
65.77300000000001 - type: ndcg_at_3 value: 57.516 - type: ndcg_at_5 value: 61.11600000000001 - type: precision_at_1 value: 46.205 - type: precision_at_10 value: 9.873 - type: precision_at_100 value: 1.158 - type: precision_at_1000 value: 0.12 - type: precision_at_20 value: 5.319 - type: precision_at_3 value: 25.424999999999997 - type: precision_at_5 value: 17.375 - type: recall_at_1 value: 41.411 - type: recall_at_10 value: 82.761 - type: recall_at_100 value: 95.52199999999999 - type: recall_at_1000 value: 99.02499999999999 - type: recall_at_20 value: 88.34 - type: recall_at_3 value: 65.73 - type: recall_at_5 value: 73.894 - type: main_score value: 64.23100000000001 - task: type: PairClassification dataset: name: MTEB Ocnli (default) type: C-MTEB/OCNLI config: default split: validation revision: 66e76a618a34d6d565d5538088562851e6daa7ec metrics: - type: cosine_accuracy value: 62.3714131023281 - type: cosine_accuracy_threshold value: 79.70921993255615 - type: cosine_ap value: 66.41380155495659 - type: cosine_f1 value: 68.89547185780786 - type: cosine_f1_threshold value: 72.91591167449951 - type: cosine_precision value: 57.485875706214685 - type: cosine_recall value: 85.95564941921859 - type: dot_accuracy value: 60.47644829453167 - type: dot_accuracy_threshold value: 36627.362060546875 - type: dot_ap value: 63.696303449293204 - type: dot_f1 value: 68.3986041101202 - type: dot_f1_threshold value: 30452.72216796875 - type: dot_precision value: 54.04411764705882 - type: dot_recall value: 93.13621964097149 - type: euclidean_accuracy value: 63.02111532214402 - type: euclidean_accuracy_threshold value: 1392.76762008667 - type: euclidean_ap value: 66.65907089443218 - type: euclidean_f1 value: 69.05036524413688 - type: euclidean_f1_threshold value: 1711.5310668945312 - type: euclidean_precision value: 54.29262394195889 - type: euclidean_recall value: 94.82576557550159 - type: main_score value: 63.02111532214402 - type: manhattan_accuracy value: 62.75040606388739 - type: manhattan_accuracy_threshold value: 32475.347900390625 - type: manhattan_ap value: 66.50943585125434 - type: manhattan_f1 value: 69.08382066276802 - type: manhattan_f1_threshold value: 41238.470458984375 - type: manhattan_precision value: 54.75896168108776 - type: manhattan_recall value: 93.55860612460401 - type: max_accuracy value: 63.02111532214402 - type: max_ap value: 66.65907089443218 - type: max_f1 value: 69.08382066276802 - type: max_precision value: 57.485875706214685 - type: max_recall value: 94.82576557550159 - type: similarity_accuracy value: 62.3714131023281 - type: similarity_accuracy_threshold value: 79.70921993255615 - type: similarity_ap value: 66.41380155495659 - type: similarity_f1 value: 68.89547185780786 - type: similarity_f1_threshold value: 72.91591167449951 - type: similarity_precision value: 57.485875706214685 - type: similarity_recall value: 85.95564941921859 - task: type: Classification dataset: name: MTEB OnlineShopping (default) type: C-MTEB/OnlineShopping-classification config: default split: test revision: e610f2ebd179a8fda30ae534c3878750a96db120 metrics: - type: accuracy value: 91.88000000000001 - type: ap value: 89.52463684448476 - type: ap_weighted value: 89.52463684448476 - type: f1 value: 91.86313022306673 - type: f1_weighted value: 91.87806318146912 - type: main_score value: 91.88000000000001 - task: type: PairClassification dataset: name: MTEB OpusparcusPC (en) type: GEM/opusparcus config: en split: test.full revision: 9e9b1f8ef51616073f47f306f7f47dd91663f86a metrics: - type: cosine_accuracy value: 
92.65578635014838 - type: cosine_accuracy_threshold value: 74.02530312538147 - type: cosine_ap value: 98.3834226153613 - type: cosine_f1 value: 94.92567913890312 - type: cosine_f1_threshold value: 74.02530312538147 - type: cosine_precision value: 95.562435500516 - type: cosine_recall value: 94.29735234215886 - type: dot_accuracy value: 91.54302670623146 - type: dot_accuracy_threshold value: 34452.29187011719 - type: dot_ap value: 98.1237257754439 - type: dot_f1 value: 94.22400803616273 - type: dot_f1_threshold value: 33670.41931152344 - type: dot_precision value: 92.9633300297324 - type: dot_recall value: 95.5193482688391 - type: euclidean_accuracy value: 92.28486646884274 - type: euclidean_accuracy_threshold value: 1602.8022766113281 - type: euclidean_ap value: 98.3099021504706 - type: euclidean_f1 value: 94.75277497477296 - type: euclidean_f1_threshold value: 1604.7462463378906 - type: euclidean_precision value: 93.89999999999999 - type: euclidean_recall value: 95.62118126272912 - type: main_score value: 98.3834226153613 - type: manhattan_accuracy value: 92.2106824925816 - type: manhattan_accuracy_threshold value: 38872.90954589844 - type: manhattan_ap value: 98.28694101230218 - type: manhattan_f1 value: 94.67815509376584 - type: manhattan_f1_threshold value: 38872.90954589844 - type: manhattan_precision value: 94.24823410696267 - type: manhattan_recall value: 95.11201629327903 - type: max_accuracy value: 92.65578635014838 - type: max_ap value: 98.3834226153613 - type: max_f1 value: 94.92567913890312 - type: max_precision value: 95.562435500516 - type: max_recall value: 95.62118126272912 - type: similarity_accuracy value: 92.65578635014838 - type: similarity_accuracy_threshold value: 74.02530312538147 - type: similarity_ap value: 98.3834226153613 - type: similarity_f1 value: 94.92567913890312 - type: similarity_f1_threshold value: 74.02530312538147 - type: similarity_precision value: 95.562435500516 - type: similarity_recall value: 94.29735234215886 - task: type: PairClassification dataset: name: MTEB OpusparcusPC (de) type: GEM/opusparcus config: de split: test.full revision: 9e9b1f8ef51616073f47f306f7f47dd91663f86a metrics: - type: cosine_accuracy value: 87.72178850248403 - type: cosine_accuracy_threshold value: 73.33863377571106 - type: cosine_ap value: 96.98901408834976 - type: cosine_f1 value: 91.89944134078212 - type: cosine_f1_threshold value: 71.45810127258301 - type: cosine_precision value: 89.64577656675749 - type: cosine_recall value: 94.26934097421203 - type: dot_accuracy value: 86.30234208658624 - type: dot_accuracy_threshold value: 32027.130126953125 - type: dot_ap value: 96.12260574893256 - type: dot_f1 value: 91.31602506714414 - type: dot_f1_threshold value: 30804.376220703125 - type: dot_precision value: 85.93091828138164 - type: dot_recall value: 97.42120343839542 - type: euclidean_accuracy value: 87.9347054648687 - type: euclidean_accuracy_threshold value: 1609.6670150756836 - type: euclidean_ap value: 97.00238860358252 - type: euclidean_f1 value: 92.1089063221043 - type: euclidean_f1_threshold value: 1641.8487548828125 - type: euclidean_precision value: 89.10714285714286 - type: euclidean_recall value: 95.31996179560649 - type: main_score value: 97.00238860358252 - type: manhattan_accuracy value: 87.72178850248403 - type: manhattan_accuracy_threshold value: 40137.060546875 - type: manhattan_ap value: 96.98653728159941 - type: manhattan_f1 value: 92.03865623561896 - type: manhattan_f1_threshold value: 40137.060546875 - type: manhattan_precision value: 88.80994671403198 
- type: manhattan_recall value: 95.51098376313276 - type: max_accuracy value: 87.9347054648687 - type: max_ap value: 97.00238860358252 - type: max_f1 value: 92.1089063221043 - type: max_precision value: 89.64577656675749 - type: max_recall value: 97.42120343839542 - type: similarity_accuracy value: 87.72178850248403 - type: similarity_accuracy_threshold value: 73.33863377571106 - type: similarity_ap value: 96.98901408834976 - type: similarity_f1 value: 91.89944134078212 - type: similarity_f1_threshold value: 71.45810127258301 - type: similarity_precision value: 89.64577656675749 - type: similarity_recall value: 94.26934097421203 - task: type: PairClassification dataset: name: MTEB OpusparcusPC (fr) type: GEM/opusparcus config: fr split: test.full revision: 9e9b1f8ef51616073f47f306f7f47dd91663f86a metrics: - type: cosine_accuracy value: 80.92643051771117 - type: cosine_accuracy_threshold value: 76.68856382369995 - type: cosine_ap value: 93.74622381534307 - type: cosine_f1 value: 87.12328767123287 - type: cosine_f1_threshold value: 71.64022922515869 - type: cosine_precision value: 80.64243448858834 - type: cosine_recall value: 94.73684210526315 - type: dot_accuracy value: 80.858310626703 - type: dot_accuracy_threshold value: 34028.3935546875 - type: dot_ap value: 91.18448457633308 - type: dot_f1 value: 86.82606657290202 - type: dot_f1_threshold value: 34028.3935546875 - type: dot_precision value: 82.2380106571936 - type: dot_recall value: 91.9563058589871 - type: euclidean_accuracy value: 80.858310626703 - type: euclidean_accuracy_threshold value: 1595.7651138305664 - type: euclidean_ap value: 93.8182717829648 - type: euclidean_f1 value: 87.04044117647058 - type: euclidean_f1_threshold value: 1609.2475891113281 - type: euclidean_precision value: 81.00940975192472 - type: euclidean_recall value: 94.04170804369414 - type: main_score value: 93.8182717829648 - type: manhattan_accuracy value: 80.99455040871935 - type: manhattan_accuracy_threshold value: 38092.132568359375 - type: manhattan_ap value: 93.77563401151711 - type: manhattan_f1 value: 86.91983122362869 - type: manhattan_f1_threshold value: 38092.132568359375 - type: manhattan_precision value: 82.32682060390763 - type: manhattan_recall value: 92.05561072492551 - type: max_accuracy value: 80.99455040871935 - type: max_ap value: 93.8182717829648 - type: max_f1 value: 87.12328767123287 - type: max_precision value: 82.32682060390763 - type: max_recall value: 94.73684210526315 - type: similarity_accuracy value: 80.92643051771117 - type: similarity_accuracy_threshold value: 76.68856382369995 - type: similarity_ap value: 93.74622381534307 - type: similarity_f1 value: 87.12328767123287 - type: similarity_f1_threshold value: 71.64022922515869 - type: similarity_precision value: 80.64243448858834 - type: similarity_recall value: 94.73684210526315 - task: type: PairClassification dataset: name: MTEB OpusparcusPC (ru) type: GEM/opusparcus config: ru split: test.full revision: 9e9b1f8ef51616073f47f306f7f47dd91663f86a metrics: - type: cosine_accuracy value: 76.83823529411765 - type: cosine_accuracy_threshold value: 72.70769476890564 - type: cosine_ap value: 89.56692049908222 - type: cosine_f1 value: 83.99832003359934 - type: cosine_f1_threshold value: 70.9052324295044 - type: cosine_precision value: 76.16146230007617 - type: cosine_recall value: 93.63295880149812 - type: dot_accuracy value: 76.28676470588235 - type: dot_accuracy_threshold value: 33740.68908691406 - type: dot_ap value: 87.77185177141567 - type: dot_f1 value: 83.62251375370292 - type: 
dot_f1_threshold value: 32726.611328125 - type: dot_precision value: 76.29343629343629 - type: dot_recall value: 92.50936329588015 - type: euclidean_accuracy value: 77.32843137254902 - type: euclidean_accuracy_threshold value: 1566.510009765625 - type: euclidean_ap value: 89.60605626791111 - type: euclidean_f1 value: 84.06546080964686 - type: euclidean_f1_threshold value: 1576.4202117919922 - type: euclidean_precision value: 77.83094098883574 - type: euclidean_recall value: 91.38576779026218 - type: main_score value: 89.60605626791111 - type: manhattan_accuracy value: 76.89950980392157 - type: manhattan_accuracy_threshold value: 38202.215576171875 - type: manhattan_ap value: 89.55766894104868 - type: manhattan_f1 value: 83.80462724935732 - type: manhattan_f1_threshold value: 38934.375 - type: manhattan_precision value: 77.25118483412322 - type: manhattan_recall value: 91.57303370786516 - type: max_accuracy value: 77.32843137254902 - type: max_ap value: 89.60605626791111 - type: max_f1 value: 84.06546080964686 - type: max_precision value: 77.83094098883574 - type: max_recall value: 93.63295880149812 - type: similarity_accuracy value: 76.83823529411765 - type: similarity_accuracy_threshold value: 72.70769476890564 - type: similarity_ap value: 89.56692049908222 - type: similarity_f1 value: 83.99832003359934 - type: similarity_f1_threshold value: 70.9052324295044 - type: similarity_precision value: 76.16146230007617 - type: similarity_recall value: 93.63295880149812 - task: type: Classification dataset: name: MTEB PAC (default) type: laugustyniak/abusive-clauses-pl config: default split: test revision: fc69d1c153a8ccdcf1eef52f4e2a27f88782f543 metrics: - type: accuracy value: 68.39559803069794 - type: ap value: 77.68074206719457 - type: ap_weighted value: 77.68074206719457 - type: f1 value: 66.23485605467732 - type: f1_weighted value: 69.03201442129347 - type: main_score value: 68.39559803069794 - task: type: STS dataset: name: MTEB PAWSX (default) type: C-MTEB/PAWSX config: default split: test revision: 9c6a90e430ac22b5779fb019a23e820b11a8b5e1 metrics: - type: cosine_pearson value: 13.161523266433587 - type: cosine_spearman value: 15.557333873773386 - type: euclidean_pearson value: 17.147508431907525 - type: euclidean_spearman value: 15.664112857732146 - type: main_score value: 15.557333873773386 - type: manhattan_pearson value: 17.130875906264386 - type: manhattan_spearman value: 15.624397342229637 - type: pearson value: 13.161523266433587 - type: spearman value: 15.557333873773386 - task: type: PairClassification dataset: name: MTEB PSC (default) type: PL-MTEB/psc-pairclassification config: default split: test revision: d05a294af9e1d3ff2bfb6b714e08a24a6cabc669 metrics: - type: cosine_accuracy value: 97.86641929499072 - type: cosine_accuracy_threshold value: 79.0391206741333 - type: cosine_ap value: 99.19403807771533 - type: cosine_f1 value: 96.45608628659475 - type: cosine_f1_threshold value: 79.0391206741333 - type: cosine_precision value: 97.50778816199377 - type: cosine_recall value: 95.42682926829268 - type: dot_accuracy value: 98.14471243042672 - type: dot_accuracy_threshold value: 29808.1787109375 - type: dot_ap value: 99.331999859971 - type: dot_f1 value: 97.01492537313433 - type: dot_f1_threshold value: 29808.1787109375 - type: dot_precision value: 95.02923976608187 - type: dot_recall value: 99.08536585365853 - type: euclidean_accuracy value: 97.49536178107606 - type: euclidean_accuracy_threshold value: 1276.227855682373 - type: euclidean_ap value: 98.91056467717377 - type: 
euclidean_f1 value: 95.83975346687212 - type: euclidean_f1_threshold value: 1276.227855682373 - type: euclidean_precision value: 96.88473520249221 - type: euclidean_recall value: 94.8170731707317 - type: main_score value: 99.331999859971 - type: manhattan_accuracy value: 97.49536178107606 - type: manhattan_accuracy_threshold value: 31097.674560546875 - type: manhattan_ap value: 98.95694691792707 - type: manhattan_f1 value: 95.83975346687212 - type: manhattan_f1_threshold value: 31097.674560546875 - type: manhattan_precision value: 96.88473520249221 - type: manhattan_recall value: 94.8170731707317 - type: max_accuracy value: 98.14471243042672 - type: max_ap value: 99.331999859971 - type: max_f1 value: 97.01492537313433 - type: max_precision value: 97.50778816199377 - type: max_recall value: 99.08536585365853 - type: similarity_accuracy value: 97.86641929499072 - type: similarity_accuracy_threshold value: 79.0391206741333 - type: similarity_ap value: 99.19403807771533 - type: similarity_f1 value: 96.45608628659475 - type: similarity_f1_threshold value: 79.0391206741333 - type: similarity_precision value: 97.50778816199377 - type: similarity_recall value: 95.42682926829268 - task: type: PairClassification dataset: name: MTEB PawsXPairClassification (en) type: google-research-datasets/paws-x config: en split: test revision: 8a04d940a42cd40658986fdd8e3da561533a3646 metrics: - type: cosine_accuracy value: 61.8 - type: cosine_accuracy_threshold value: 99.5664119720459 - type: cosine_ap value: 60.679317786040585 - type: cosine_f1 value: 63.17354143441101 - type: cosine_f1_threshold value: 97.22164869308472 - type: cosine_precision value: 47.6457399103139 - type: cosine_recall value: 93.71554575523705 - type: dot_accuracy value: 55.7 - type: dot_accuracy_threshold value: 48353.62548828125 - type: dot_ap value: 48.53805970536875 - type: dot_f1 value: 62.42214532871972 - type: dot_f1_threshold value: 38215.53955078125 - type: dot_precision value: 45.48663640948058 - type: dot_recall value: 99.44873208379272 - type: euclidean_accuracy value: 61.75000000000001 - type: euclidean_accuracy_threshold value: 189.0761137008667 - type: euclidean_ap value: 60.55517418691518 - type: euclidean_f1 value: 63.07977736549165 - type: euclidean_f1_threshold value: 504.3168067932129 - type: euclidean_precision value: 47.53914988814318 - type: euclidean_recall value: 93.71554575523705 - type: main_score value: 60.679317786040585 - type: manhattan_accuracy value: 61.9 - type: manhattan_accuracy_threshold value: 4695.778274536133 - type: manhattan_ap value: 60.48686620413608 - type: manhattan_f1 value: 62.92880855772778 - type: manhattan_f1_threshold value: 12542.36831665039 - type: manhattan_precision value: 47.28381374722838 - type: manhattan_recall value: 94.04630650496141 - type: max_accuracy value: 61.9 - type: max_ap value: 60.679317786040585 - type: max_f1 value: 63.17354143441101 - type: max_precision value: 47.6457399103139 - type: max_recall value: 99.44873208379272 - type: similarity_accuracy value: 61.8 - type: similarity_accuracy_threshold value: 99.5664119720459 - type: similarity_ap value: 60.679317786040585 - type: similarity_f1 value: 63.17354143441101 - type: similarity_f1_threshold value: 97.22164869308472 - type: similarity_precision value: 47.6457399103139 - type: similarity_recall value: 93.71554575523705 - task: type: PairClassification dataset: name: MTEB PawsXPairClassification (de) type: google-research-datasets/paws-x config: de split: test revision: 8a04d940a42cd40658986fdd8e3da561533a3646 
metrics: - type: cosine_accuracy value: 60.25 - type: cosine_accuracy_threshold value: 99.54338073730469 - type: cosine_ap value: 56.7863613689054 - type: cosine_f1 value: 62.23499820337766 - type: cosine_f1_threshold value: 89.95014429092407 - type: cosine_precision value: 45.86864406779661 - type: cosine_recall value: 96.75977653631284 - type: dot_accuracy value: 56.8 - type: dot_accuracy_threshold value: 47349.78332519531 - type: dot_ap value: 49.7857806061729 - type: dot_f1 value: 62.31225986727209 - type: dot_f1_threshold value: 30143.206787109375 - type: dot_precision value: 45.32520325203252 - type: dot_recall value: 99.66480446927373 - type: euclidean_accuracy value: 60.3 - type: euclidean_accuracy_threshold value: 219.78106498718262 - type: euclidean_ap value: 56.731544327179606 - type: euclidean_f1 value: 62.19895287958115 - type: euclidean_f1_threshold value: 1792.1623229980469 - type: euclidean_precision value: 45.22842639593909 - type: euclidean_recall value: 99.55307262569832 - type: main_score value: 56.7863613689054 - type: manhattan_accuracy value: 60.150000000000006 - type: manhattan_accuracy_threshold value: 5104.503631591797 - type: manhattan_ap value: 56.70304479768734 - type: manhattan_f1 value: 62.22067039106145 - type: manhattan_f1_threshold value: 42839.471435546875 - type: manhattan_precision value: 45.2513966480447 - type: manhattan_recall value: 99.55307262569832 - type: max_accuracy value: 60.3 - type: max_ap value: 56.7863613689054 - type: max_f1 value: 62.31225986727209 - type: max_precision value: 45.86864406779661 - type: max_recall value: 99.66480446927373 - type: similarity_accuracy value: 60.25 - type: similarity_accuracy_threshold value: 99.54338073730469 - type: similarity_ap value: 56.7863613689054 - type: similarity_f1 value: 62.23499820337766 - type: similarity_f1_threshold value: 89.95014429092407 - type: similarity_precision value: 45.86864406779661 - type: similarity_recall value: 96.75977653631284 - task: type: PairClassification dataset: name: MTEB PawsXPairClassification (es) type: google-research-datasets/paws-x config: es split: test revision: 8a04d940a42cd40658986fdd8e3da561533a3646 metrics: - type: cosine_accuracy value: 59.699999999999996 - type: cosine_accuracy_threshold value: 99.55930709838867 - type: cosine_ap value: 57.31662248806265 - type: cosine_f1 value: 62.444061962134256 - type: cosine_f1_threshold value: 74.75898265838623 - type: cosine_precision value: 45.3953953953954 - type: cosine_recall value: 100.0 - type: dot_accuracy value: 55.900000000000006 - type: dot_accuracy_threshold value: 47512.90283203125 - type: dot_ap value: 49.39339147787568 - type: dot_f1 value: 62.487082328625554 - type: dot_f1_threshold value: 34989.03503417969 - type: dot_precision value: 45.44088176352705 - type: dot_recall value: 100.0 - type: euclidean_accuracy value: 59.599999999999994 - type: euclidean_accuracy_threshold value: 200.82547664642334 - type: euclidean_ap value: 57.19737488445163 - type: euclidean_f1 value: 62.444061962134256 - type: euclidean_f1_threshold value: 1538.8837814331055 - type: euclidean_precision value: 45.3953953953954 - type: euclidean_recall value: 100.0 - type: main_score value: 57.31662248806265 - type: manhattan_accuracy value: 59.550000000000004 - type: manhattan_accuracy_threshold value: 5016.501617431641 - type: manhattan_ap value: 57.089959907945065 - type: manhattan_f1 value: 62.444061962134256 - type: manhattan_f1_threshold value: 37523.53515625 - type: manhattan_precision value: 45.3953953953954 - type: 
manhattan_recall value: 100.0 - type: max_accuracy value: 59.699999999999996 - type: max_ap value: 57.31662248806265 - type: max_f1 value: 62.487082328625554 - type: max_precision value: 45.44088176352705 - type: max_recall value: 100.0 - type: similarity_accuracy value: 59.699999999999996 - type: similarity_accuracy_threshold value: 99.55930709838867 - type: similarity_ap value: 57.31662248806265 - type: similarity_f1 value: 62.444061962134256 - type: similarity_f1_threshold value: 74.75898265838623 - type: similarity_precision value: 45.3953953953954 - type: similarity_recall value: 100.0 - task: type: PairClassification dataset: name: MTEB PawsXPairClassification (fr) type: google-research-datasets/paws-x config: fr split: test revision: 8a04d940a42cd40658986fdd8e3da561533a3646 metrics: - type: cosine_accuracy value: 61.150000000000006 - type: cosine_accuracy_threshold value: 99.36153888702393 - type: cosine_ap value: 59.43845317938599 - type: cosine_f1 value: 62.51298026998961 - type: cosine_f1_threshold value: 76.77866220474243 - type: cosine_precision value: 45.468277945619334 - type: cosine_recall value: 100.0 - type: dot_accuracy value: 55.75 - type: dot_accuracy_threshold value: 48931.55212402344 - type: dot_ap value: 50.15949290538757 - type: dot_f1 value: 62.53462603878117 - type: dot_f1_threshold value: 34415.7958984375 - type: dot_precision value: 45.4911838790932 - type: dot_recall value: 100.0 - type: euclidean_accuracy value: 61.050000000000004 - type: euclidean_accuracy_threshold value: 240.8097267150879 - type: euclidean_ap value: 59.367971294226216 - type: euclidean_f1 value: 62.51298026998961 - type: euclidean_f1_threshold value: 1444.132423400879 - type: euclidean_precision value: 45.468277945619334 - type: euclidean_recall value: 100.0 - type: main_score value: 59.43845317938599 - type: manhattan_accuracy value: 60.95 - type: manhattan_accuracy_threshold value: 5701.206207275391 - type: manhattan_ap value: 59.30094096378774 - type: manhattan_f1 value: 62.53462603878117 - type: manhattan_f1_threshold value: 33445.672607421875 - type: manhattan_precision value: 45.4911838790932 - type: manhattan_recall value: 100.0 - type: max_accuracy value: 61.150000000000006 - type: max_ap value: 59.43845317938599 - type: max_f1 value: 62.53462603878117 - type: max_precision value: 45.4911838790932 - type: max_recall value: 100.0 - type: similarity_accuracy value: 61.150000000000006 - type: similarity_accuracy_threshold value: 99.36153888702393 - type: similarity_ap value: 59.43845317938599 - type: similarity_f1 value: 62.51298026998961 - type: similarity_f1_threshold value: 76.77866220474243 - type: similarity_precision value: 45.468277945619334 - type: similarity_recall value: 100.0 - task: type: PairClassification dataset: name: MTEB PawsXPairClassification (zh) type: google-research-datasets/paws-x config: zh split: test revision: 8a04d940a42cd40658986fdd8e3da561533a3646 metrics: - type: cosine_accuracy value: 58.85 - type: cosine_accuracy_threshold value: 99.73838329315186 - type: cosine_ap value: 54.66913160570546 - type: cosine_f1 value: 62.32136632973162 - type: cosine_f1_threshold value: 76.4499306678772 - type: cosine_precision value: 45.265822784810126 - type: cosine_recall value: 100.0 - type: dot_accuracy value: 56.25 - type: dot_accuracy_threshold value: 47351.9287109375 - type: dot_ap value: 48.5266232989438 - type: dot_f1 value: 62.277951933124356 - type: dot_f1_threshold value: 31325.28076171875 - type: dot_precision value: 45.220030349013655 - type: dot_recall 
value: 100.0 - type: euclidean_accuracy value: 58.9 - type: euclidean_accuracy_threshold value: 144.24468278884888 - type: euclidean_ap value: 54.66981490353506 - type: euclidean_f1 value: 62.32136632973162 - type: euclidean_f1_threshold value: 1484.908676147461 - type: euclidean_precision value: 45.265822784810126 - type: euclidean_recall value: 100.0 - type: main_score value: 54.66981490353506 - type: manhattan_accuracy value: 58.9 - type: manhattan_accuracy_threshold value: 3586.785125732422 - type: manhattan_ap value: 54.668355260247736 - type: manhattan_f1 value: 62.32136632973162 - type: manhattan_f1_threshold value: 36031.22863769531 - type: manhattan_precision value: 45.265822784810126 - type: manhattan_recall value: 100.0 - type: max_accuracy value: 58.9 - type: max_ap value: 54.66981490353506 - type: max_f1 value: 62.32136632973162 - type: max_precision value: 45.265822784810126 - type: max_recall value: 100.0 - type: similarity_accuracy value: 58.85 - type: similarity_accuracy_threshold value: 99.73838329315186 - type: similarity_ap value: 54.66913160570546 - type: similarity_f1 value: 62.32136632973162 - type: similarity_f1_threshold value: 76.4499306678772 - type: similarity_precision value: 45.265822784810126 - type: similarity_recall value: 100.0 - task: type: Classification dataset: name: MTEB PolEmo2.0-IN (default) type: PL-MTEB/polemo2_in config: default split: test revision: d90724373c70959f17d2331ad51fb60c71176b03 metrics: - type: accuracy value: 83.75346260387812 - type: f1 value: 81.98304891214909 - type: f1_weighted value: 84.29623200830078 - type: main_score value: 83.75346260387812 - task: type: Classification dataset: name: MTEB PolEmo2.0-OUT (default) type: PL-MTEB/polemo2_out config: default split: test revision: 6a21ab8716e255ab1867265f8b396105e8aa63d4 metrics: - type: accuracy value: 66.53846153846153 - type: f1 value: 52.71826064368638 - type: f1_weighted value: 69.10010124630334 - type: main_score value: 66.53846153846153 - task: type: PairClassification dataset: name: MTEB PPC type: PL-MTEB/ppc-pairclassification config: default split: test revision: None metrics: - type: cosine_accuracy value: 81.8 - type: cosine_accuracy_threshold value: 90.47793745994568 - type: cosine_ap value: 91.42490266080884 - type: cosine_f1 value: 85.4632587859425 - type: cosine_f1_threshold value: 90.47793745994568 - type: cosine_precision value: 82.56172839506173 - type: cosine_recall value: 88.57615894039735 - type: dot_accuracy value: 74.6 - type: dot_accuracy_threshold value: 42102.23693847656 - type: dot_ap value: 86.20060009096979 - type: dot_f1 value: 80.02842928216063 - type: dot_f1_threshold value: 38970.16906738281 - type: dot_precision value: 70.1120797011208 - type: dot_recall value: 93.21192052980133 - type: euclidean_accuracy value: 81.5 - type: euclidean_accuracy_threshold value: 880.433464050293 - type: euclidean_ap value: 91.33143477982087 - type: euclidean_f1 value: 85.44600938967135 - type: euclidean_f1_threshold value: 964.0384674072266 - type: euclidean_precision value: 81.00890207715133 - type: euclidean_recall value: 90.39735099337747 - type: main_score value: 91.42490266080884 - type: manhattan_accuracy value: 81.3 - type: manhattan_accuracy_threshold value: 22100.830078125 - type: manhattan_ap value: 91.25996158651282 - type: manhattan_f1 value: 85.38102643856921 - type: manhattan_f1_threshold value: 24043.515014648438 - type: manhattan_precision value: 80.49853372434018 - type: manhattan_recall value: 90.89403973509934 - type: max_accuracy value: 81.8 - 
type: max_ap value: 91.42490266080884 - type: max_f1 value: 85.4632587859425 - type: max_precision value: 82.56172839506173 - type: max_recall value: 93.21192052980133 - type: similarity_accuracy value: 81.8 - type: similarity_accuracy_threshold value: 90.47793745994568 - type: similarity_ap value: 91.42490266080884 - type: similarity_f1 value: 85.4632587859425 - type: similarity_f1_threshold value: 90.47793745994568 - type: similarity_precision value: 82.56172839506173 - type: similarity_recall value: 88.57615894039735 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval (default) type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: map_at_1 value: 71.419 - type: map_at_10 value: 85.542 - type: map_at_100 value: 86.161 - type: map_at_1000 value: 86.175 - type: map_at_20 value: 85.949 - type: map_at_3 value: 82.623 - type: map_at_5 value: 84.5 - type: mrr_at_1 value: 82.27 - type: mrr_at_10 value: 88.21900000000001 - type: mrr_at_100 value: 88.313 - type: mrr_at_1000 value: 88.31400000000001 - type: mrr_at_20 value: 88.286 - type: mrr_at_3 value: 87.325 - type: mrr_at_5 value: 87.97500000000001 - type: ndcg_at_1 value: 82.3 - type: ndcg_at_10 value: 89.088 - type: ndcg_at_100 value: 90.217 - type: ndcg_at_1000 value: 90.29700000000001 - type: ndcg_at_20 value: 89.697 - type: ndcg_at_3 value: 86.435 - type: ndcg_at_5 value: 87.966 - type: precision_at_1 value: 82.3 - type: precision_at_10 value: 13.527000000000001 - type: precision_at_100 value: 1.537 - type: precision_at_1000 value: 0.157 - type: precision_at_20 value: 7.165000000000001 - type: precision_at_3 value: 37.92 - type: precision_at_5 value: 24.914 - type: recall_at_1 value: 71.419 - type: recall_at_10 value: 95.831 - type: recall_at_100 value: 99.64 - type: recall_at_1000 value: 99.988 - type: recall_at_20 value: 97.76599999999999 - type: recall_at_3 value: 88.081 - type: recall_at_5 value: 92.50500000000001 - type: main_score value: 89.088 - task: type: STS dataset: name: MTEB RUParaPhraserSTS (default) type: merionum/ru_paraphraser config: default split: test revision: 43265056790b8f7c59e0139acb4be0a8dad2c8f4 metrics: - type: cosine_pearson value: 67.91177744712421 - type: cosine_spearman value: 76.77113726753656 - type: euclidean_pearson value: 73.81454206068638 - type: euclidean_spearman value: 76.92529493599028 - type: main_score value: 76.77113726753656 - type: manhattan_pearson value: 73.81690454439168 - type: manhattan_spearman value: 76.87333776705002 - type: pearson value: 67.91177744712421 - type: spearman value: 76.77113726753656 - task: type: Clustering dataset: name: MTEB RedditClustering (default) type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: main_score value: 55.39924225216962 - type: v_measure value: 55.39924225216962 - type: v_measure_std value: 4.723802279292467 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P (default) type: mteb/reddit-clustering-p2p config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: main_score value: 62.87465161304012 - type: v_measure value: 62.87465161304012 - type: v_measure_std value: 12.082670914488473 - task: type: Retrieval dataset: name: MTEB RiaNewsRetrieval (default) type: ai-forever/ria-news-retrieval config: default split: test revision: 82374b0bbacda6114f39ff9c5b925fa1512ca5d7 metrics: - type: main_score value: 79.209 - type: map_at_1 value: 67.33 - type: map_at_10 value: 
75.633 - type: map_at_100 value: 75.897 - type: map_at_1000 value: 75.907 - type: map_at_20 value: 75.804 - type: map_at_3 value: 74.2 - type: map_at_5 value: 75.13300000000001 - type: mrr_at_1 value: 67.31 - type: mrr_at_10 value: 75.62709126984095 - type: mrr_at_100 value: 75.89105697041113 - type: mrr_at_1000 value: 75.90115653883124 - type: mrr_at_20 value: 75.79802332308172 - type: mrr_at_3 value: 74.19499999999961 - type: mrr_at_5 value: 75.12849999999939 - type: nauc_map_at_1000_diff1 value: 74.30304869630591 - type: nauc_map_at_1000_max value: 36.477146725784046 - type: nauc_map_at_1000_std value: -20.862772498461723 - type: nauc_map_at_100_diff1 value: 74.29833058090355 - type: nauc_map_at_100_max value: 36.483678619667884 - type: nauc_map_at_100_std value: -20.856274849980135 - type: nauc_map_at_10_diff1 value: 74.20729220697967 - type: nauc_map_at_10_max value: 36.56543146170092 - type: nauc_map_at_10_std value: -20.991081015484728 - type: nauc_map_at_1_diff1 value: 77.38899022125185 - type: nauc_map_at_1_max value: 32.45918619669731 - type: nauc_map_at_1_std value: -22.149586336167324 - type: nauc_map_at_20_diff1 value: 74.2447573558587 - type: nauc_map_at_20_max value: 36.50383130240387 - type: nauc_map_at_20_std value: -20.87013743041831 - type: nauc_map_at_3_diff1 value: 74.3054577294586 - type: nauc_map_at_3_max value: 36.484530586652724 - type: nauc_map_at_3_std value: -21.90543024607988 - type: nauc_map_at_5_diff1 value: 74.21062368961503 - type: nauc_map_at_5_max value: 36.55670532498779 - type: nauc_map_at_5_std value: -21.488786900676942 - type: nauc_mrr_at_1000_diff1 value: 74.31619177956684 - type: nauc_mrr_at_1000_max value: 36.53498918453189 - type: nauc_mrr_at_1000_std value: -20.75986704931237 - type: nauc_mrr_at_100_diff1 value: 74.31146790382356 - type: nauc_mrr_at_100_max value: 36.54149252857106 - type: nauc_mrr_at_100_std value: -20.75341959250079 - type: nauc_mrr_at_10_diff1 value: 74.22027806145095 - type: nauc_mrr_at_10_max value: 36.622542969971725 - type: nauc_mrr_at_10_std value: -20.889417384064117 - type: nauc_mrr_at_1_diff1 value: 77.4306709551449 - type: nauc_mrr_at_1_max value: 32.57259463438259 - type: nauc_mrr_at_1_std value: -21.964402859613937 - type: nauc_mrr_at_20_diff1 value: 74.25784396230718 - type: nauc_mrr_at_20_max value: 36.561412224507336 - type: nauc_mrr_at_20_std value: -20.767665000065723 - type: nauc_mrr_at_3_diff1 value: 74.31423253547214 - type: nauc_mrr_at_3_max value: 36.537745749488906 - type: nauc_mrr_at_3_std value: -21.81259529019546 - type: nauc_mrr_at_5_diff1 value: 74.22404613312771 - type: nauc_mrr_at_5_max value: 36.60743768455219 - type: nauc_mrr_at_5_std value: -21.39479216331971 - type: nauc_ndcg_at_1000_diff1 value: 73.48182819705742 - type: nauc_ndcg_at_1000_max value: 37.86991608461793 - type: nauc_ndcg_at_1000_std value: -19.021499322688904 - type: nauc_ndcg_at_100_diff1 value: 73.34941250585759 - type: nauc_ndcg_at_100_max value: 38.11150275625829 - type: nauc_ndcg_at_100_std value: -18.70624087206104 - type: nauc_ndcg_at_10_diff1 value: 72.82520265115987 - type: nauc_ndcg_at_10_max value: 38.43323357650525 - type: nauc_ndcg_at_10_std value: -19.410953792830878 - type: nauc_ndcg_at_1_diff1 value: 77.38899022125185 - type: nauc_ndcg_at_1_max value: 32.45918619669731 - type: nauc_ndcg_at_1_std value: -22.149586336167324 - type: nauc_ndcg_at_20_diff1 value: 72.93309285256507 - type: nauc_ndcg_at_20_max value: 38.217372819067755 - type: nauc_ndcg_at_20_std value: -18.864113576359333 - type: nauc_ndcg_at_3_diff1 
value: 73.18253776744112 - type: nauc_ndcg_at_3_max value: 38.008109328364 - type: nauc_ndcg_at_3_std value: -21.68785687594153 - type: nauc_ndcg_at_5_diff1 value: 72.90474739784793 - type: nauc_ndcg_at_5_max value: 38.29483039202184 - type: nauc_ndcg_at_5_std value: -20.833049811453474 - type: nauc_precision_at_1000_diff1 value: 59.306217613750334 - type: nauc_precision_at_1000_max value: 72.20747948302262 - type: nauc_precision_at_1000_std value: 45.58837180096227 - type: nauc_precision_at_100_diff1 value: 62.87286844562389 - type: nauc_precision_at_100_max value: 61.33108214045868 - type: nauc_precision_at_100_std value: 20.67481963545654 - type: nauc_precision_at_10_diff1 value: 64.11222984256685 - type: nauc_precision_at_10_max value: 50.323697746037496 - type: nauc_precision_at_10_std value: -7.9994544634332625 - type: nauc_precision_at_1_diff1 value: 77.38899022125185 - type: nauc_precision_at_1_max value: 32.45918619669731 - type: nauc_precision_at_1_std value: -22.149586336167324 - type: nauc_precision_at_20_diff1 value: 62.30228127286973 - type: nauc_precision_at_20_max value: 52.02090746208407 - type: nauc_precision_at_20_std value: 0.7629898806370331 - type: nauc_precision_at_3_diff1 value: 68.82856645994157 - type: nauc_precision_at_3_max value: 43.94171571306625 - type: nauc_precision_at_3_std value: -20.78595255410148 - type: nauc_precision_at_5_diff1 value: 66.62157622497887 - type: nauc_precision_at_5_max value: 46.69398173603811 - type: nauc_precision_at_5_std value: -17.412423571163057 - type: nauc_recall_at_1000_diff1 value: 59.30621761375148 - type: nauc_recall_at_1000_max value: 72.20747948302191 - type: nauc_recall_at_1000_std value: 45.588371800962655 - type: nauc_recall_at_100_diff1 value: 62.872868445623894 - type: nauc_recall_at_100_max value: 61.33108214045813 - type: nauc_recall_at_100_std value: 20.67481963545666 - type: nauc_recall_at_10_diff1 value: 64.11222984256698 - type: nauc_recall_at_10_max value: 50.32369774603755 - type: nauc_recall_at_10_std value: -7.999454463433321 - type: nauc_recall_at_1_diff1 value: 77.38899022125185 - type: nauc_recall_at_1_max value: 32.45918619669731 - type: nauc_recall_at_1_std value: -22.149586336167324 - type: nauc_recall_at_20_diff1 value: 62.3022812728695 - type: nauc_recall_at_20_max value: 52.02090746208397 - type: nauc_recall_at_20_std value: 0.7629898806369458 - type: nauc_recall_at_3_diff1 value: 68.82856645994157 - type: nauc_recall_at_3_max value: 43.94171571306612 - type: nauc_recall_at_3_std value: -20.78595255410157 - type: nauc_recall_at_5_diff1 value: 66.62157622497897 - type: nauc_recall_at_5_max value: 46.693981736038246 - type: nauc_recall_at_5_std value: -17.412423571162954 - type: ndcg_at_1 value: 67.33 - type: ndcg_at_10 value: 79.209 - type: ndcg_at_100 value: 80.463 - type: ndcg_at_1000 value: 80.74799999999999 - type: ndcg_at_20 value: 79.81899999999999 - type: ndcg_at_3 value: 76.335 - type: ndcg_at_5 value: 78.011 - type: precision_at_1 value: 67.33 - type: precision_at_10 value: 9.020999999999999 - type: precision_at_100 value: 0.96 - type: precision_at_1000 value: 0.098 - type: precision_at_20 value: 4.63 - type: precision_at_3 value: 27.493000000000002 - type: precision_at_5 value: 17.308 - type: recall_at_1 value: 67.33 - type: recall_at_10 value: 90.21000000000001 - type: recall_at_100 value: 96.00999999999999 - type: recall_at_1000 value: 98.29 - type: recall_at_20 value: 92.60000000000001 - type: recall_at_3 value: 82.48 - type: recall_at_5 value: 86.53999999999999 - task: type: Reranking 
dataset: name: MTEB RuBQReranking (default) type: ai-forever/rubq-reranking config: default split: test revision: 2e96b8f098fa4b0950fc58eacadeb31c0d0c7fa2 metrics: - type: main_score value: 65.57453932493252 - type: map value: 65.57453932493252 - type: mrr value: 70.51408205663526 - type: nAUC_map_diff1 value: 26.69583260609023 - type: nAUC_map_max value: 12.928262749610663 - type: nAUC_map_std value: 11.702468857903128 - type: nAUC_mrr_diff1 value: 28.5206955462174 - type: nAUC_mrr_max value: 14.207162454694227 - type: nAUC_mrr_std value: 10.725721001555296 - task: type: Retrieval dataset: name: MTEB RuBQRetrieval (default) type: ai-forever/rubq-retrieval config: default split: test revision: e19b6ffa60b3bc248e0b41f4cc37c26a55c2a67b metrics: - type: main_score value: 72.306 - type: map_at_1 value: 44.187 - type: map_at_10 value: 64.836 - type: map_at_100 value: 65.771 - type: map_at_1000 value: 65.8 - type: map_at_20 value: 65.497 - type: map_at_3 value: 59.692 - type: map_at_5 value: 63.105 - type: mrr_at_1 value: 62.23404255319149 - type: mrr_at_10 value: 73.40810161732159 - type: mrr_at_100 value: 73.67949305473395 - type: mrr_at_1000 value: 73.68707852294746 - type: mrr_at_20 value: 73.60429051697479 - type: mrr_at_3 value: 71.47360126083535 - type: mrr_at_5 value: 72.8447596532704 - type: nauc_map_at_1000_diff1 value: 39.838449035736886 - type: nauc_map_at_1000_max value: 32.29962306877408 - type: nauc_map_at_1000_std value: -6.324859592714388 - type: nauc_map_at_100_diff1 value: 39.824361938745426 - type: nauc_map_at_100_max value: 32.32055222704763 - type: nauc_map_at_100_std value: -6.301641111869559 - type: nauc_map_at_10_diff1 value: 39.50155328718487 - type: nauc_map_at_10_max value: 31.745730244960672 - type: nauc_map_at_10_std value: -6.867215137329693 - type: nauc_map_at_1_diff1 value: 47.66181128677822 - type: nauc_map_at_1_max value: 21.75204233166764 - type: nauc_map_at_1_std value: -8.06951079061697 - type: nauc_map_at_20_diff1 value: 39.78364637902108 - type: nauc_map_at_20_max value: 32.39065528029405 - type: nauc_map_at_20_std value: -6.368994332729006 - type: nauc_map_at_3_diff1 value: 39.51829474433183 - type: nauc_map_at_3_max value: 28.633292697821673 - type: nauc_map_at_3_std value: -7.2561170814963925 - type: nauc_map_at_5_diff1 value: 39.288433237676266 - type: nauc_map_at_5_max value: 31.007702201615515 - type: nauc_map_at_5_std value: -7.235131195162474 - type: nauc_mrr_at_1000_diff1 value: 49.599102391215226 - type: nauc_mrr_at_1000_max value: 38.25521825911133 - type: nauc_mrr_at_1000_std value: -10.448180939809435 - type: nauc_mrr_at_100_diff1 value: 49.5957067716212 - type: nauc_mrr_at_100_max value: 38.26760703964535 - type: nauc_mrr_at_100_std value: -10.438443051971081 - type: nauc_mrr_at_10_diff1 value: 49.35269710190271 - type: nauc_mrr_at_10_max value: 38.43782589127069 - type: nauc_mrr_at_10_std value: -10.404402063509815 - type: nauc_mrr_at_1_diff1 value: 53.32206103688421 - type: nauc_mrr_at_1_max value: 33.52402390241035 - type: nauc_mrr_at_1_std value: -12.73473393949936 - type: nauc_mrr_at_20_diff1 value: 49.550630850826636 - type: nauc_mrr_at_20_max value: 38.35964703941151 - type: nauc_mrr_at_20_std value: -10.444577766284766 - type: nauc_mrr_at_3_diff1 value: 49.12029127633829 - type: nauc_mrr_at_3_max value: 38.01631275124067 - type: nauc_mrr_at_3_std value: -10.523724301481309 - type: nauc_mrr_at_5_diff1 value: 49.04606949432458 - type: nauc_mrr_at_5_max value: 38.33647550077891 - type: nauc_mrr_at_5_std value: -10.47076409263114 - type: 
nauc_ndcg_at_1000_diff1 value: 41.342785916264226 - type: nauc_ndcg_at_1000_max value: 35.75731064862711 - type: nauc_ndcg_at_1000_std value: -5.45573422899229 - type: nauc_ndcg_at_100_diff1 value: 40.972974559636086 - type: nauc_ndcg_at_100_max value: 36.32938573321036 - type: nauc_ndcg_at_100_std value: -4.749631537590004 - type: nauc_ndcg_at_10_diff1 value: 39.67813474464166 - type: nauc_ndcg_at_10_max value: 35.480200504848966 - type: nauc_ndcg_at_10_std value: -6.318561293935512 - type: nauc_ndcg_at_1_diff1 value: 53.45970160222764 - type: nauc_ndcg_at_1_max value: 33.14759013278075 - type: nauc_ndcg_at_1_std value: -12.579833891774847 - type: nauc_ndcg_at_20_diff1 value: 40.67492861219249 - type: nauc_ndcg_at_20_max value: 36.84960799838019 - type: nauc_ndcg_at_20_std value: -5.202530835850179 - type: nauc_ndcg_at_3_diff1 value: 39.574906207408844 - type: nauc_ndcg_at_3_max value: 31.76512164509258 - type: nauc_ndcg_at_3_std value: -7.656143208565999 - type: nauc_ndcg_at_5_diff1 value: 39.096348529742095 - type: nauc_ndcg_at_5_max value: 34.075926475544165 - type: nauc_ndcg_at_5_std value: -7.238045445366631 - type: nauc_precision_at_1000_diff1 value: -14.283799754212609 - type: nauc_precision_at_1000_max value: 6.449741756717101 - type: nauc_precision_at_1000_std value: 4.862828679759048 - type: nauc_precision_at_100_diff1 value: -13.23173132700258 - type: nauc_precision_at_100_max value: 11.058898534529195 - type: nauc_precision_at_100_std value: 7.343683941814956 - type: nauc_precision_at_10_diff1 value: -7.202951643546464 - type: nauc_precision_at_10_max value: 17.499446869433278 - type: nauc_precision_at_10_std value: 2.8367985220406307 - type: nauc_precision_at_1_diff1 value: 53.45970160222764 - type: nauc_precision_at_1_max value: 33.14759013278075 - type: nauc_precision_at_1_std value: -12.579833891774847 - type: nauc_precision_at_20_diff1 value: -9.477122699154124 - type: nauc_precision_at_20_max value: 16.80556031564312 - type: nauc_precision_at_20_std value: 6.420218284416923 - type: nauc_precision_at_3_diff1 value: 5.5276143574150245 - type: nauc_precision_at_3_max value: 23.65952688481666 - type: nauc_precision_at_3_std value: -1.8730348729295785 - type: nauc_precision_at_5_diff1 value: -2.4537029093721308 - type: nauc_precision_at_5_max value: 21.41469327545133 - type: nauc_precision_at_5_std value: 0.1543890645722277 - type: nauc_recall_at_1000_diff1 value: -1.7474947956413491 - type: nauc_recall_at_1000_max value: 46.22670991970479 - type: nauc_recall_at_1000_std value: 62.582840705588794 - type: nauc_recall_at_100_diff1 value: 16.116089801097345 - type: nauc_recall_at_100_max value: 52.54794580975103 - type: nauc_recall_at_100_std value: 33.720245696003246 - type: nauc_recall_at_10_diff1 value: 23.134924318655482 - type: nauc_recall_at_10_max value: 38.73754275649077 - type: nauc_recall_at_10_std value: 0.6137471711639239 - type: nauc_recall_at_1_diff1 value: 47.66181128677822 - type: nauc_recall_at_1_max value: 21.75204233166764 - type: nauc_recall_at_1_std value: -8.06951079061697 - type: nauc_recall_at_20_diff1 value: 24.130616271355017 - type: nauc_recall_at_20_max value: 48.306178640146136 - type: nauc_recall_at_20_std value: 9.290819557000022 - type: nauc_recall_at_3_diff1 value: 29.767415016250226 - type: nauc_recall_at_3_max value: 28.54289782140701 - type: nauc_recall_at_3_std value: -5.1395675072005576 - type: nauc_recall_at_5_diff1 value: 25.410613126870174 - type: nauc_recall_at_5_max value: 33.24658754857624 - type: nauc_recall_at_5_std value: 
-4.211226036746632 - type: ndcg_at_1 value: 62.175000000000004 - type: ndcg_at_10 value: 72.306 - type: ndcg_at_100 value: 75.074 - type: ndcg_at_1000 value: 75.581 - type: ndcg_at_20 value: 73.875 - type: ndcg_at_3 value: 65.641 - type: ndcg_at_5 value: 69.48299999999999 - type: precision_at_1 value: 62.175000000000004 - type: precision_at_10 value: 13.907 - type: precision_at_100 value: 1.591 - type: precision_at_1000 value: 0.166 - type: precision_at_20 value: 7.446999999999999 - type: precision_at_3 value: 35.619 - type: precision_at_5 value: 24.917 - type: recall_at_1 value: 44.187 - type: recall_at_10 value: 85.10600000000001 - type: recall_at_100 value: 95.488 - type: recall_at_1000 value: 98.831 - type: recall_at_20 value: 90.22200000000001 - type: recall_at_3 value: 68.789 - type: recall_at_5 value: 77.85499999999999 - task: type: Classification dataset: name: MTEB RuReviewsClassification (default) type: ai-forever/ru-reviews-classification config: default split: test revision: f6d2c31f4dc6b88f468552750bfec05b4b41b05a metrics: - type: accuracy value: 67.5830078125 - type: f1 value: 67.56931936632446 - type: f1_weighted value: 67.57137733752779 - type: main_score value: 67.5830078125 - task: type: STS dataset: name: MTEB RuSTSBenchmarkSTS (default) type: ai-forever/ru-stsbenchmark-sts config: default split: test revision: 7cf24f325c6da6195df55bef3d86b5e0616f3018 metrics: - type: cosine_pearson value: 85.90493484626788 - type: cosine_spearman value: 86.21965691667411 - type: euclidean_pearson value: 86.07499842984909 - type: euclidean_spearman value: 86.55506818735688 - type: main_score value: 86.21965691667411 - type: manhattan_pearson value: 85.95976420231729 - type: manhattan_spearman value: 86.48604243661234 - type: pearson value: 85.90493484626788 - type: spearman value: 86.21965691667411 - task: type: Classification dataset: name: MTEB RuSciBenchGRNTIClassification (default) type: ai-forever/ru-scibench-grnti-classification config: default split: test revision: 673a610d6d3dd91a547a0d57ae1b56f37ebbf6a1 metrics: - type: accuracy value: 59.1943359375 - type: f1 value: 58.894480861440414 - type: f1_weighted value: 58.903615560240866 - type: main_score value: 59.1943359375 - task: type: Clustering dataset: name: MTEB RuSciBenchGRNTIClusteringP2P (default) type: ai-forever/ru-scibench-grnti-classification config: default split: test revision: 673a610d6d3dd91a547a0d57ae1b56f37ebbf6a1 metrics: - type: main_score value: 57.99209448663228 - type: v_measure value: 57.99209448663228 - type: v_measure_std value: 1.0381163861993816 - task: type: Classification dataset: name: MTEB RuSciBenchOECDClassification (default) type: ai-forever/ru-scibench-oecd-classification config: default split: test revision: 26c88e99dcaba32bb45d0e1bfc21902337f6d471 metrics: - type: accuracy value: 45.556640625 - type: f1 value: 45.159163104085906 - type: f1_weighted value: 45.16098316398626 - type: main_score value: 45.556640625 - task: type: Clustering dataset: name: MTEB RuSciBenchOECDClusteringP2P (default) type: ai-forever/ru-scibench-oecd-classification config: default split: test revision: 26c88e99dcaba32bb45d0e1bfc21902337f6d471 metrics: - type: main_score value: 50.787548070488974 - type: v_measure value: 50.787548070488974 - type: v_measure_std value: 0.8569958168946827 - task: type: Retrieval dataset: name: MTEB SCIDOCS (default) type: mteb/scidocs config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: map_at_1 value: 4.843 - type: map_at_10 value: 11.752 - 
type: map_at_100 value: 13.919 - type: map_at_1000 value: 14.198 - type: map_at_20 value: 12.898000000000001 - type: map_at_3 value: 8.603 - type: map_at_5 value: 10.069 - type: mrr_at_1 value: 23.799999999999997 - type: mrr_at_10 value: 34.449999999999996 - type: mrr_at_100 value: 35.64 - type: mrr_at_1000 value: 35.691 - type: mrr_at_20 value: 35.213 - type: mrr_at_3 value: 31.383 - type: mrr_at_5 value: 33.062999999999995 - type: ndcg_at_1 value: 23.799999999999997 - type: ndcg_at_10 value: 19.811 - type: ndcg_at_100 value: 28.108 - type: ndcg_at_1000 value: 33.1 - type: ndcg_at_20 value: 22.980999999999998 - type: ndcg_at_3 value: 19.153000000000002 - type: ndcg_at_5 value: 16.408 - type: precision_at_1 value: 23.799999999999997 - type: precision_at_10 value: 10.16 - type: precision_at_100 value: 2.1999999999999997 - type: precision_at_1000 value: 0.34099999999999997 - type: precision_at_20 value: 6.915 - type: precision_at_3 value: 17.8 - type: precision_at_5 value: 14.14 - type: recall_at_1 value: 4.843 - type: recall_at_10 value: 20.595 - type: recall_at_100 value: 44.66 - type: recall_at_1000 value: 69.152 - type: recall_at_20 value: 28.04 - type: recall_at_3 value: 10.833 - type: recall_at_5 value: 14.346999999999998 - type: main_score value: 19.811 - task: type: PairClassification dataset: name: MTEB SICK-E-PL (default) type: PL-MTEB/sicke-pl-pairclassification config: default split: test revision: 71bba34b0ece6c56dfcf46d9758a27f7a90f17e9 metrics: - type: cosine_accuracy value: 80.90093762739502 - type: cosine_accuracy_threshold value: 94.40930485725403 - type: cosine_ap value: 71.15400909912427 - type: cosine_f1 value: 66.8213457076566 - type: cosine_f1_threshold value: 91.53673648834229 - type: cosine_precision value: 62.4922504649721 - type: cosine_recall value: 71.7948717948718 - type: dot_accuracy value: 78.41418671015083 - type: dot_accuracy_threshold value: 42924.45068359375 - type: dot_ap value: 63.34003025365763 - type: dot_f1 value: 62.518258837277244 - type: dot_f1_threshold value: 40900.738525390625 - type: dot_precision value: 52.99653293709758 - type: dot_recall value: 76.21082621082621 - type: euclidean_accuracy value: 80.67672238075826 - type: euclidean_accuracy_threshold value: 696.0524559020996 - type: euclidean_ap value: 70.88762835990224 - type: euclidean_f1 value: 66.711051930759 - type: euclidean_f1_threshold value: 878.5581588745117 - type: euclidean_precision value: 62.625 - type: euclidean_recall value: 71.36752136752136 - type: main_score value: 71.15400909912427 - type: manhattan_accuracy value: 80.65633917651854 - type: manhattan_accuracy_threshold value: 17277.72674560547 - type: manhattan_ap value: 70.67105336611716 - type: manhattan_f1 value: 66.51346027577151 - type: manhattan_f1_threshold value: 21687.957763671875 - type: manhattan_precision value: 61.69305724725944 - type: manhattan_recall value: 72.15099715099716 - type: max_accuracy value: 80.90093762739502 - type: max_ap value: 71.15400909912427 - type: max_f1 value: 66.8213457076566 - type: max_precision value: 62.625 - type: max_recall value: 76.21082621082621 - type: similarity_accuracy value: 80.90093762739502 - type: similarity_accuracy_threshold value: 94.40930485725403 - type: similarity_ap value: 71.15400909912427 - type: similarity_f1 value: 66.8213457076566 - type: similarity_f1_threshold value: 91.53673648834229 - type: similarity_precision value: 62.4922504649721 - type: similarity_recall value: 71.7948717948718 - task: type: STS dataset: name: MTEB SICK-R (default) type: 
mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: cosine_pearson value: 92.3339946866199 - type: cosine_spearman value: 89.61697355115497 - type: euclidean_pearson value: 90.3264916449669 - type: euclidean_spearman value: 89.36270451308866 - type: main_score value: 89.61697355115497 - type: manhattan_pearson value: 90.18909339052534 - type: manhattan_spearman value: 89.28337093097377 - type: pearson value: 92.3339946866199 - type: spearman value: 89.61697355115497 - task: type: STS dataset: name: MTEB SICK-R-PL (default) type: PL-MTEB/sickr-pl-sts config: default split: test revision: fd5c2441b7eeff8676768036142af4cfa42c1339 metrics: - type: cosine_pearson value: 85.27883048457821 - type: cosine_spearman value: 80.53204892678619 - type: euclidean_pearson value: 82.78520705216168 - type: euclidean_spearman value: 80.27848359873212 - type: main_score value: 80.53204892678619 - type: manhattan_pearson value: 82.63270640583454 - type: manhattan_spearman value: 80.21507977473146 - type: pearson value: 85.27883048457821 - type: spearman value: 80.53204892678619 - task: type: STS dataset: name: MTEB SICKFr (default) type: Lajavaness/SICK-fr config: default split: test revision: e077ab4cf4774a1e36d86d593b150422fafd8e8a metrics: - type: cosine_pearson value: 88.77029361817212 - type: cosine_spearman value: 83.9453600346894 - type: euclidean_pearson value: 85.85331086208573 - type: euclidean_spearman value: 83.70852031985308 - type: main_score value: 83.9453600346894 - type: manhattan_pearson value: 85.66222265885914 - type: manhattan_spearman value: 83.60833111525962 - type: pearson value: 88.77029361817212 - type: spearman value: 83.9453600346894 - task: type: STS dataset: name: MTEB STS12 (default) type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cosine_pearson value: 88.76435859522375 - type: cosine_spearman value: 82.43768167804375 - type: euclidean_pearson value: 87.43566183874832 - type: euclidean_spearman value: 82.82166873757507 - type: main_score value: 82.43768167804375 - type: manhattan_pearson value: 87.39450871380951 - type: manhattan_spearman value: 82.89253043430163 - type: pearson value: 88.76435859522375 - type: spearman value: 82.43768167804375 - task: type: STS dataset: name: MTEB STS13 (default) type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cosine_pearson value: 88.86627241652141 - type: cosine_spearman value: 89.49011599120688 - type: euclidean_pearson value: 89.3314120073772 - type: euclidean_spearman value: 89.8226502776963 - type: main_score value: 89.49011599120688 - type: manhattan_pearson value: 89.2252179076963 - type: manhattan_spearman value: 89.74573844021225 - type: pearson value: 88.86627241652141 - type: spearman value: 89.49011599120688 - task: type: STS dataset: name: MTEB STS14 (default) type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cosine_pearson value: 87.22891405215968 - type: cosine_spearman value: 84.9467188157614 - type: euclidean_pearson value: 87.20330004726237 - type: euclidean_spearman value: 85.34806059461808 - type: main_score value: 84.9467188157614 - type: manhattan_pearson value: 87.15224666107623 - type: manhattan_spearman value: 85.34596898699708 - type: pearson value: 87.22891405215968 - type: spearman value: 84.9467188157614 - task: type: STS dataset: name: MTEB STS15 (default) 
type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cosine_pearson value: 88.14066430111033 - type: cosine_spearman value: 89.31337445552545 - type: euclidean_pearson value: 89.08039335366983 - type: euclidean_spearman value: 89.6658762856415 - type: main_score value: 89.31337445552545 - type: manhattan_pearson value: 89.08057438154486 - type: manhattan_spearman value: 89.68673984203022 - type: pearson value: 88.14066430111033 - type: spearman value: 89.31337445552545 - task: type: STS dataset: name: MTEB STS16 (default) type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cosine_pearson value: 85.14908856657084 - type: cosine_spearman value: 86.84648320786727 - type: euclidean_pearson value: 86.11454713131947 - type: euclidean_spearman value: 86.77738862047961 - type: main_score value: 86.84648320786727 - type: manhattan_pearson value: 86.07804821916372 - type: manhattan_spearman value: 86.78676064310474 - type: pearson value: 85.14908856657084 - type: spearman value: 86.84648320786727 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 89.61633502468356 - type: cosine_spearman value: 89.99772663224805 - type: euclidean_pearson value: 90.14056501501044 - type: euclidean_spearman value: 90.04496896837503 - type: main_score value: 89.99772663224805 - type: manhattan_pearson value: 90.08964860311801 - type: manhattan_spearman value: 90.00091712362196 - type: pearson value: 89.61633502468356 - type: spearman value: 89.99772663224805 - task: type: STS dataset: name: MTEB STS17 (es-en) type: mteb/sts17-crosslingual-sts config: es-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 86.44548026840202 - type: cosine_spearman value: 87.26263108768539 - type: euclidean_pearson value: 86.42844593583838 - type: euclidean_spearman value: 86.89388428664364 - type: main_score value: 87.26263108768539 - type: manhattan_pearson value: 86.47186940800881 - type: manhattan_spearman value: 87.02163091089946 - type: pearson value: 86.44548026840202 - type: spearman value: 87.26263108768539 - task: type: STS dataset: name: MTEB STS17 (en-de) type: mteb/sts17-crosslingual-sts config: en-de split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 87.89345132532758 - type: cosine_spearman value: 87.96246221327699 - type: euclidean_pearson value: 88.49013032701419 - type: euclidean_spearman value: 87.81981265317344 - type: main_score value: 87.96246221327699 - type: manhattan_pearson value: 88.31360914178538 - type: manhattan_spearman value: 87.62734530005075 - type: pearson value: 87.89345132532758 - type: spearman value: 87.96246221327699 - task: type: STS dataset: name: MTEB STS17 (es-es) type: mteb/sts17-crosslingual-sts config: es-es split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 88.4084678497171 - type: cosine_spearman value: 88.77640638748285 - type: euclidean_pearson value: 89.60124312475843 - type: euclidean_spearman value: 88.4321442688528 - type: main_score value: 88.77640638748285 - type: manhattan_pearson value: 89.62375118021299 - type: manhattan_spearman value: 88.46998118661577 - type: pearson value: 88.4084678497171 - type: spearman value: 88.77640638748285 - task: type: STS 
dataset: name: MTEB STS17 (fr-en) type: mteb/sts17-crosslingual-sts config: fr-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 87.30688801326498 - type: cosine_spearman value: 87.55684697258378 - type: euclidean_pearson value: 87.89672951056794 - type: euclidean_spearman value: 87.28050429201674 - type: main_score value: 87.55684697258378 - type: manhattan_pearson value: 87.74292745320572 - type: manhattan_spearman value: 87.16383993876582 - type: pearson value: 87.30688801326498 - type: spearman value: 87.55684697258378 - task: type: STS dataset: name: MTEB STS22 (zh-en) type: mteb/sts22-crosslingual-sts config: zh-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 73.46180375170147 - type: cosine_spearman value: 73.39559590127081 - type: euclidean_pearson value: 73.72613901293681 - type: euclidean_spearman value: 71.85465165176795 - type: main_score value: 73.39559590127081 - type: manhattan_pearson value: 73.07859140869076 - type: manhattan_spearman value: 71.22047343718893 - type: pearson value: 73.46180375170147 - type: spearman value: 73.39559590127081 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 62.47531620842637 - type: cosine_spearman value: 66.22504667157702 - type: euclidean_pearson value: 66.76201254783692 - type: euclidean_spearman value: 66.86115760269463 - type: main_score value: 66.22504667157702 - type: manhattan_pearson value: 66.73847836793489 - type: manhattan_spearman value: 66.7677116377695 - type: pearson value: 62.47531620842637 - type: spearman value: 66.22504667157702 - task: type: STS dataset: name: MTEB STS22 (es) type: mteb/sts22-crosslingual-sts config: es split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 69.89707002436481 - type: cosine_spearman value: 72.2054865735116 - type: euclidean_pearson value: 71.81856615570756 - type: euclidean_spearman value: 72.72593304629407 - type: main_score value: 72.2054865735116 - type: manhattan_pearson value: 72.00362684700072 - type: manhattan_spearman value: 72.62783534769964 - type: pearson value: 69.89707002436481 - type: spearman value: 72.2054865735116 - task: type: STS dataset: name: MTEB STS22 (fr) type: mteb/sts22-crosslingual-sts config: fr split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 81.59623734395916 - type: cosine_spearman value: 83.28946105111358 - type: euclidean_pearson value: 79.377330171466 - type: euclidean_spearman value: 81.81029781662205 - type: main_score value: 83.28946105111358 - type: manhattan_pearson value: 78.96970881689698 - type: manhattan_spearman value: 81.91773236079703 - type: pearson value: 81.59623734395916 - type: spearman value: 83.28946105111358 - task: type: STS dataset: name: MTEB STS22 (de-fr) type: mteb/sts22-crosslingual-sts config: de-fr split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 55.03825643126142 - type: cosine_spearman value: 58.25792501780429 - type: euclidean_pearson value: 50.38007603973409 - type: euclidean_spearman value: 59.39961789383097 - type: main_score value: 58.25792501780429 - type: manhattan_pearson value: 50.518568927999155 - type: manhattan_spearman value: 59.84185466003894 - type: pearson value: 55.03825643126142 - type: spearman value: 
58.25792501780429 - task: type: STS dataset: name: MTEB STS22 (pl-en) type: mteb/sts22-crosslingual-sts config: pl-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 77.77233721490776 - type: cosine_spearman value: 76.17596588017625 - type: euclidean_pearson value: 74.47600468156611 - type: euclidean_spearman value: 72.61278728057012 - type: main_score value: 76.17596588017625 - type: manhattan_pearson value: 74.48118910099699 - type: manhattan_spearman value: 73.33167419101696 - type: pearson value: 77.77233721490776 - type: spearman value: 76.17596588017625 - task: type: STS dataset: name: MTEB STS22 (pl) type: mteb/sts22-crosslingual-sts config: pl split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 42.87453608131507 - type: cosine_spearman value: 45.137849894401185 - type: euclidean_pearson value: 31.66964197694796 - type: euclidean_spearman value: 44.1014900837869 - type: main_score value: 45.137849894401185 - type: manhattan_pearson value: 31.007199259384745 - type: manhattan_spearman value: 43.48181523288926 - type: pearson value: 42.87453608131507 - type: spearman value: 45.137849894401185 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 66.87400150638176 - type: cosine_spearman value: 67.27861354834066 - type: euclidean_pearson value: 66.81789582140216 - type: euclidean_spearman value: 66.44220479858708 - type: main_score value: 67.27861354834066 - type: manhattan_pearson value: 66.92509859033235 - type: manhattan_spearman value: 66.46841124185076 - type: pearson value: 66.87400150638176 - type: spearman value: 67.27861354834066 - task: type: STS dataset: name: MTEB STS22 (ru) type: mteb/sts22-crosslingual-sts config: ru split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 61.819804551576084 - type: cosine_spearman value: 65.0864146772135 - type: euclidean_pearson value: 62.518151090361876 - type: euclidean_spearman value: 65.13608138548017 - type: main_score value: 65.0864146772135 - type: manhattan_pearson value: 62.51413246915267 - type: manhattan_spearman value: 65.19077543064323 - type: pearson value: 61.819804551576084 - type: spearman value: 65.0864146772135 - task: type: STS dataset: name: MTEB STS22 (de) type: mteb/sts22-crosslingual-sts config: de split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 54.85728696035389 - type: cosine_spearman value: 61.60906359227576 - type: euclidean_pearson value: 52.57582587901851 - type: euclidean_spearman value: 61.41823097598308 - type: main_score value: 61.60906359227576 - type: manhattan_pearson value: 52.500978361080506 - type: manhattan_spearman value: 61.30365596659758 - type: pearson value: 54.85728696035389 - type: spearman value: 61.60906359227576 - task: type: STS dataset: name: MTEB STS22 (fr-pl) type: mteb/sts22-crosslingual-sts config: fr-pl split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 67.68016005631422 - type: cosine_spearman value: 84.51542547285167 - type: euclidean_pearson value: 66.19871164667245 - type: euclidean_spearman value: 73.24670207647144 - type: main_score value: 84.51542547285167 - type: manhattan_pearson value: 67.0443525268974 - type: manhattan_spearman value: 73.24670207647144 - type: pearson value: 
67.68016005631422 - type: spearman value: 84.51542547285167 - task: type: STS dataset: name: MTEB STS22 (de-pl) type: mteb/sts22-crosslingual-sts config: de-pl split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 47.49467414030747 - type: cosine_spearman value: 56.81512095681289 - type: euclidean_pearson value: 48.42860221765214 - type: euclidean_spearman value: 58.63197306329092 - type: main_score value: 56.81512095681289 - type: manhattan_pearson value: 48.39594959260441 - type: manhattan_spearman value: 58.63197306329092 - type: pearson value: 47.49467414030747 - type: spearman value: 56.81512095681289 - task: type: STS dataset: name: MTEB STS22 (es-en) type: mteb/sts22-crosslingual-sts config: es-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 76.8364678896155 - type: cosine_spearman value: 78.45516413087114 - type: euclidean_pearson value: 78.62779318576634 - type: euclidean_spearman value: 78.88760695649488 - type: main_score value: 78.45516413087114 - type: manhattan_pearson value: 78.62131335760031 - type: manhattan_spearman value: 78.81861844200388 - type: pearson value: 76.8364678896155 - type: spearman value: 78.45516413087114 - task: type: STS dataset: name: MTEB STS22 (de-en) type: mteb/sts22-crosslingual-sts config: de-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 65.16640313911604 - type: cosine_spearman value: 60.887608967403914 - type: euclidean_pearson value: 67.49902244990913 - type: euclidean_spearman value: 59.2458787136538 - type: main_score value: 60.887608967403914 - type: manhattan_pearson value: 67.34313506388378 - type: manhattan_spearman value: 59.05283429200166 - type: pearson value: 65.16640313911604 - type: spearman value: 60.887608967403914 - task: type: STS dataset: name: MTEB STSB (default) type: C-MTEB/STSB config: default split: test revision: 0cde68302b3541bb8b3c340dc0644b0b745b3dc0 metrics: - type: cosine_pearson value: 81.5092853013241 - type: cosine_spearman value: 83.54005474244292 - type: euclidean_pearson value: 83.7246578378554 - type: euclidean_spearman value: 84.46767551087716 - type: main_score value: 83.54005474244292 - type: manhattan_pearson value: 83.65922665594636 - type: manhattan_spearman value: 84.42431449101848 - type: pearson value: 81.5092853013241 - type: spearman value: 83.54005474244292 - task: type: STS dataset: name: MTEB STSBenchmark (default) type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cosine_pearson value: 87.70246866744966 - type: cosine_spearman value: 89.44070045346106 - type: euclidean_pearson value: 89.56956519641007 - type: euclidean_spearman value: 89.95830112784283 - type: main_score value: 89.44070045346106 - type: manhattan_pearson value: 89.48264471425145 - type: manhattan_spearman value: 89.87900732483114 - type: pearson value: 87.70246866744966 - type: spearman value: 89.44070045346106 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (de) type: mteb/stsb_multi_mt config: de split: test revision: 29afa2569dcedaaa2fe6a3dcfebab33d28b82e8c metrics: - type: cosine_pearson value: 86.83701990805217 - type: cosine_spearman value: 87.80280785492258 - type: euclidean_pearson value: 87.77325330043514 - type: euclidean_spearman value: 88.3564607283144 - type: main_score value: 87.80280785492258 - type: manhattan_pearson value: 87.6745449945946 - type: 
manhattan_spearman value: 88.30660465978795 - type: pearson value: 86.83701990805217 - type: spearman value: 87.80280785492258 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (zh) type: mteb/stsb_multi_mt config: zh split: test revision: 29afa2569dcedaaa2fe6a3dcfebab33d28b82e8c metrics: - type: cosine_pearson value: 84.27751020600267 - type: cosine_spearman value: 85.63500407412486 - type: euclidean_pearson value: 85.21829891649696 - type: euclidean_spearman value: 85.9384575715382 - type: main_score value: 85.63500407412486 - type: manhattan_pearson value: 85.10797194089801 - type: manhattan_spearman value: 85.8770162042784 - type: pearson value: 84.27751020600267 - type: spearman value: 85.63500407412486 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (fr) type: mteb/stsb_multi_mt config: fr split: test revision: 29afa2569dcedaaa2fe6a3dcfebab33d28b82e8c metrics: - type: cosine_pearson value: 86.56833656723254 - type: cosine_spearman value: 87.4393978501382 - type: euclidean_pearson value: 87.45171512751267 - type: euclidean_spearman value: 88.13106516566947 - type: main_score value: 87.4393978501382 - type: manhattan_pearson value: 87.33010961793333 - type: manhattan_spearman value: 88.06707425102182 - type: pearson value: 86.56833656723254 - type: spearman value: 87.4393978501382 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (pl) type: mteb/stsb_multi_mt config: pl split: test revision: 29afa2569dcedaaa2fe6a3dcfebab33d28b82e8c metrics: - type: cosine_pearson value: 85.45065540325523 - type: cosine_spearman value: 85.47881076789359 - type: euclidean_pearson value: 85.1999493863155 - type: euclidean_spearman value: 85.7874947669187 - type: main_score value: 85.47881076789359 - type: manhattan_pearson value: 85.06075305990376 - type: manhattan_spearman value: 85.71563015639558 - type: pearson value: 85.45065540325523 - type: spearman value: 85.47881076789359 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (es) type: mteb/stsb_multi_mt config: es split: test revision: 29afa2569dcedaaa2fe6a3dcfebab33d28b82e8c metrics: - type: cosine_pearson value: 87.11952824079832 - type: cosine_spearman value: 87.9643473573153 - type: euclidean_pearson value: 88.11750364639971 - type: euclidean_spearman value: 88.63695109016498 - type: main_score value: 87.9643473573153 - type: manhattan_pearson value: 88.00294453126699 - type: manhattan_spearman value: 88.53750241758391 - type: pearson value: 87.11952824079832 - type: spearman value: 87.9643473573153 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (ru) type: mteb/stsb_multi_mt config: ru split: test revision: 29afa2569dcedaaa2fe6a3dcfebab33d28b82e8c metrics: - type: cosine_pearson value: 85.99804354414991 - type: cosine_spearman value: 86.30252111551002 - type: euclidean_pearson value: 86.1880652037762 - type: euclidean_spearman value: 86.69556223944502 - type: main_score value: 86.30252111551002 - type: manhattan_pearson value: 86.0736400320898 - type: manhattan_spearman value: 86.61747927593393 - type: pearson value: 85.99804354414991 - type: spearman value: 86.30252111551002 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (en) type: mteb/stsb_multi_mt config: en split: test revision: 29afa2569dcedaaa2fe6a3dcfebab33d28b82e8c metrics: - type: cosine_pearson value: 87.70246861738103 - type: cosine_spearman value: 89.44070045346106 - type: euclidean_pearson value: 89.56956518833663 - type: euclidean_spearman value: 89.95830112784283 - type: 
main_score value: 89.44070045346106 - type: manhattan_pearson value: 89.48264470792915 - type: manhattan_spearman value: 89.87900732483114 - type: pearson value: 87.70246861738103 - type: spearman value: 89.44070045346106 - task: type: Reranking dataset: name: MTEB SciDocsRR (default) type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 84.88064122814694 - type: mrr value: 95.84832651009123 - type: main_score value: 84.88064122814694 - task: type: Retrieval dataset: name: MTEB SciFact (default) type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: map_at_1 value: 57.289 - type: map_at_10 value: 67.88499999999999 - type: map_at_100 value: 68.477 - type: map_at_1000 value: 68.50500000000001 - type: map_at_20 value: 68.33500000000001 - type: map_at_3 value: 65.08 - type: map_at_5 value: 67.001 - type: mrr_at_1 value: 59.667 - type: mrr_at_10 value: 68.626 - type: mrr_at_100 value: 69.082 - type: mrr_at_1000 value: 69.108 - type: mrr_at_20 value: 68.958 - type: mrr_at_3 value: 66.667 - type: mrr_at_5 value: 67.983 - type: ndcg_at_1 value: 59.667 - type: ndcg_at_10 value: 72.309 - type: ndcg_at_100 value: 74.58399999999999 - type: ndcg_at_1000 value: 75.25500000000001 - type: ndcg_at_20 value: 73.656 - type: ndcg_at_3 value: 67.791 - type: ndcg_at_5 value: 70.45 - type: precision_at_1 value: 59.667 - type: precision_at_10 value: 9.567 - type: precision_at_100 value: 1.073 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_20 value: 5.083 - type: precision_at_3 value: 26.333000000000002 - type: precision_at_5 value: 17.666999999999998 - type: recall_at_1 value: 57.289 - type: recall_at_10 value: 84.756 - type: recall_at_100 value: 94.5 - type: recall_at_1000 value: 99.667 - type: recall_at_20 value: 89.7 - type: recall_at_3 value: 73.22800000000001 - type: recall_at_5 value: 79.444 - type: main_score value: 72.309 - task: type: Clustering dataset: name: MTEB SpanishNewsClusteringP2P (default) type: jinaai/spanish_news_clustering config: default split: test revision: bf8ca8ddc5b7da4f7004720ddf99bbe0483480e6 metrics: - type: main_score value: 45.04477709795154 - type: v_measure value: 45.04477709795154 - type: v_measure_std value: 0.0 - task: type: Retrieval dataset: name: MTEB SpanishPassageRetrievalS2S (default) type: jinaai/spanish_passage_retrieval config: default split: test revision: 9cddf2ce5209ade52c2115ccfa00eb22c6d3a837 metrics: - type: main_score value: 69.83 - type: map_at_1 value: 15.736 - type: map_at_10 value: 52.027 - type: map_at_100 value: 65.08800000000001 - type: map_at_1000 value: 65.08800000000001 - type: map_at_20 value: 60.79900000000001 - type: map_at_3 value: 32.869 - type: map_at_5 value: 41.436 - type: mrr_at_1 value: 75.44910179640718 - type: mrr_at_10 value: 84.43446440452426 - type: mrr_at_100 value: 84.48052612723271 - type: mrr_at_1000 value: 84.48052612723271 - type: mrr_at_20 value: 84.48052612723271 - type: mrr_at_3 value: 83.13373253493013 - type: mrr_at_5 value: 84.3013972055888 - type: nauc_map_at_1000_diff1 value: 50.611540149694356 - type: nauc_map_at_1000_max value: 2.1102430434260238 - type: nauc_map_at_1000_std value: -18.88993521335793 - type: nauc_map_at_100_diff1 value: 50.611540149694356 - type: nauc_map_at_100_max value: 2.1102430434260238 - type: nauc_map_at_100_std value: -18.88993521335793 - type: nauc_map_at_10_diff1 value: 59.13518981755268 - type: nauc_map_at_10_max value: 
-9.810386627392807 - type: nauc_map_at_10_std value: -38.31810152345078 - type: nauc_map_at_1_diff1 value: 74.96782567287174 - type: nauc_map_at_1_max value: -29.648279252607875 - type: nauc_map_at_1_std value: -54.017459339141595 - type: nauc_map_at_20_diff1 value: 55.26694458629849 - type: nauc_map_at_20_max value: -1.9490244535020729 - type: nauc_map_at_20_std value: -25.22211659104076 - type: nauc_map_at_3_diff1 value: 71.67607885031732 - type: nauc_map_at_3_max value: -25.078101661694507 - type: nauc_map_at_3_std value: -50.55408861920259 - type: nauc_map_at_5_diff1 value: 61.50111515417668 - type: nauc_map_at_5_max value: -16.4114670513168 - type: nauc_map_at_5_std value: -44.391416134859135 - type: nauc_mrr_at_1000_diff1 value: 74.18848063283234 - type: nauc_mrr_at_1000_max value: 21.929205946778005 - type: nauc_mrr_at_1000_std value: -36.27399268489433 - type: nauc_mrr_at_100_diff1 value: 74.18848063283234 - type: nauc_mrr_at_100_max value: 21.929205946778005 - type: nauc_mrr_at_100_std value: -36.27399268489433 - type: nauc_mrr_at_10_diff1 value: 74.27231582268745 - type: nauc_mrr_at_10_max value: 21.481133301135337 - type: nauc_mrr_at_10_std value: -36.72070854872902 - type: nauc_mrr_at_1_diff1 value: 76.54855950439561 - type: nauc_mrr_at_1_max value: 26.99938321212366 - type: nauc_mrr_at_1_std value: -33.098742603429635 - type: nauc_mrr_at_20_diff1 value: 74.18848063283234 - type: nauc_mrr_at_20_max value: 21.929205946778005 - type: nauc_mrr_at_20_std value: -36.27399268489433 - type: nauc_mrr_at_3_diff1 value: 72.05379526740143 - type: nauc_mrr_at_3_max value: 18.875831185752528 - type: nauc_mrr_at_3_std value: -37.27302006456391 - type: nauc_mrr_at_5_diff1 value: 74.25342356682029 - type: nauc_mrr_at_5_max value: 20.756340085088738 - type: nauc_mrr_at_5_std value: -37.99507208540703 - type: nauc_ndcg_at_1000_diff1 value: 53.259363764380275 - type: nauc_ndcg_at_1000_max value: 12.936954959423218 - type: nauc_ndcg_at_1000_std value: -16.953898675672153 - type: nauc_ndcg_at_100_diff1 value: 53.259363764380275 - type: nauc_ndcg_at_100_max value: 12.936954959423218 - type: nauc_ndcg_at_100_std value: -16.953898675672153 - type: nauc_ndcg_at_10_diff1 value: 53.70942345413554 - type: nauc_ndcg_at_10_max value: -3.8465093347016186 - type: nauc_ndcg_at_10_std value: -31.208127919994755 - type: nauc_ndcg_at_1_diff1 value: 75.30551289259554 - type: nauc_ndcg_at_1_max value: 25.53292054129834 - type: nauc_ndcg_at_1_std value: -33.285498788395145 - type: nauc_ndcg_at_20_diff1 value: 57.62409278278133 - type: nauc_ndcg_at_20_max value: 2.8040586426056233 - type: nauc_ndcg_at_20_std value: -26.270875776221704 - type: nauc_ndcg_at_3_diff1 value: 48.42294834754225 - type: nauc_ndcg_at_3_max value: 16.912467881065822 - type: nauc_ndcg_at_3_std value: -13.324841189277873 - type: nauc_ndcg_at_5_diff1 value: 47.512819802794596 - type: nauc_ndcg_at_5_max value: 14.645518203506594 - type: nauc_ndcg_at_5_std value: -17.641450435599275 - type: nauc_precision_at_1000_diff1 value: -34.43320975829637 - type: nauc_precision_at_1000_max value: 29.08585622578186 - type: nauc_precision_at_1000_std value: 46.55117940162061 - type: nauc_precision_at_100_diff1 value: -34.433209758296364 - type: nauc_precision_at_100_max value: 29.085856225781885 - type: nauc_precision_at_100_std value: 46.55117940162065 - type: nauc_precision_at_10_diff1 value: -21.895306304096902 - type: nauc_precision_at_10_max value: 33.190476527593745 - type: nauc_precision_at_10_std value: 37.64916268614298 - type: 
nauc_precision_at_1_diff1 value: 75.30551289259554 - type: nauc_precision_at_1_max value: 25.53292054129834 - type: nauc_precision_at_1_std value: -33.285498788395145 - type: nauc_precision_at_20_diff1 value: -27.63076748060466 - type: nauc_precision_at_20_max value: 30.689810416086154 - type: nauc_precision_at_20_std value: 46.164191636131626 - type: nauc_precision_at_3_diff1 value: 20.547345067837288 - type: nauc_precision_at_3_max value: 26.177050942827528 - type: nauc_precision_at_3_std value: 5.960466052973099 - type: nauc_precision_at_5_diff1 value: -8.928755534002669 - type: nauc_precision_at_5_max value: 40.83262650073459 - type: nauc_precision_at_5_std value: 26.158537031161494 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: .nan - type: nauc_recall_at_100_max value: .nan - type: nauc_recall_at_100_std value: .nan - type: nauc_recall_at_10_diff1 value: 53.08654386169444 - type: nauc_recall_at_10_max value: -23.276269379519356 - type: nauc_recall_at_10_std value: -50.80707792706157 - type: nauc_recall_at_1_diff1 value: 74.96782567287174 - type: nauc_recall_at_1_max value: -29.648279252607875 - type: nauc_recall_at_1_std value: -54.017459339141595 - type: nauc_recall_at_20_diff1 value: 51.60121897059633 - type: nauc_recall_at_20_max value: -14.241779530735387 - type: nauc_recall_at_20_std value: -37.877451525215456 - type: nauc_recall_at_3_diff1 value: 66.99474984329694 - type: nauc_recall_at_3_max value: -30.802787353187966 - type: nauc_recall_at_3_std value: -53.58737792129713 - type: nauc_recall_at_5_diff1 value: 54.64214444958567 - type: nauc_recall_at_5_max value: -23.341309362104703 - type: nauc_recall_at_5_std value: -51.381363923145265 - type: ndcg_at_1 value: 76.048 - type: ndcg_at_10 value: 69.83 - type: ndcg_at_100 value: 82.11500000000001 - type: ndcg_at_1000 value: 82.11500000000001 - type: ndcg_at_20 value: 75.995 - type: ndcg_at_3 value: 69.587 - type: ndcg_at_5 value: 69.062 - type: precision_at_1 value: 76.048 - type: precision_at_10 value: 43.653 - type: precision_at_100 value: 7.718999999999999 - type: precision_at_1000 value: 0.772 - type: precision_at_20 value: 31.108000000000004 - type: precision_at_3 value: 63.87199999999999 - type: precision_at_5 value: 56.407 - type: recall_at_1 value: 15.736 - type: recall_at_10 value: 66.873 - type: recall_at_100 value: 100.0 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 85.01100000000001 - type: recall_at_3 value: 36.441 - type: recall_at_5 value: 49.109 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions (default) type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cosine_accuracy value: 99.87326732673267 - type: cosine_accuracy_threshold value: 86.0752820968628 - type: cosine_ap value: 96.98758090713252 - type: cosine_f1 value: 93.52881698685542 - type: cosine_f1_threshold value: 86.0752820968628 - type: cosine_precision value: 94.58077709611452 - type: cosine_recall value: 92.5 - type: dot_accuracy value: 99.82574257425742 - type: dot_accuracy_threshold value: 40484.73815917969 - type: dot_ap value: 95.68959907254845 - type: dot_f1 value: 91.31293188548865 - type: dot_f1_threshold value: 40336.810302734375 - type: dot_precision value: 90.15594541910332 - type: dot_recall value: 92.5 - type: euclidean_accuracy value: 99.87128712871286 - type: 
euclidean_accuracy_threshold value: 1162.5749588012695 - type: euclidean_ap value: 96.92640435656577 - type: euclidean_f1 value: 93.4475806451613 - type: euclidean_f1_threshold value: 1162.5749588012695 - type: euclidean_precision value: 94.20731707317073 - type: euclidean_recall value: 92.7 - type: main_score value: 96.98758090713252 - type: manhattan_accuracy value: 99.86930693069307 - type: manhattan_accuracy_threshold value: 28348.71826171875 - type: manhattan_ap value: 96.93832673967925 - type: manhattan_f1 value: 93.33333333333333 - type: manhattan_f1_threshold value: 28348.71826171875 - type: manhattan_precision value: 94.28571428571428 - type: manhattan_recall value: 92.4 - type: max_accuracy value: 99.87326732673267 - type: max_ap value: 96.98758090713252 - type: max_f1 value: 93.52881698685542 - type: max_precision value: 94.58077709611452 - type: max_recall value: 92.7 - type: similarity_accuracy value: 99.87326732673267 - type: similarity_accuracy_threshold value: 86.0752820968628 - type: similarity_ap value: 96.98758090713252 - type: similarity_f1 value: 93.52881698685542 - type: similarity_f1_threshold value: 86.0752820968628 - type: similarity_precision value: 94.58077709611452 - type: similarity_recall value: 92.5 - task: type: Clustering dataset: name: MTEB StackExchangeClustering (default) type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: main_score value: 65.6560129719848 - type: v_measure value: 65.6560129719848 - type: v_measure_std value: 4.781229811487539 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P (default) type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: main_score value: 35.07546243853692 - type: v_measure value: 35.07546243853692 - type: v_measure_std value: 1.1978740356240998 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions (default) type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 51.771005199508835 - type: mrr value: 52.65443298531534 - type: main_score value: 51.771005199508835 - task: type: Summarization dataset: name: MTEB SummEval (default) type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cosine_pearson value: 29.48686238342228 - type: cosine_spearman value: 29.706543509170054 - type: dot_pearson value: 27.95853155597859 - type: dot_spearman value: 27.604287986935162 - type: main_score value: 29.706543509170054 - type: pearson value: 29.48686238342228 - type: spearman value: 29.706543509170054 - task: type: Summarization dataset: name: MTEB SummEvalFr (default) type: lyon-nlp/summarization-summeval-fr-p2p config: default split: test revision: b385812de6a9577b6f4d0f88c6a6e35395a94054 metrics: - type: cosine_pearson value: 31.551301434917868 - type: cosine_spearman value: 30.709049789175186 - type: dot_pearson value: 27.77050901756549 - type: dot_spearman value: 26.715505953561795 - type: main_score value: 30.709049789175186 - type: pearson value: 31.551301434917868 - type: spearman value: 30.709049789175186 - task: type: Reranking dataset: name: MTEB SyntecReranking (default) type: lyon-nlp/mteb-fr-reranking-syntec-s2p config: default split: test revision: b205c5084a0934ce8af14338bf03feb19499c84d metrics: - type: map value: 73.31666666666666 - type: mrr value: 
73.31666666666666 - type: main_score value: 73.31666666666666 - task: type: Retrieval dataset: name: MTEB SyntecRetrieval (default) type: lyon-nlp/mteb-fr-retrieval-syntec-s2p config: default split: test revision: 19661ccdca4dfc2d15122d776b61685f48c68ca9 metrics: - type: main_score value: 83.851 - type: map_at_1 value: 68.0 - type: map_at_10 value: 79.187 - type: map_at_100 value: 79.32900000000001 - type: map_at_1000 value: 79.32900000000001 - type: map_at_20 value: 79.32900000000001 - type: map_at_3 value: 77.333 - type: map_at_5 value: 78.93299999999999 - type: mrr_at_1 value: 68.0 - type: mrr_at_10 value: 79.18730158730159 - type: mrr_at_100 value: 79.32945845004669 - type: mrr_at_1000 value: 79.32945845004669 - type: mrr_at_20 value: 79.32945845004669 - type: mrr_at_3 value: 77.33333333333333 - type: mrr_at_5 value: 78.93333333333332 - type: nauc_map_at_1000_diff1 value: 63.31103256935259 - type: nauc_map_at_1000_max value: 11.073749121365623 - type: nauc_map_at_1000_std value: 7.4973309839738 - type: nauc_map_at_100_diff1 value: 63.31103256935259 - type: nauc_map_at_100_max value: 11.073749121365623 - type: nauc_map_at_100_std value: 7.4973309839738 - type: nauc_map_at_10_diff1 value: 62.91585737195978 - type: nauc_map_at_10_max value: 11.770664508983133 - type: nauc_map_at_10_std value: 8.179883948527962 - type: nauc_map_at_1_diff1 value: 66.1236265634718 - type: nauc_map_at_1_max value: 7.000207311173955 - type: nauc_map_at_1_std value: 6.54412272821497 - type: nauc_map_at_20_diff1 value: 63.31103256935259 - type: nauc_map_at_20_max value: 11.073749121365623 - type: nauc_map_at_20_std value: 7.4973309839738 - type: nauc_map_at_3_diff1 value: 62.14039574010254 - type: nauc_map_at_3_max value: 11.06996398110187 - type: nauc_map_at_3_std value: 7.288759297085769 - type: nauc_map_at_5_diff1 value: 63.0401271126211 - type: nauc_map_at_5_max value: 10.779317801858609 - type: nauc_map_at_5_std value: 6.476660484760681 - type: nauc_mrr_at_1000_diff1 value: 63.31103256935259 - type: nauc_mrr_at_1000_max value: 11.073749121365623 - type: nauc_mrr_at_1000_std value: 7.4973309839738 - type: nauc_mrr_at_100_diff1 value: 63.31103256935259 - type: nauc_mrr_at_100_max value: 11.073749121365623 - type: nauc_mrr_at_100_std value: 7.4973309839738 - type: nauc_mrr_at_10_diff1 value: 62.91585737195978 - type: nauc_mrr_at_10_max value: 11.770664508983133 - type: nauc_mrr_at_10_std value: 8.179883948527962 - type: nauc_mrr_at_1_diff1 value: 66.1236265634718 - type: nauc_mrr_at_1_max value: 7.000207311173955 - type: nauc_mrr_at_1_std value: 6.54412272821497 - type: nauc_mrr_at_20_diff1 value: 63.31103256935259 - type: nauc_mrr_at_20_max value: 11.073749121365623 - type: nauc_mrr_at_20_std value: 7.4973309839738 - type: nauc_mrr_at_3_diff1 value: 62.14039574010254 - type: nauc_mrr_at_3_max value: 11.06996398110187 - type: nauc_mrr_at_3_std value: 7.288759297085769 - type: nauc_mrr_at_5_diff1 value: 63.0401271126211 - type: nauc_mrr_at_5_max value: 10.779317801858609 - type: nauc_mrr_at_5_std value: 6.476660484760681 - type: nauc_ndcg_at_1000_diff1 value: 62.9544299483241 - type: nauc_ndcg_at_1000_max value: 11.577079766964538 - type: nauc_ndcg_at_1000_std value: 7.703856790100716 - type: nauc_ndcg_at_100_diff1 value: 62.9544299483241 - type: nauc_ndcg_at_100_max value: 11.577079766964538 - type: nauc_ndcg_at_100_std value: 7.703856790100716 - type: nauc_ndcg_at_10_diff1 value: 61.29907952217381 - type: nauc_ndcg_at_10_max value: 14.760627422715425 - type: nauc_ndcg_at_10_std value: 10.805573898143368 - 
type: nauc_ndcg_at_1_diff1 value: 66.1236265634718 - type: nauc_ndcg_at_1_max value: 7.000207311173955 - type: nauc_ndcg_at_1_std value: 6.54412272821497 - type: nauc_ndcg_at_20_diff1 value: 62.9544299483241 - type: nauc_ndcg_at_20_max value: 11.577079766964538 - type: nauc_ndcg_at_20_std value: 7.703856790100716 - type: nauc_ndcg_at_3_diff1 value: 60.25643527856101 - type: nauc_ndcg_at_3_max value: 12.236302709487546 - type: nauc_ndcg_at_3_std value: 7.36883189112067 - type: nauc_ndcg_at_5_diff1 value: 61.65220590318238 - type: nauc_ndcg_at_5_max value: 11.39969101913945 - type: nauc_ndcg_at_5_std value: 5.406207922379402 - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_100_diff1 value: .nan - type: nauc_precision_at_100_max value: .nan - type: nauc_precision_at_100_std value: .nan - type: nauc_precision_at_10_diff1 value: 19.14098972922579 - type: nauc_precision_at_10_max value: 100.0 - type: nauc_precision_at_10_std value: 93.46405228758135 - type: nauc_precision_at_1_diff1 value: 66.1236265634718 - type: nauc_precision_at_1_max value: 7.000207311173955 - type: nauc_precision_at_1_std value: 6.54412272821497 - type: nauc_precision_at_20_diff1 value: 100.0 - type: nauc_precision_at_20_max value: 100.0 - type: nauc_precision_at_20_std value: 100.0 - type: nauc_precision_at_3_diff1 value: 50.29636629155561 - type: nauc_precision_at_3_max value: 18.00532600292076 - type: nauc_precision_at_3_std value: 7.649686453053768 - type: nauc_precision_at_5_diff1 value: 43.522408963585356 - type: nauc_precision_at_5_max value: 16.923436041082983 - type: nauc_precision_at_5_std value: -10.854341736694092 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: .nan - type: nauc_recall_at_100_max value: .nan - type: nauc_recall_at_100_std value: .nan - type: nauc_recall_at_10_diff1 value: 19.1409897292252 - type: nauc_recall_at_10_max value: 100.0 - type: nauc_recall_at_10_std value: 93.46405228758134 - type: nauc_recall_at_1_diff1 value: 66.1236265634718 - type: nauc_recall_at_1_max value: 7.000207311173955 - type: nauc_recall_at_1_std value: 6.54412272821497 - type: nauc_recall_at_20_diff1 value: .nan - type: nauc_recall_at_20_max value: .nan - type: nauc_recall_at_20_std value: .nan - type: nauc_recall_at_3_diff1 value: 50.29636629155569 - type: nauc_recall_at_3_max value: 18.005326002920754 - type: nauc_recall_at_3_std value: 7.649686453053851 - type: nauc_recall_at_5_diff1 value: 43.5224089635856 - type: nauc_recall_at_5_max value: 16.92343604108335 - type: nauc_recall_at_5_std value: -10.854341736694499 - type: ndcg_at_1 value: 68.0 - type: ndcg_at_10 value: 83.851 - type: ndcg_at_100 value: 84.36099999999999 - type: ndcg_at_1000 value: 84.36099999999999 - type: ndcg_at_20 value: 84.36099999999999 - type: ndcg_at_3 value: 80.333 - type: ndcg_at_5 value: 83.21600000000001 - type: precision_at_1 value: 68.0 - type: precision_at_10 value: 9.8 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 5.0 - type: precision_at_3 value: 29.666999999999998 - type: precision_at_5 value: 19.2 - type: recall_at_1 value: 68.0 - type: recall_at_10 value: 98.0 - type: recall_at_100 value: 100.0 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 100.0 - type: recall_at_3 value: 89.0 - type: recall_at_5 value: 96.0 - task: 
type: Reranking dataset: name: MTEB T2Reranking (default) type: C-MTEB/T2Reranking config: default split: dev revision: 76631901a18387f85eaa53e5450019b87ad58ef9 metrics: - type: map value: 65.3088203970324 - type: mrr value: 74.79505862376546 - type: main_score value: 65.3088203970324 - task: type: Retrieval dataset: name: MTEB T2Retrieval (default) type: C-MTEB/T2Retrieval config: default split: dev revision: 8731a845f1bf500a4f111cf1070785c793d10e64 metrics: - type: main_score value: 83.163 - type: map_at_1 value: 26.875 - type: map_at_10 value: 75.454 - type: map_at_100 value: 79.036 - type: map_at_1000 value: 79.111 - type: map_at_20 value: 78.145 - type: map_at_3 value: 53.181 - type: map_at_5 value: 65.362 - type: mrr_at_1 value: 88.90057864281957 - type: mrr_at_10 value: 91.53186397301344 - type: mrr_at_100 value: 91.62809075510003 - type: mrr_at_1000 value: 91.63198173030787 - type: mrr_at_20 value: 91.59414668799909 - type: mrr_at_3 value: 91.0792565316499 - type: mrr_at_5 value: 91.35718043135199 - type: nauc_map_at_1000_diff1 value: 12.364843957982409 - type: nauc_map_at_1000_max value: 52.07043464458799 - type: nauc_map_at_1000_std value: 16.040095055100494 - type: nauc_map_at_100_diff1 value: 12.370621073823022 - type: nauc_map_at_100_max value: 51.960738727635636 - type: nauc_map_at_100_std value: 15.935832440430747 - type: nauc_map_at_10_diff1 value: 16.852819486606585 - type: nauc_map_at_10_max value: 40.11184760756059 - type: nauc_map_at_10_std value: 0.9306648364102376 - type: nauc_map_at_1_diff1 value: 52.87356542654683 - type: nauc_map_at_1_max value: -22.210039746171255 - type: nauc_map_at_1_std value: -38.11345358035342 - type: nauc_map_at_20_diff1 value: 13.045089059562837 - type: nauc_map_at_20_max value: 49.591383082160036 - type: nauc_map_at_20_std value: 12.54330050352008 - type: nauc_map_at_3_diff1 value: 38.08172234377615 - type: nauc_map_at_3_max value: -6.868621684867697 - type: nauc_map_at_3_std value: -35.4712388845996 - type: nauc_map_at_5_diff1 value: 29.665551705577474 - type: nauc_map_at_5_max value: 10.958628576519045 - type: nauc_map_at_5_std value: -25.113120842097057 - type: nauc_mrr_at_1000_diff1 value: 47.39372999496945 - type: nauc_mrr_at_1000_max value: 83.11274997493808 - type: nauc_mrr_at_1000_std value: 39.74195374546631 - type: nauc_mrr_at_100_diff1 value: 47.396678946057676 - type: nauc_mrr_at_100_max value: 83.1192584274415 - type: nauc_mrr_at_100_std value: 39.75840860374685 - type: nauc_mrr_at_10_diff1 value: 47.35365644138715 - type: nauc_mrr_at_10_max value: 83.189165639531 - type: nauc_mrr_at_10_std value: 39.83653157887758 - type: nauc_mrr_at_1_diff1 value: 47.98740362820094 - type: nauc_mrr_at_1_max value: 80.32340034580369 - type: nauc_mrr_at_1_std value: 34.57857131423388 - type: nauc_mrr_at_20_diff1 value: 47.399132055537194 - type: nauc_mrr_at_20_max value: 83.16329919869686 - type: nauc_mrr_at_20_std value: 39.84204692042734 - type: nauc_mrr_at_3_diff1 value: 47.09295580511751 - type: nauc_mrr_at_3_max value: 82.95831045602642 - type: nauc_mrr_at_3_std value: 38.98036804692351 - type: nauc_mrr_at_5_diff1 value: 47.20100268549764 - type: nauc_mrr_at_5_max value: 83.16652480381642 - type: nauc_mrr_at_5_std value: 39.55690491560902 - type: nauc_ndcg_at_1000_diff1 value: 17.201962509184547 - type: nauc_ndcg_at_1000_max value: 63.75820559259539 - type: nauc_ndcg_at_1000_std value: 29.28676096486067 - type: nauc_ndcg_at_100_diff1 value: 16.76847216096811 - type: nauc_ndcg_at_100_max value: 62.646517934470744 - type: 
nauc_ndcg_at_100_std value: 28.7441617667637 - type: nauc_ndcg_at_10_diff1 value: 16.559511980751886 - type: nauc_ndcg_at_10_max value: 54.35027464277944 - type: nauc_ndcg_at_10_std value: 16.98089333577716 - type: nauc_ndcg_at_1_diff1 value: 47.98740362820094 - type: nauc_ndcg_at_1_max value: 80.32340034580369 - type: nauc_ndcg_at_1_std value: 34.57857131423388 - type: nauc_ndcg_at_20_diff1 value: 16.721525245428243 - type: nauc_ndcg_at_20_max value: 57.683661870555724 - type: nauc_ndcg_at_20_std value: 21.736044200026853 - type: nauc_ndcg_at_3_diff1 value: 12.488009696556192 - type: nauc_ndcg_at_3_max value: 69.2365575305502 - type: nauc_ndcg_at_3_std value: 30.622418945055323 - type: nauc_ndcg_at_5_diff1 value: 12.364114556230609 - type: nauc_ndcg_at_5_max value: 62.33360746285387 - type: nauc_ndcg_at_5_std value: 24.898000803570227 - type: nauc_precision_at_1000_diff1 value: -35.14745130154524 - type: nauc_precision_at_1000_max value: 48.811507982849065 - type: nauc_precision_at_1000_std value: 62.43036496029399 - type: nauc_precision_at_100_diff1 value: -35.15276411320076 - type: nauc_precision_at_100_max value: 50.87010333741109 - type: nauc_precision_at_100_std value: 63.418221030407175 - type: nauc_precision_at_10_diff1 value: -34.84255710936113 - type: nauc_precision_at_10_max value: 56.588401051428825 - type: nauc_precision_at_10_std value: 57.4763370653757 - type: nauc_precision_at_1_diff1 value: 47.98740362820094 - type: nauc_precision_at_1_max value: 80.32340034580369 - type: nauc_precision_at_1_std value: 34.57857131423388 - type: nauc_precision_at_20_diff1 value: -35.165762365233505 - type: nauc_precision_at_20_max value: 54.148762449660424 - type: nauc_precision_at_20_std value: 61.569719669368716 - type: nauc_precision_at_3_diff1 value: -28.63023175340299 - type: nauc_precision_at_3_max value: 68.69825987618499 - type: nauc_precision_at_3_std value: 48.15479495755423 - type: nauc_precision_at_5_diff1 value: -34.13811355456687 - type: nauc_precision_at_5_max value: 62.369363941490604 - type: nauc_precision_at_5_std value: 52.282904411187914 - type: nauc_recall_at_1000_diff1 value: 8.686444579162663 - type: nauc_recall_at_1000_max value: 59.58864478011338 - type: nauc_recall_at_1000_std value: 56.692774954297455 - type: nauc_recall_at_100_diff1 value: 8.820596225758342 - type: nauc_recall_at_100_max value: 53.15048885657892 - type: nauc_recall_at_100_std value: 39.78931159236714 - type: nauc_recall_at_10_diff1 value: 16.022301106315027 - type: nauc_recall_at_10_max value: 29.83242342459543 - type: nauc_recall_at_10_std value: -4.805965555875844 - type: nauc_recall_at_1_diff1 value: 52.87356542654683 - type: nauc_recall_at_1_max value: -22.210039746171255 - type: nauc_recall_at_1_std value: -38.11345358035342 - type: nauc_recall_at_20_diff1 value: 10.35772828627265 - type: nauc_recall_at_20_max value: 43.06420839754062 - type: nauc_recall_at_20_std value: 15.040522218235692 - type: nauc_recall_at_3_diff1 value: 36.23953684770224 - type: nauc_recall_at_3_max value: -11.709269151700374 - type: nauc_recall_at_3_std value: -38.13943178150384 - type: nauc_recall_at_5_diff1 value: 28.644872415763384 - type: nauc_recall_at_5_max value: 2.062151266111129 - type: nauc_recall_at_5_std value: -30.81114034774277 - type: ndcg_at_1 value: 88.901 - type: ndcg_at_10 value: 83.163 - type: ndcg_at_100 value: 86.854 - type: ndcg_at_1000 value: 87.602 - type: ndcg_at_20 value: 84.908 - type: ndcg_at_3 value: 84.848 - type: ndcg_at_5 value: 83.372 - type: precision_at_1 value: 88.901 - type: 
precision_at_10 value: 41.343 - type: precision_at_100 value: 4.957000000000001 - type: precision_at_1000 value: 0.513 - type: precision_at_20 value: 22.955000000000002 - type: precision_at_3 value: 74.29599999999999 - type: precision_at_5 value: 62.251999999999995 - type: recall_at_1 value: 26.875 - type: recall_at_10 value: 81.902 - type: recall_at_100 value: 93.988 - type: recall_at_1000 value: 97.801 - type: recall_at_20 value: 87.809 - type: recall_at_3 value: 54.869 - type: recall_at_5 value: 68.728 - task: type: PairClassification dataset: name: MTEB TERRa (default) type: ai-forever/terra-pairclassification config: default split: dev revision: 7b58f24536063837d644aab9a023c62199b2a612 metrics: - type: cosine_accuracy value: 60.586319218241044 - type: cosine_accuracy_threshold value: 82.49806761741638 - type: cosine_ap value: 58.73198048427448 - type: cosine_f1 value: 67.37967914438502 - type: cosine_f1_threshold value: 77.46461033821106 - type: cosine_precision value: 57.01357466063348 - type: cosine_recall value: 82.35294117647058 - type: dot_accuracy value: 60.26058631921825 - type: dot_accuracy_threshold value: 35627.020263671875 - type: dot_ap value: 57.418783612898224 - type: dot_f1 value: 66.51982378854623 - type: dot_f1_threshold value: 27620.843505859375 - type: dot_precision value: 50.16611295681063 - type: dot_recall value: 98.69281045751634 - type: euclidean_accuracy value: 60.26058631921825 - type: euclidean_accuracy_threshold value: 1255.4466247558594 - type: euclidean_ap value: 58.748656145387955 - type: euclidean_f1 value: 66.99029126213591 - type: euclidean_f1_threshold value: 1565.1330947875977 - type: euclidean_precision value: 53.28185328185329 - type: euclidean_recall value: 90.19607843137256 - type: main_score value: 58.8479126365766 - type: manhattan_accuracy value: 59.934853420195445 - type: manhattan_accuracy_threshold value: 29897.271728515625 - type: manhattan_ap value: 58.8479126365766 - type: manhattan_f1 value: 66.81318681318683 - type: manhattan_f1_threshold value: 46291.802978515625 - type: manhattan_precision value: 50.331125827814574 - type: manhattan_recall value: 99.34640522875817 - type: max_accuracy value: 60.586319218241044 - type: max_ap value: 58.8479126365766 - type: max_f1 value: 67.37967914438502 - type: max_precision value: 57.01357466063348 - type: max_recall value: 99.34640522875817 - type: similarity_accuracy value: 60.586319218241044 - type: similarity_accuracy_threshold value: 82.49806761741638 - type: similarity_ap value: 58.73198048427448 - type: similarity_f1 value: 67.37967914438502 - type: similarity_f1_threshold value: 77.46461033821106 - type: similarity_precision value: 57.01357466063348 - type: similarity_recall value: 82.35294117647058 - task: type: Classification dataset: name: MTEB TNews (default) type: C-MTEB/TNews-classification config: default split: validation revision: 317f262bf1e6126357bbe89e875451e4b0938fe4 metrics: - type: accuracy value: 45.967999999999996 - type: f1 value: 44.699306100915706 - type: f1_weighted value: 46.03730319014832 - type: main_score value: 45.967999999999996 - task: type: Retrieval dataset: name: MTEB TRECCOVID (default) type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: map_at_1 value: 0.251 - type: map_at_10 value: 1.9480000000000002 - type: map_at_100 value: 11.082 - type: map_at_1000 value: 26.700000000000003 - type: map_at_20 value: 3.3529999999999998 - type: map_at_3 value: 0.679 - type: map_at_5 value: 1.079 - type: 
mrr_at_1 value: 94.0 - type: mrr_at_10 value: 95.786 - type: mrr_at_100 value: 95.786 - type: mrr_at_1000 value: 95.786 - type: mrr_at_20 value: 95.786 - type: mrr_at_3 value: 95.0 - type: mrr_at_5 value: 95.5 - type: ndcg_at_1 value: 91.0 - type: ndcg_at_10 value: 77.71900000000001 - type: ndcg_at_100 value: 57.726 - type: ndcg_at_1000 value: 52.737 - type: ndcg_at_20 value: 72.54 - type: ndcg_at_3 value: 83.397 - type: ndcg_at_5 value: 80.806 - type: precision_at_1 value: 94.0 - type: precision_at_10 value: 81.0 - type: precision_at_100 value: 59.199999999999996 - type: precision_at_1000 value: 23.244 - type: precision_at_20 value: 75.2 - type: precision_at_3 value: 88.0 - type: precision_at_5 value: 84.8 - type: recall_at_1 value: 0.251 - type: recall_at_10 value: 2.1229999999999998 - type: recall_at_100 value: 14.496999999999998 - type: recall_at_1000 value: 50.09 - type: recall_at_20 value: 3.8309999999999995 - type: recall_at_3 value: 0.696 - type: recall_at_5 value: 1.1400000000000001 - type: main_score value: 77.71900000000001 - task: type: Clustering dataset: name: MTEB TenKGnadClusteringP2P (default) type: slvnwhrl/tenkgnad-clustering-p2p config: default split: test revision: 5c59e41555244b7e45c9a6be2d720ab4bafae558 metrics: - type: main_score value: 43.763609722295215 - type: v_measure value: 43.763609722295215 - type: v_measure_std value: 2.8751199473862457 - task: type: Clustering dataset: name: MTEB TenKGnadClusteringS2S (default) type: slvnwhrl/tenkgnad-clustering-s2s config: default split: test revision: 6cddbe003f12b9b140aec477b583ac4191f01786 metrics: - type: main_score value: 39.762424448504355 - type: v_measure value: 39.762424448504355 - type: v_measure_std value: 3.30146124979502 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringP2P (default) type: C-MTEB/ThuNewsClusteringP2P config: default split: test revision: 5798586b105c0434e4f0fe5e767abe619442cf93 metrics: - type: main_score value: 63.133819258289456 - type: v_measure value: 63.133819258289456 - type: v_measure_std value: 1.8854253356479695 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringS2S (default) type: C-MTEB/ThuNewsClusteringS2S config: default split: test revision: 8a8b2caeda43f39e13c4bc5bea0f8a667896e10d metrics: - type: main_score value: 58.98195851785808 - type: v_measure value: 58.98195851785808 - type: v_measure_std value: 1.6237600076393737 - task: type: Retrieval dataset: name: MTEB Touche2020 (default) type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: map_at_1 value: 3.3550000000000004 - type: map_at_10 value: 10.08 - type: map_at_100 value: 16.136 - type: map_at_1000 value: 17.605 - type: map_at_20 value: 12.561 - type: map_at_3 value: 5.641 - type: map_at_5 value: 7.3260000000000005 - type: mrr_at_1 value: 46.939 - type: mrr_at_10 value: 58.152 - type: mrr_at_100 value: 58.594 - type: mrr_at_1000 value: 58.601000000000006 - type: mrr_at_20 value: 58.279 - type: mrr_at_3 value: 55.102 - type: mrr_at_5 value: 56.531 - type: ndcg_at_1 value: 44.897999999999996 - type: ndcg_at_10 value: 26.298 - type: ndcg_at_100 value: 37.596000000000004 - type: ndcg_at_1000 value: 49.424 - type: ndcg_at_20 value: 27.066000000000003 - type: ndcg_at_3 value: 31.528 - type: ndcg_at_5 value: 28.219 - type: precision_at_1 value: 46.939 - type: precision_at_10 value: 22.245 - type: precision_at_100 value: 7.531000000000001 - type: precision_at_1000 value: 1.5350000000000001 - type: precision_at_20 value: 17.041 - type: 
precision_at_3 value: 30.612000000000002 - type: precision_at_5 value: 26.122 - type: recall_at_1 value: 3.3550000000000004 - type: recall_at_10 value: 16.41 - type: recall_at_100 value: 47.272 - type: recall_at_1000 value: 83.584 - type: recall_at_20 value: 24.091 - type: recall_at_3 value: 6.8180000000000005 - type: recall_at_5 value: 9.677 - type: main_score value: 26.298 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification (default) type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 91.2890625 - type: ap value: 33.95547153875715 - type: ap_weighted value: 33.95547153875715 - type: f1 value: 75.10768597556462 - type: f1_weighted value: 92.00161208992606 - type: main_score value: 91.2890625 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification (default) type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 71.3978494623656 - type: f1 value: 71.7194818511814 - type: f1_weighted value: 71.13860187349744 - type: main_score value: 71.3978494623656 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering (default) type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: main_score value: 52.4921688720602 - type: v_measure value: 52.4921688720602 - type: v_measure_std value: 0.992768152658908 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 (default) type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cosine_accuracy value: 85.11652858079513 - type: cosine_accuracy_threshold value: 87.90839910507202 - type: cosine_ap value: 70.90459908851724 - type: cosine_f1 value: 65.66581227877457 - type: cosine_f1_threshold value: 85.13308763504028 - type: cosine_precision value: 61.094708153531684 - type: cosine_recall value: 70.97625329815304 - type: dot_accuracy value: 83.41181379269239 - type: dot_accuracy_threshold value: 43110.113525390625 - type: dot_ap value: 65.64869491143095 - type: dot_f1 value: 62.05308447460914 - type: dot_f1_threshold value: 41412.542724609375 - type: dot_precision value: 57.38623626989464 - type: dot_recall value: 67.54617414248021 - type: euclidean_accuracy value: 85.15229182809799 - type: euclidean_accuracy_threshold value: 1043.08500289917 - type: euclidean_ap value: 70.71204383269375 - type: euclidean_f1 value: 65.20304568527919 - type: euclidean_f1_threshold value: 1179.2595863342285 - type: euclidean_precision value: 62.81173594132029 - type: euclidean_recall value: 67.78364116094987 - type: main_score value: 70.90459908851724 - type: manhattan_accuracy value: 85.1820945341837 - type: manhattan_accuracy_threshold value: 26115.0390625 - type: manhattan_ap value: 70.66113937117431 - type: manhattan_f1 value: 65.33383628819313 - type: manhattan_f1_threshold value: 29105.181884765625 - type: manhattan_precision value: 62.40691808791736 - type: manhattan_recall value: 68.54881266490766 - type: max_accuracy value: 85.1820945341837 - type: max_ap value: 70.90459908851724 - type: max_f1 value: 65.66581227877457 - type: max_precision value: 62.81173594132029 - type: max_recall value: 70.97625329815304 - type: similarity_accuracy value: 85.11652858079513 - type: similarity_accuracy_threshold value: 87.90839910507202 - type: 
similarity_ap value: 70.90459908851724 - type: similarity_f1 value: 65.66581227877457 - type: similarity_f1_threshold value: 85.13308763504028 - type: similarity_precision value: 61.094708153531684 - type: similarity_recall value: 70.97625329815304 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus (default) type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cosine_accuracy value: 88.10299996119068 - type: cosine_accuracy_threshold value: 84.34982895851135 - type: cosine_ap value: 84.13755787769226 - type: cosine_f1 value: 76.0967548076923 - type: cosine_f1_threshold value: 82.8936219215393 - type: cosine_precision value: 74.28864769727193 - type: cosine_recall value: 77.99507237449954 - type: dot_accuracy value: 86.64182869561843 - type: dot_accuracy_threshold value: 38794.677734375 - type: dot_ap value: 80.20301567411457 - type: dot_f1 value: 73.50650291634967 - type: dot_f1_threshold value: 37447.23205566406 - type: dot_precision value: 69.41498460485802 - type: dot_recall value: 78.11056359716662 - type: euclidean_accuracy value: 87.9361198432103 - type: euclidean_accuracy_threshold value: 1184.421157836914 - type: euclidean_ap value: 83.79582690117218 - type: euclidean_f1 value: 75.81431709042175 - type: euclidean_f1_threshold value: 1258.2727432250977 - type: euclidean_precision value: 73.39099099099099 - type: euclidean_recall value: 78.40314136125654 - type: main_score value: 84.13755787769226 - type: manhattan_accuracy value: 87.96134590755618 - type: manhattan_accuracy_threshold value: 29077.291870117188 - type: manhattan_ap value: 83.79487172269923 - type: manhattan_f1 value: 75.82421603424935 - type: manhattan_f1_threshold value: 31224.124145507812 - type: manhattan_precision value: 72.24740255212329 - type: manhattan_recall value: 79.77363720357253 - type: max_accuracy value: 88.10299996119068 - type: max_ap value: 84.13755787769226 - type: max_f1 value: 76.0967548076923 - type: max_precision value: 74.28864769727193 - type: max_recall value: 79.77363720357253 - type: similarity_accuracy value: 88.10299996119068 - type: similarity_accuracy_threshold value: 84.34982895851135 - type: similarity_ap value: 84.13755787769226 - type: similarity_f1 value: 76.0967548076923 - type: similarity_f1_threshold value: 82.8936219215393 - type: similarity_precision value: 74.28864769727193 - type: similarity_recall value: 77.99507237449954 - task: type: Retrieval dataset: name: MTEB VideoRetrieval (default) type: C-MTEB/VideoRetrieval config: default split: dev revision: 58c2597a5943a2ba48f4668c3b90d796283c5639 metrics: - type: main_score value: 70.433 - type: map_at_1 value: 55.7 - type: map_at_10 value: 66.013 - type: map_at_100 value: 66.534 - type: map_at_1000 value: 66.547 - type: map_at_20 value: 66.334 - type: map_at_3 value: 64.2 - type: map_at_5 value: 65.445 - type: mrr_at_1 value: 55.7 - type: mrr_at_10 value: 66.01329365079364 - type: mrr_at_100 value: 66.53350061744233 - type: mrr_at_1000 value: 66.54744831962995 - type: mrr_at_20 value: 66.3335147364675 - type: mrr_at_3 value: 64.2 - type: mrr_at_5 value: 65.44500000000002 - type: nauc_map_at_1000_diff1 value: 76.26428836976245 - type: nauc_map_at_1000_max value: 35.41847367373575 - type: nauc_map_at_1000_std value: -33.04639860831992 - type: nauc_map_at_100_diff1 value: 76.25793229023193 - type: nauc_map_at_100_max value: 35.43663260110076 - type: nauc_map_at_100_std value: -33.04238139882945 - type: 
nauc_map_at_10_diff1 value: 76.2108281297711 - type: nauc_map_at_10_max value: 35.59442419423183 - type: nauc_map_at_10_std value: -33.32346518997277 - type: nauc_map_at_1_diff1 value: 79.17728405262736 - type: nauc_map_at_1_max value: 31.880738163589527 - type: nauc_map_at_1_std value: -30.891888718004584 - type: nauc_map_at_20_diff1 value: 76.2181333410193 - type: nauc_map_at_20_max value: 35.43448818430876 - type: nauc_map_at_20_std value: -33.35682442863193 - type: nauc_map_at_3_diff1 value: 76.10046541433466 - type: nauc_map_at_3_max value: 34.6831278555291 - type: nauc_map_at_3_std value: -34.030826044831116 - type: nauc_map_at_5_diff1 value: 75.96513023582064 - type: nauc_map_at_5_max value: 34.66920832438069 - type: nauc_map_at_5_std value: -33.79799777830796 - type: nauc_mrr_at_1000_diff1 value: 76.26428836976245 - type: nauc_mrr_at_1000_max value: 35.41847367373575 - type: nauc_mrr_at_1000_std value: -33.04639860831992 - type: nauc_mrr_at_100_diff1 value: 76.25793229023193 - type: nauc_mrr_at_100_max value: 35.43663260110076 - type: nauc_mrr_at_100_std value: -33.04238139882945 - type: nauc_mrr_at_10_diff1 value: 76.2108281297711 - type: nauc_mrr_at_10_max value: 35.59442419423183 - type: nauc_mrr_at_10_std value: -33.32346518997277 - type: nauc_mrr_at_1_diff1 value: 79.17728405262736 - type: nauc_mrr_at_1_max value: 31.880738163589527 - type: nauc_mrr_at_1_std value: -30.891888718004584 - type: nauc_mrr_at_20_diff1 value: 76.2181333410193 - type: nauc_mrr_at_20_max value: 35.43448818430876 - type: nauc_mrr_at_20_std value: -33.35682442863193 - type: nauc_mrr_at_3_diff1 value: 76.10046541433466 - type: nauc_mrr_at_3_max value: 34.6831278555291 - type: nauc_mrr_at_3_std value: -34.030826044831116 - type: nauc_mrr_at_5_diff1 value: 75.96513023582064 - type: nauc_mrr_at_5_max value: 34.66920832438069 - type: nauc_mrr_at_5_std value: -33.79799777830796 - type: nauc_ndcg_at_1000_diff1 value: 75.68118206798317 - type: nauc_ndcg_at_1000_max value: 37.12252980787349 - type: nauc_ndcg_at_1000_std value: -31.457578337430505 - type: nauc_ndcg_at_100_diff1 value: 75.46730761564156 - type: nauc_ndcg_at_100_max value: 37.549890025544265 - type: nauc_ndcg_at_100_std value: -31.35066985945112 - type: nauc_ndcg_at_10_diff1 value: 75.09890404887037 - type: nauc_ndcg_at_10_max value: 38.024147790014204 - type: nauc_ndcg_at_10_std value: -33.67408368593356 - type: nauc_ndcg_at_1_diff1 value: 79.17728405262736 - type: nauc_ndcg_at_1_max value: 31.880738163589527 - type: nauc_ndcg_at_1_std value: -30.891888718004584 - type: nauc_ndcg_at_20_diff1 value: 75.12977548171354 - type: nauc_ndcg_at_20_max value: 37.524926748917956 - type: nauc_ndcg_at_20_std value: -33.771344674947485 - type: nauc_ndcg_at_3_diff1 value: 74.94037476984154 - type: nauc_ndcg_at_3_max value: 35.60345554050552 - type: nauc_ndcg_at_3_std value: -35.256991346321854 - type: nauc_ndcg_at_5_diff1 value: 74.54265907753783 - type: nauc_ndcg_at_5_max value: 35.57662819978585 - type: nauc_ndcg_at_5_std value: -34.879794448418465 - type: nauc_precision_at_1000_diff1 value: 74.52277207179142 - type: nauc_precision_at_1000_max value: 94.25510945118707 - type: nauc_precision_at_1000_std value: 91.6874157070222 - type: nauc_precision_at_100_diff1 value: 65.98346655735419 - type: nauc_precision_at_100_max value: 78.81168727653687 - type: nauc_precision_at_100_std value: 27.241465691967708 - type: nauc_precision_at_10_diff1 value: 69.55050319096688 - type: nauc_precision_at_10_max value: 51.827749140893374 - type: nauc_precision_at_10_std value: 
-34.60818605792837 - type: nauc_precision_at_1_diff1 value: 79.17728405262736 - type: nauc_precision_at_1_max value: 31.880738163589527 - type: nauc_precision_at_1_std value: -30.891888718004584 - type: nauc_precision_at_20_diff1 value: 68.08078305042736 - type: nauc_precision_at_20_max value: 52.83318878288501 - type: nauc_precision_at_20_std value: -35.46070292817927 - type: nauc_precision_at_3_diff1 value: 70.76249609881901 - type: nauc_precision_at_3_max value: 38.86561868624655 - type: nauc_precision_at_3_std value: -39.68917853446992 - type: nauc_precision_at_5_diff1 value: 68.39110629013278 - type: nauc_precision_at_5_max value: 39.28677163904683 - type: nauc_precision_at_5_std value: -39.39101423819562 - type: nauc_recall_at_1000_diff1 value: 74.52277207179175 - type: nauc_recall_at_1000_max value: 94.25510945118776 - type: nauc_recall_at_1000_std value: 91.68741570702382 - type: nauc_recall_at_100_diff1 value: 65.9834665573548 - type: nauc_recall_at_100_max value: 78.81168727653679 - type: nauc_recall_at_100_std value: 27.241465691967598 - type: nauc_recall_at_10_diff1 value: 69.55050319096708 - type: nauc_recall_at_10_max value: 51.82774914089347 - type: nauc_recall_at_10_std value: -34.6081860579283 - type: nauc_recall_at_1_diff1 value: 79.17728405262736 - type: nauc_recall_at_1_max value: 31.880738163589527 - type: nauc_recall_at_1_std value: -30.891888718004584 - type: nauc_recall_at_20_diff1 value: 68.08078305042746 - type: nauc_recall_at_20_max value: 52.833188782885244 - type: nauc_recall_at_20_std value: -35.46070292817895 - type: nauc_recall_at_3_diff1 value: 70.76249609881896 - type: nauc_recall_at_3_max value: 38.865618686246464 - type: nauc_recall_at_3_std value: -39.68917853446999 - type: nauc_recall_at_5_diff1 value: 68.39110629013274 - type: nauc_recall_at_5_max value: 39.28677163904688 - type: nauc_recall_at_5_std value: -39.39101423819562 - type: ndcg_at_1 value: 55.7 - type: ndcg_at_10 value: 70.433 - type: ndcg_at_100 value: 72.975 - type: ndcg_at_1000 value: 73.283 - type: ndcg_at_20 value: 71.58 - type: ndcg_at_3 value: 66.83099999999999 - type: ndcg_at_5 value: 69.085 - type: precision_at_1 value: 55.7 - type: precision_at_10 value: 8.4 - type: precision_at_100 value: 0.959 - type: precision_at_1000 value: 0.098 - type: precision_at_20 value: 4.425 - type: precision_at_3 value: 24.8 - type: precision_at_5 value: 15.98 - type: recall_at_1 value: 55.7 - type: recall_at_10 value: 84.0 - type: recall_at_100 value: 95.89999999999999 - type: recall_at_1000 value: 98.2 - type: recall_at_20 value: 88.5 - type: recall_at_3 value: 74.4 - type: recall_at_5 value: 79.9 - task: type: Classification dataset: name: MTEB Waimai (default) type: C-MTEB/waimai-classification config: default split: test revision: 339287def212450dcaa9df8c22bf93e9980c7023 metrics: - type: accuracy value: 86.58999999999999 - type: ap value: 70.02619249927523 - type: ap_weighted value: 70.02619249927523 - type: f1 value: 84.97572770889423 - type: f1_weighted value: 86.6865713531272 - type: main_score value: 86.58999999999999 - task: type: Retrieval dataset: name: MTEB XMarket (en) type: jinaai/xmarket_ml config: en split: test revision: dfe57acff5b62c23732a7b7d3e3fb84ff501708b metrics: - type: main_score value: 34.772999999999996 - type: map_at_1 value: 7.2620000000000005 - type: map_at_10 value: 17.98 - type: map_at_100 value: 24.828 - type: map_at_1000 value: 26.633000000000003 - type: map_at_20 value: 20.699 - type: map_at_3 value: 12.383 - type: map_at_5 value: 14.871 - type: mrr_at_1 value: 
34.718100890207715 - type: mrr_at_10 value: 43.9336827525092 - type: mrr_at_100 value: 44.66474011066837 - type: mrr_at_1000 value: 44.7075592197356 - type: mrr_at_20 value: 44.35984436569346 - type: mrr_at_3 value: 41.73901893981052 - type: mrr_at_5 value: 43.025973550207134 - type: nauc_map_at_1000_diff1 value: 13.899869081196364 - type: nauc_map_at_1000_max value: 46.60452816386231 - type: nauc_map_at_1000_std value: 24.87925799401773 - type: nauc_map_at_100_diff1 value: 16.164805650871084 - type: nauc_map_at_100_max value: 44.720912958558095 - type: nauc_map_at_100_std value: 20.236734536210477 - type: nauc_map_at_10_diff1 value: 23.58580520913581 - type: nauc_map_at_10_max value: 31.276151869914216 - type: nauc_map_at_10_std value: -0.1833326246041355 - type: nauc_map_at_1_diff1 value: 37.02663305598722 - type: nauc_map_at_1_max value: 14.931071531116528 - type: nauc_map_at_1_std value: -12.478790028708453 - type: nauc_map_at_20_diff1 value: 20.718297881540593 - type: nauc_map_at_20_max value: 36.62264094841859 - type: nauc_map_at_20_std value: 6.658514770057742 - type: nauc_map_at_3_diff1 value: 29.379034581120006 - type: nauc_map_at_3_max value: 21.387214269548803 - type: nauc_map_at_3_std value: -9.3404121914247 - type: nauc_map_at_5_diff1 value: 26.627169792839485 - type: nauc_map_at_5_max value: 25.393331109666388 - type: nauc_map_at_5_std value: -6.023485287246353 - type: nauc_mrr_at_1000_diff1 value: 12.047232036652295 - type: nauc_mrr_at_1000_max value: 46.611862580860645 - type: nauc_mrr_at_1000_std value: 27.89146066442305 - type: nauc_mrr_at_100_diff1 value: 12.05261747449997 - type: nauc_mrr_at_100_max value: 46.61328535381203 - type: nauc_mrr_at_100_std value: 27.886145596874535 - type: nauc_mrr_at_10_diff1 value: 12.006935553036941 - type: nauc_mrr_at_10_max value: 46.53351686240496 - type: nauc_mrr_at_10_std value: 27.708742470257462 - type: nauc_mrr_at_1_diff1 value: 13.323408127738782 - type: nauc_mrr_at_1_max value: 43.78884661002012 - type: nauc_mrr_at_1_std value: 25.164417588165673 - type: nauc_mrr_at_20_diff1 value: 12.036022973968011 - type: nauc_mrr_at_20_max value: 46.56537838037131 - type: nauc_mrr_at_20_std value: 27.78189157249635 - type: nauc_mrr_at_3_diff1 value: 11.943896700976381 - type: nauc_mrr_at_3_max value: 46.33644663073225 - type: nauc_mrr_at_3_std value: 27.523915405053845 - type: nauc_mrr_at_5_diff1 value: 12.03108009033769 - type: nauc_mrr_at_5_max value: 46.49103616896692 - type: nauc_mrr_at_5_std value: 27.630879129863366 - type: nauc_ndcg_at_1000_diff1 value: 9.766823796017324 - type: nauc_ndcg_at_1000_max value: 52.85844801910602 - type: nauc_ndcg_at_1000_std value: 36.43271437761207 - type: nauc_ndcg_at_100_diff1 value: 12.035059298282036 - type: nauc_ndcg_at_100_max value: 50.05520240705682 - type: nauc_ndcg_at_100_std value: 29.87678724506636 - type: nauc_ndcg_at_10_diff1 value: 10.281893031139424 - type: nauc_ndcg_at_10_max value: 47.02153679426017 - type: nauc_ndcg_at_10_std value: 26.624948330369126 - type: nauc_ndcg_at_1_diff1 value: 13.323408127738782 - type: nauc_ndcg_at_1_max value: 43.78884661002012 - type: nauc_ndcg_at_1_std value: 25.164417588165673 - type: nauc_ndcg_at_20_diff1 value: 11.463524849646598 - type: nauc_ndcg_at_20_max value: 47.415073186019704 - type: nauc_ndcg_at_20_std value: 26.359019620164307 - type: nauc_ndcg_at_3_diff1 value: 9.689199913805394 - type: nauc_ndcg_at_3_max value: 45.68151849572808 - type: nauc_ndcg_at_3_std value: 26.559193219799486 - type: nauc_ndcg_at_5_diff1 value: 9.448823370356575 - type: 
nauc_ndcg_at_5_max value: 46.19999662690141 - type: nauc_ndcg_at_5_std value: 26.8411706726069 - type: nauc_precision_at_1000_diff1 value: -20.379065598727024 - type: nauc_precision_at_1000_max value: 13.162562437268427 - type: nauc_precision_at_1000_std value: 22.658226157785812 - type: nauc_precision_at_100_diff1 value: -16.458155977309282 - type: nauc_precision_at_100_max value: 35.97956789169889 - type: nauc_precision_at_100_std value: 48.878375009979194 - type: nauc_precision_at_10_diff1 value: -7.810992317607771 - type: nauc_precision_at_10_max value: 49.307339277444754 - type: nauc_precision_at_10_std value: 42.82533951854582 - type: nauc_precision_at_1_diff1 value: 13.323408127738782 - type: nauc_precision_at_1_max value: 43.78884661002012 - type: nauc_precision_at_1_std value: 25.164417588165673 - type: nauc_precision_at_20_diff1 value: -11.43933465149542 - type: nauc_precision_at_20_max value: 46.93722753460038 - type: nauc_precision_at_20_std value: 47.36223769029678 - type: nauc_precision_at_3_diff1 value: 1.3230178593599737 - type: nauc_precision_at_3_max value: 48.49039534395576 - type: nauc_precision_at_3_std value: 33.161384183129194 - type: nauc_precision_at_5_diff1 value: -3.185516457926519 - type: nauc_precision_at_5_max value: 49.5814309394308 - type: nauc_precision_at_5_std value: 37.57637865900281 - type: nauc_recall_at_1000_diff1 value: 7.839499443984168 - type: nauc_recall_at_1000_max value: 52.67165467640894 - type: nauc_recall_at_1000_std value: 48.85318316702583 - type: nauc_recall_at_100_diff1 value: 14.117557049589418 - type: nauc_recall_at_100_max value: 40.59046301348715 - type: nauc_recall_at_100_std value: 24.379680901739505 - type: nauc_recall_at_10_diff1 value: 20.04536052614054 - type: nauc_recall_at_10_max value: 25.54148839721574 - type: nauc_recall_at_10_std value: -1.938182527562211 - type: nauc_recall_at_1_diff1 value: 37.02663305598722 - type: nauc_recall_at_1_max value: 14.931071531116528 - type: nauc_recall_at_1_std value: -12.478790028708453 - type: nauc_recall_at_20_diff1 value: 17.959977483235566 - type: nauc_recall_at_20_max value: 29.88502687870809 - type: nauc_recall_at_20_std value: 4.26527395196852 - type: nauc_recall_at_3_diff1 value: 26.297810954500456 - type: nauc_recall_at_3_max value: 18.819406079307402 - type: nauc_recall_at_3_std value: -10.002237229729081 - type: nauc_recall_at_5_diff1 value: 22.739080899568485 - type: nauc_recall_at_5_max value: 21.0322968243985 - type: nauc_recall_at_5_std value: -6.927749435306422 - type: ndcg_at_1 value: 34.717999999999996 - type: ndcg_at_10 value: 34.772999999999996 - type: ndcg_at_100 value: 39.407 - type: ndcg_at_1000 value: 44.830999999999996 - type: ndcg_at_20 value: 35.667 - type: ndcg_at_3 value: 34.332 - type: ndcg_at_5 value: 34.408 - type: precision_at_1 value: 34.717999999999996 - type: precision_at_10 value: 23.430999999999997 - type: precision_at_100 value: 9.31 - type: precision_at_1000 value: 2.259 - type: precision_at_20 value: 18.826999999999998 - type: precision_at_3 value: 30.553 - type: precision_at_5 value: 27.792 - type: recall_at_1 value: 7.2620000000000005 - type: recall_at_10 value: 26.384 - type: recall_at_100 value: 52.506 - type: recall_at_1000 value: 73.38 - type: recall_at_20 value: 34.032000000000004 - type: recall_at_3 value: 14.821000000000002 - type: recall_at_5 value: 19.481 - task: type: Retrieval dataset: name: MTEB XMarket (de) type: jinaai/xmarket_ml config: de split: test revision: dfe57acff5b62c23732a7b7d3e3fb84ff501708b metrics: - type: main_score 
value: 28.316000000000003 - type: map_at_1 value: 8.667 - type: map_at_10 value: 17.351 - type: map_at_100 value: 21.02 - type: map_at_1000 value: 21.951 - type: map_at_20 value: 18.994 - type: map_at_3 value: 13.23 - type: map_at_5 value: 15.17 - type: mrr_at_1 value: 27.27272727272727 - type: mrr_at_10 value: 36.10858487561485 - type: mrr_at_100 value: 36.92033814316568 - type: mrr_at_1000 value: 36.972226653870365 - type: mrr_at_20 value: 36.58914906427944 - type: mrr_at_3 value: 33.642969201552305 - type: mrr_at_5 value: 35.13417554289494 - type: nauc_map_at_1000_diff1 value: 23.345116790998063 - type: nauc_map_at_1000_max value: 44.447240670835725 - type: nauc_map_at_1000_std value: 18.34636500680144 - type: nauc_map_at_100_diff1 value: 24.458120909292347 - type: nauc_map_at_100_max value: 43.31851431140378 - type: nauc_map_at_100_std value: 15.654778355549965 - type: nauc_map_at_10_diff1 value: 29.376508937265044 - type: nauc_map_at_10_max value: 36.650196725140795 - type: nauc_map_at_10_std value: 4.682465435374843 - type: nauc_map_at_1_diff1 value: 40.382365672683214 - type: nauc_map_at_1_max value: 22.894341150096785 - type: nauc_map_at_1_std value: -5.610725673968323 - type: nauc_map_at_20_diff1 value: 27.197033425732908 - type: nauc_map_at_20_max value: 39.71672400647207 - type: nauc_map_at_20_std value: 8.944436813309933 - type: nauc_map_at_3_diff1 value: 34.49739294661502 - type: nauc_map_at_3_max value: 29.006972420735284 - type: nauc_map_at_3_std value: -3.0372650571243986 - type: nauc_map_at_5_diff1 value: 32.764901537277105 - type: nauc_map_at_5_max value: 32.658533295918154 - type: nauc_map_at_5_std value: 0.029626452286996906 - type: nauc_mrr_at_1000_diff1 value: 19.521229956280603 - type: nauc_mrr_at_1000_max value: 44.39409866211472 - type: nauc_mrr_at_1000_std value: 23.580697307036058 - type: nauc_mrr_at_100_diff1 value: 19.51312676591073 - type: nauc_mrr_at_100_max value: 44.39559153963895 - type: nauc_mrr_at_100_std value: 23.57913711397437 - type: nauc_mrr_at_10_diff1 value: 19.584635617935145 - type: nauc_mrr_at_10_max value: 44.44842226236198 - type: nauc_mrr_at_10_std value: 23.382684909390434 - type: nauc_mrr_at_1_diff1 value: 20.92594790923806 - type: nauc_mrr_at_1_max value: 40.593939625252816 - type: nauc_mrr_at_1_std value: 20.37467598073644 - type: nauc_mrr_at_20_diff1 value: 19.590641822115725 - type: nauc_mrr_at_20_max value: 44.42512299604718 - type: nauc_mrr_at_20_std value: 23.45564260800024 - type: nauc_mrr_at_3_diff1 value: 20.005307129527232 - type: nauc_mrr_at_3_max value: 43.68300366192776 - type: nauc_mrr_at_3_std value: 22.297190480842005 - type: nauc_mrr_at_5_diff1 value: 19.852896386271716 - type: nauc_mrr_at_5_max value: 44.20641808920062 - type: nauc_mrr_at_5_std value: 22.966517330852895 - type: nauc_ndcg_at_1000_diff1 value: 17.800116251376103 - type: nauc_ndcg_at_1000_max value: 50.98332718061365 - type: nauc_ndcg_at_1000_std value: 31.464484658102577 - type: nauc_ndcg_at_100_diff1 value: 19.555159680541088 - type: nauc_ndcg_at_100_max value: 48.56377130899141 - type: nauc_ndcg_at_100_std value: 25.77572748714817 - type: nauc_ndcg_at_10_diff1 value: 20.003008726679415 - type: nauc_ndcg_at_10_max value: 45.1293725480628 - type: nauc_ndcg_at_10_std value: 21.149213260765872 - type: nauc_ndcg_at_1_diff1 value: 21.00986278773023 - type: nauc_ndcg_at_1_max value: 40.524637076774894 - type: nauc_ndcg_at_1_std value: 20.29682194006685 - type: nauc_ndcg_at_20_diff1 value: 20.659734137312284 - type: nauc_ndcg_at_20_max value: 45.73108736599869 - 
type: nauc_ndcg_at_20_std value: 21.200736170346133 - type: nauc_ndcg_at_3_diff1 value: 19.200120542882544 - type: nauc_ndcg_at_3_max value: 42.89772612963168 - type: nauc_ndcg_at_3_std value: 20.713292754978983 - type: nauc_ndcg_at_5_diff1 value: 19.96329647992544 - type: nauc_ndcg_at_5_max value: 44.296627037787324 - type: nauc_ndcg_at_5_std value: 21.200135784971973 - type: nauc_precision_at_1000_diff1 value: -11.543221249009427 - type: nauc_precision_at_1000_max value: 9.132801614448221 - type: nauc_precision_at_1000_std value: 21.203720655381055 - type: nauc_precision_at_100_diff1 value: -12.510945425786039 - type: nauc_precision_at_100_max value: 31.42530963666252 - type: nauc_precision_at_100_std value: 44.99672783467617 - type: nauc_precision_at_10_diff1 value: -4.025802651746804 - type: nauc_precision_at_10_max value: 47.50967924227793 - type: nauc_precision_at_10_std value: 41.1558559268985 - type: nauc_precision_at_1_diff1 value: 21.00986278773023 - type: nauc_precision_at_1_max value: 40.524637076774894 - type: nauc_precision_at_1_std value: 20.29682194006685 - type: nauc_precision_at_20_diff1 value: -8.059482951110002 - type: nauc_precision_at_20_max value: 44.28832115946278 - type: nauc_precision_at_20_std value: 45.2005585353651 - type: nauc_precision_at_3_diff1 value: 8.53530005716248 - type: nauc_precision_at_3_max value: 46.48353678905102 - type: nauc_precision_at_3_std value: 28.868791323881972 - type: nauc_precision_at_5_diff1 value: 3.093619954821814 - type: nauc_precision_at_5_max value: 48.43294475817019 - type: nauc_precision_at_5_std value: 34.83430452745434 - type: nauc_recall_at_1000_diff1 value: 9.93680206699751 - type: nauc_recall_at_1000_max value: 52.97840222394363 - type: nauc_recall_at_1000_std value: 46.370023604436255 - type: nauc_recall_at_100_diff1 value: 14.100542445524972 - type: nauc_recall_at_100_max value: 42.853775131475224 - type: nauc_recall_at_100_std value: 26.93029971231028 - type: nauc_recall_at_10_diff1 value: 22.774547475714716 - type: nauc_recall_at_10_max value: 33.984586405015044 - type: nauc_recall_at_10_std value: 5.332325172373655 - type: nauc_recall_at_1_diff1 value: 40.382365672683214 - type: nauc_recall_at_1_max value: 22.894341150096785 - type: nauc_recall_at_1_std value: -5.610725673968323 - type: nauc_recall_at_20_diff1 value: 19.751060483835936 - type: nauc_recall_at_20_max value: 36.18774034635102 - type: nauc_recall_at_20_std value: 10.362242090308577 - type: nauc_recall_at_3_diff1 value: 30.29462372902671 - type: nauc_recall_at_3_max value: 27.377175450099635 - type: nauc_recall_at_3_std value: -3.015752705993425 - type: nauc_recall_at_5_diff1 value: 28.096893312615723 - type: nauc_recall_at_5_max value: 30.485075571512425 - type: nauc_recall_at_5_std value: 0.09106417003502826 - type: ndcg_at_1 value: 27.248 - type: ndcg_at_10 value: 28.316000000000003 - type: ndcg_at_100 value: 33.419 - type: ndcg_at_1000 value: 38.134 - type: ndcg_at_20 value: 29.707 - type: ndcg_at_3 value: 26.93 - type: ndcg_at_5 value: 27.363 - type: precision_at_1 value: 27.248 - type: precision_at_10 value: 15.073 - type: precision_at_100 value: 5.061 - type: precision_at_1000 value: 1.325 - type: precision_at_20 value: 11.407 - type: precision_at_3 value: 21.823 - type: precision_at_5 value: 18.984 - type: recall_at_1 value: 8.667 - type: recall_at_10 value: 26.984 - type: recall_at_100 value: 49.753 - type: recall_at_1000 value: 70.354 - type: recall_at_20 value: 33.955999999999996 - type: recall_at_3 value: 16.086 - type: recall_at_5 value: 
20.544999999999998 - task: type: Retrieval dataset: name: MTEB XMarket (es) type: jinaai/xmarket_ml config: es split: test revision: dfe57acff5b62c23732a7b7d3e3fb84ff501708b metrics: - type: main_score value: 26.592 - type: map_at_1 value: 8.081000000000001 - type: map_at_10 value: 16.486 - type: map_at_100 value: 19.996 - type: map_at_1000 value: 20.889 - type: map_at_20 value: 18.088 - type: map_at_3 value: 12.864 - type: map_at_5 value: 14.515 - type: mrr_at_1 value: 24.643356643356643 - type: mrr_at_10 value: 33.755599955599926 - type: mrr_at_100 value: 34.55914769326114 - type: mrr_at_1000 value: 34.614384237219745 - type: mrr_at_20 value: 34.228909650276194 - type: mrr_at_3 value: 31.445221445221456 - type: mrr_at_5 value: 32.71375291375297 - type: nauc_map_at_1000_diff1 value: 19.17751654240679 - type: nauc_map_at_1000_max value: 43.493743561136434 - type: nauc_map_at_1000_std value: 21.14477911550252 - type: nauc_map_at_100_diff1 value: 20.259227234415395 - type: nauc_map_at_100_max value: 42.510860292169106 - type: nauc_map_at_100_std value: 18.63085160442346 - type: nauc_map_at_10_diff1 value: 24.12419385640694 - type: nauc_map_at_10_max value: 35.99892932069915 - type: nauc_map_at_10_std value: 8.488520124325058 - type: nauc_map_at_1_diff1 value: 35.09239143996649 - type: nauc_map_at_1_max value: 23.72498533914286 - type: nauc_map_at_1_std value: -4.164387883546102 - type: nauc_map_at_20_diff1 value: 22.411418237320817 - type: nauc_map_at_20_max value: 39.12496266094892 - type: nauc_map_at_20_std value: 12.371656353894227 - type: nauc_map_at_3_diff1 value: 28.106972376813506 - type: nauc_map_at_3_max value: 29.57824316865409 - type: nauc_map_at_3_std value: 1.8928791254813127 - type: nauc_map_at_5_diff1 value: 26.4958239149419 - type: nauc_map_at_5_max value: 32.45906016649239 - type: nauc_map_at_5_std value: 4.612735963224018 - type: nauc_mrr_at_1000_diff1 value: 17.614812607094446 - type: nauc_mrr_at_1000_max value: 41.13031556228715 - type: nauc_mrr_at_1000_std value: 22.564112871230318 - type: nauc_mrr_at_100_diff1 value: 17.614044568011085 - type: nauc_mrr_at_100_max value: 41.129436273086796 - type: nauc_mrr_at_100_std value: 22.566763500658766 - type: nauc_mrr_at_10_diff1 value: 17.61869494452089 - type: nauc_mrr_at_10_max value: 41.091542329381426 - type: nauc_mrr_at_10_std value: 22.370473458633594 - type: nauc_mrr_at_1_diff1 value: 20.321421442201913 - type: nauc_mrr_at_1_max value: 38.36531448180009 - type: nauc_mrr_at_1_std value: 18.422203207777688 - type: nauc_mrr_at_20_diff1 value: 17.614767736091625 - type: nauc_mrr_at_20_max value: 41.11221420736687 - type: nauc_mrr_at_20_std value: 22.44271891522012 - type: nauc_mrr_at_3_diff1 value: 17.98184651584625 - type: nauc_mrr_at_3_max value: 40.424293610470144 - type: nauc_mrr_at_3_std value: 21.554750947206706 - type: nauc_mrr_at_5_diff1 value: 17.72088314927416 - type: nauc_mrr_at_5_max value: 40.662724739072694 - type: nauc_mrr_at_5_std value: 21.822957528431928 - type: nauc_ndcg_at_1000_diff1 value: 15.310699428328398 - type: nauc_ndcg_at_1000_max value: 48.83921393349997 - type: nauc_ndcg_at_1000_std value: 32.22600294110774 - type: nauc_ndcg_at_100_diff1 value: 16.62672763977423 - type: nauc_ndcg_at_100_max value: 47.36060653537392 - type: nauc_ndcg_at_100_std value: 27.879865162871575 - type: nauc_ndcg_at_10_diff1 value: 16.436684176028116 - type: nauc_ndcg_at_10_max value: 43.00026520872974 - type: nauc_ndcg_at_10_std value: 22.507354939162806 - type: nauc_ndcg_at_1_diff1 value: 20.321421442201913 - type: 
nauc_ndcg_at_1_max value: 38.36531448180009 - type: nauc_ndcg_at_1_std value: 18.422203207777688 - type: nauc_ndcg_at_20_diff1 value: 17.127747123248835 - type: nauc_ndcg_at_20_max value: 44.57322943752733 - type: nauc_ndcg_at_20_std value: 23.146541187377036 - type: nauc_ndcg_at_3_diff1 value: 16.372742984728514 - type: nauc_ndcg_at_3_max value: 40.91938017883993 - type: nauc_ndcg_at_3_std value: 21.50917089194154 - type: nauc_ndcg_at_5_diff1 value: 16.40486505525073 - type: nauc_ndcg_at_5_max value: 41.94597203181329 - type: nauc_ndcg_at_5_std value: 22.068260809047562 - type: nauc_precision_at_1000_diff1 value: -15.9415313729527 - type: nauc_precision_at_1000_max value: 12.653329948983643 - type: nauc_precision_at_1000_std value: 26.371820703256173 - type: nauc_precision_at_100_diff1 value: -11.851070166675289 - type: nauc_precision_at_100_max value: 32.164365923950115 - type: nauc_precision_at_100_std value: 45.930226426725426 - type: nauc_precision_at_10_diff1 value: -3.1352660378259163 - type: nauc_precision_at_10_max value: 45.48359878733272 - type: nauc_precision_at_10_std value: 40.2917038044196 - type: nauc_precision_at_1_diff1 value: 20.321421442201913 - type: nauc_precision_at_1_max value: 38.36531448180009 - type: nauc_precision_at_1_std value: 18.422203207777688 - type: nauc_precision_at_20_diff1 value: -7.087513342144751 - type: nauc_precision_at_20_max value: 43.66272019058357 - type: nauc_precision_at_20_std value: 44.22863351071686 - type: nauc_precision_at_3_diff1 value: 7.836185032609045 - type: nauc_precision_at_3_max value: 44.85412904097269 - type: nauc_precision_at_3_std value: 30.209139149500057 - type: nauc_precision_at_5_diff1 value: 3.028150537253791 - type: nauc_precision_at_5_max value: 45.73661708882973 - type: nauc_precision_at_5_std value: 34.65500311185052 - type: nauc_recall_at_1000_diff1 value: 9.526124668370704 - type: nauc_recall_at_1000_max value: 51.4190208452196 - type: nauc_recall_at_1000_std value: 45.694891695646426 - type: nauc_recall_at_100_diff1 value: 12.68466215400009 - type: nauc_recall_at_100_max value: 42.79112054268112 - type: nauc_recall_at_100_std value: 28.61954251400998 - type: nauc_recall_at_10_diff1 value: 17.95124413416829 - type: nauc_recall_at_10_max value: 33.1192036755167 - type: nauc_recall_at_10_std value: 9.3588175959525 - type: nauc_recall_at_1_diff1 value: 35.09239143996649 - type: nauc_recall_at_1_max value: 23.72498533914286 - type: nauc_recall_at_1_std value: -4.164387883546102 - type: nauc_recall_at_20_diff1 value: 16.24916980445646 - type: nauc_recall_at_20_max value: 36.51316122236076 - type: nauc_recall_at_20_std value: 13.641588062425736 - type: nauc_recall_at_3_diff1 value: 23.263199724138786 - type: nauc_recall_at_3_max value: 27.67354561610614 - type: nauc_recall_at_3_std value: 3.103127242654415 - type: nauc_recall_at_5_diff1 value: 20.719704839229635 - type: nauc_recall_at_5_max value: 29.66480839111333 - type: nauc_recall_at_5_std value: 5.514884455797986 - type: ndcg_at_1 value: 24.643 - type: ndcg_at_10 value: 26.592 - type: ndcg_at_100 value: 31.887 - type: ndcg_at_1000 value: 36.695 - type: ndcg_at_20 value: 28.166000000000004 - type: ndcg_at_3 value: 25.238 - type: ndcg_at_5 value: 25.545 - type: precision_at_1 value: 24.643 - type: precision_at_10 value: 13.730999999999998 - type: precision_at_100 value: 4.744000000000001 - type: precision_at_1000 value: 1.167 - type: precision_at_20 value: 10.562000000000001 - type: precision_at_3 value: 20.288999999999998 - type: precision_at_5 value: 17.337 - type: 
recall_at_1 value: 8.081000000000001 - type: recall_at_10 value: 25.911 - type: recall_at_100 value: 48.176 - type: recall_at_1000 value: 69.655 - type: recall_at_20 value: 32.924 - type: recall_at_3 value: 16.125 - type: recall_at_5 value: 19.988 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (deu-deu) type: jinaai/xpqa config: deu-deu split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 84.552 - type: map_at_1 value: 59.023 - type: map_at_10 value: 81.051 - type: map_at_100 value: 81.539 - type: map_at_1000 value: 81.54299999999999 - type: map_at_20 value: 81.401 - type: map_at_3 value: 76.969 - type: map_at_5 value: 80.07600000000001 - type: mrr_at_1 value: 77.67624020887729 - type: mrr_at_10 value: 83.30509967259314 - type: mrr_at_100 value: 83.58599391639456 - type: mrr_at_1000 value: 83.58970114722587 - type: mrr_at_20 value: 83.50275980440317 - type: mrr_at_3 value: 82.07136640557006 - type: mrr_at_5 value: 82.94604003481287 - type: nauc_map_at_1000_diff1 value: 63.12885104269942 - type: nauc_map_at_1000_max value: 57.7017996674959 - type: nauc_map_at_1000_std value: -24.951068985070513 - type: nauc_map_at_100_diff1 value: 63.12866509393162 - type: nauc_map_at_100_max value: 57.70176426013332 - type: nauc_map_at_100_std value: -24.96012290790273 - type: nauc_map_at_10_diff1 value: 62.847709436211204 - type: nauc_map_at_10_max value: 57.408873624779524 - type: nauc_map_at_10_std value: -25.635130363219062 - type: nauc_map_at_1_diff1 value: 71.89683981857102 - type: nauc_map_at_1_max value: 20.204460967432645 - type: nauc_map_at_1_std value: -23.07894656629493 - type: nauc_map_at_20_diff1 value: 63.00504457011043 - type: nauc_map_at_20_max value: 57.66009512514262 - type: nauc_map_at_20_std value: -25.100138593754885 - type: nauc_map_at_3_diff1 value: 63.199874607788274 - type: nauc_map_at_3_max value: 47.54482033763308 - type: nauc_map_at_3_std value: -27.714557098916963 - type: nauc_map_at_5_diff1 value: 63.01006523518669 - type: nauc_map_at_5_max value: 56.501965964288495 - type: nauc_map_at_5_std value: -25.367825762790925 - type: nauc_mrr_at_1000_diff1 value: 66.24988063948112 - type: nauc_mrr_at_1000_max value: 63.56921667744273 - type: nauc_mrr_at_1000_std value: -22.073973768031863 - type: nauc_mrr_at_100_diff1 value: 66.24919554296275 - type: nauc_mrr_at_100_max value: 63.57382447608361 - type: nauc_mrr_at_100_std value: -22.084627248538187 - type: nauc_mrr_at_10_diff1 value: 66.0143885124066 - type: nauc_mrr_at_10_max value: 63.51277586011898 - type: nauc_mrr_at_10_std value: -22.477523960705454 - type: nauc_mrr_at_1_diff1 value: 68.25415199323474 - type: nauc_mrr_at_1_max value: 63.069019003272416 - type: nauc_mrr_at_1_std value: -18.77085924093244 - type: nauc_mrr_at_20_diff1 value: 66.16203167351055 - type: nauc_mrr_at_20_max value: 63.607477776215845 - type: nauc_mrr_at_20_std value: -22.15083176017266 - type: nauc_mrr_at_3_diff1 value: 66.39368842782302 - type: nauc_mrr_at_3_max value: 63.11411066585295 - type: nauc_mrr_at_3_std value: -22.63174342814071 - type: nauc_mrr_at_5_diff1 value: 66.17932562332354 - type: nauc_mrr_at_5_max value: 63.70434825329594 - type: nauc_mrr_at_5_std value: -21.704012812430438 - type: nauc_ndcg_at_1000_diff1 value: 63.958010361549356 - type: nauc_ndcg_at_1000_max value: 60.516445000134624 - type: nauc_ndcg_at_1000_std value: -24.264672248289923 - type: nauc_ndcg_at_100_diff1 value: 63.97654644758022 - type: nauc_ndcg_at_100_max value: 60.62187552803407 - type: nauc_ndcg_at_100_std 
value: -24.317149225778312 - type: nauc_ndcg_at_10_diff1 value: 62.505321221321566 - type: nauc_ndcg_at_10_max value: 59.77891112351258 - type: nauc_ndcg_at_10_std value: -26.90910005589911 - type: nauc_ndcg_at_1_diff1 value: 68.25415199323474 - type: nauc_ndcg_at_1_max value: 63.069019003272416 - type: nauc_ndcg_at_1_std value: -18.77085924093244 - type: nauc_ndcg_at_20_diff1 value: 63.04281805056225 - type: nauc_ndcg_at_20_max value: 60.600957307444226 - type: nauc_ndcg_at_20_std value: -24.954862079889203 - type: nauc_ndcg_at_3_diff1 value: 62.970441139740316 - type: nauc_ndcg_at_3_max value: 57.543715669055295 - type: nauc_ndcg_at_3_std value: -25.659388431714703 - type: nauc_ndcg_at_5_diff1 value: 62.82652127664541 - type: nauc_ndcg_at_5_max value: 58.6970443258532 - type: nauc_ndcg_at_5_std value: -25.66329354851023 - type: nauc_precision_at_1000_diff1 value: -33.38530947486223 - type: nauc_precision_at_1000_max value: 25.972468024345414 - type: nauc_precision_at_1000_std value: 17.460222955117978 - type: nauc_precision_at_100_diff1 value: -32.45175999251703 - type: nauc_precision_at_100_max value: 26.367996120487337 - type: nauc_precision_at_100_std value: 17.097957946391208 - type: nauc_precision_at_10_diff1 value: -26.97411235289487 - type: nauc_precision_at_10_max value: 31.504961687240762 - type: nauc_precision_at_10_std value: 11.125341183874687 - type: nauc_precision_at_1_diff1 value: 68.25415199323474 - type: nauc_precision_at_1_max value: 63.069019003272416 - type: nauc_precision_at_1_std value: -18.77085924093244 - type: nauc_precision_at_20_diff1 value: -29.8678078736273 - type: nauc_precision_at_20_max value: 29.031222186584504 - type: nauc_precision_at_20_std value: 14.943600563087928 - type: nauc_precision_at_3_diff1 value: -15.92947221299854 - type: nauc_precision_at_3_max value: 37.73833494235097 - type: nauc_precision_at_3_std value: 3.1573228443500847 - type: nauc_precision_at_5_diff1 value: -22.269156821101642 - type: nauc_precision_at_5_max value: 35.65821838116355 - type: nauc_precision_at_5_std value: 9.265930386198972 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: 66.17058859539249 - type: nauc_recall_at_100_max value: 78.066942935192 - type: nauc_recall_at_100_std value: -22.213377762074686 - type: nauc_recall_at_10_diff1 value: 50.82149700700275 - type: nauc_recall_at_10_max value: 56.68053325008221 - type: nauc_recall_at_10_std value: -41.81657941433277 - type: nauc_recall_at_1_diff1 value: 71.89683981857102 - type: nauc_recall_at_1_max value: 20.204460967432645 - type: nauc_recall_at_1_std value: -23.07894656629493 - type: nauc_recall_at_20_diff1 value: 48.28076011857885 - type: nauc_recall_at_20_max value: 63.29641555519295 - type: nauc_recall_at_20_std value: -32.953559708819405 - type: nauc_recall_at_3_diff1 value: 58.15516956312558 - type: nauc_recall_at_3_max value: 42.66315890283056 - type: nauc_recall_at_3_std value: -32.16572530544806 - type: nauc_recall_at_5_diff1 value: 55.900844052439766 - type: nauc_recall_at_5_max value: 55.23702018862884 - type: nauc_recall_at_5_std value: -30.105929528165 - type: ndcg_at_1 value: 77.676 - type: ndcg_at_10 value: 84.552 - type: ndcg_at_100 value: 86.232 - type: ndcg_at_1000 value: 86.33800000000001 - type: ndcg_at_20 value: 85.515 - type: ndcg_at_3 value: 81.112 - type: ndcg_at_5 value: 82.943 - type: precision_at_1 value: 77.676 - type: precision_at_10 value: 15.17 - type: 
precision_at_100 value: 1.6230000000000002 - type: precision_at_1000 value: 0.163 - type: precision_at_20 value: 7.858999999999999 - type: precision_at_3 value: 42.994 - type: precision_at_5 value: 28.747 - type: recall_at_1 value: 59.023 - type: recall_at_10 value: 92.465 - type: recall_at_100 value: 99.18400000000001 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 95.844 - type: recall_at_3 value: 81.826 - type: recall_at_5 value: 88.22 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (deu-eng) type: jinaai/xpqa config: deu-eng split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 82.149 - type: map_at_1 value: 56.277 - type: map_at_10 value: 78.36999999999999 - type: map_at_100 value: 78.94 - type: map_at_1000 value: 78.95 - type: map_at_20 value: 78.818 - type: map_at_3 value: 74.25 - type: map_at_5 value: 77.11099999999999 - type: mrr_at_1 value: 74.28198433420366 - type: mrr_at_10 value: 80.57487877657589 - type: mrr_at_100 value: 80.94025764149008 - type: mrr_at_1000 value: 80.94608738871234 - type: mrr_at_20 value: 80.86240675885023 - type: mrr_at_3 value: 79.4604003481288 - type: mrr_at_5 value: 80.10008703220191 - type: nauc_map_at_1000_diff1 value: 60.44369249057189 - type: nauc_map_at_1000_max value: 49.822240441830246 - type: nauc_map_at_1000_std value: -27.34026380762817 - type: nauc_map_at_100_diff1 value: 60.44635668050401 - type: nauc_map_at_100_max value: 49.838675926660684 - type: nauc_map_at_100_std value: -27.310365556055583 - type: nauc_map_at_10_diff1 value: 60.18546951726522 - type: nauc_map_at_10_max value: 49.72075398096832 - type: nauc_map_at_10_std value: -27.86056102461558 - type: nauc_map_at_1_diff1 value: 71.2906657099758 - type: nauc_map_at_1_max value: 18.970399251589 - type: nauc_map_at_1_std value: -27.260776614286602 - type: nauc_map_at_20_diff1 value: 60.3525975566164 - type: nauc_map_at_20_max value: 49.852487866710646 - type: nauc_map_at_20_std value: -27.305173830170332 - type: nauc_map_at_3_diff1 value: 60.66803500571236 - type: nauc_map_at_3_max value: 41.18191941521972 - type: nauc_map_at_3_std value: -28.71383593401732 - type: nauc_map_at_5_diff1 value: 60.57216514504887 - type: nauc_map_at_5_max value: 47.99837400446299 - type: nauc_map_at_5_std value: -28.756183015949986 - type: nauc_mrr_at_1000_diff1 value: 63.77031955602516 - type: nauc_mrr_at_1000_max value: 54.26907383811417 - type: nauc_mrr_at_1000_std value: -26.227442087164714 - type: nauc_mrr_at_100_diff1 value: 63.77196650108669 - type: nauc_mrr_at_100_max value: 54.281801457913126 - type: nauc_mrr_at_100_std value: -26.216077891830793 - type: nauc_mrr_at_10_diff1 value: 63.50095284903051 - type: nauc_mrr_at_10_max value: 54.3186301730016 - type: nauc_mrr_at_10_std value: -26.29570241722173 - type: nauc_mrr_at_1_diff1 value: 65.15855770999057 - type: nauc_mrr_at_1_max value: 53.213286738515066 - type: nauc_mrr_at_1_std value: -24.683178252901943 - type: nauc_mrr_at_20_diff1 value: 63.74936550280859 - type: nauc_mrr_at_20_max value: 54.355343751439065 - type: nauc_mrr_at_20_std value: -26.197316900009817 - type: nauc_mrr_at_3_diff1 value: 63.912612979082695 - type: nauc_mrr_at_3_max value: 53.75399024225975 - type: nauc_mrr_at_3_std value: -27.194143264554675 - type: nauc_mrr_at_5_diff1 value: 63.72491059053639 - type: nauc_mrr_at_5_max value: 53.66107604019352 - type: nauc_mrr_at_5_std value: -26.92281560584754 - type: nauc_ndcg_at_1000_diff1 value: 61.304218998714354 - type: nauc_ndcg_at_1000_max value: 
52.409135743660386 - type: nauc_ndcg_at_1000_std value: -26.539796489464056 - type: nauc_ndcg_at_100_diff1 value: 61.40355045085304 - type: nauc_ndcg_at_100_max value: 52.79402259608008 - type: nauc_ndcg_at_100_std value: -25.927273456979965 - type: nauc_ndcg_at_10_diff1 value: 59.93675608684116 - type: nauc_ndcg_at_10_max value: 52.617848197542706 - type: nauc_ndcg_at_10_std value: -27.314820020095887 - type: nauc_ndcg_at_1_diff1 value: 65.15855770999057 - type: nauc_ndcg_at_1_max value: 53.213286738515066 - type: nauc_ndcg_at_1_std value: -24.683178252901943 - type: nauc_ndcg_at_20_diff1 value: 60.85093704358376 - type: nauc_ndcg_at_20_max value: 53.14529242671602 - type: nauc_ndcg_at_20_std value: -25.93187916231906 - type: nauc_ndcg_at_3_diff1 value: 60.42301123518882 - type: nauc_ndcg_at_3_max value: 49.59021992975956 - type: nauc_ndcg_at_3_std value: -27.397117967810363 - type: nauc_ndcg_at_5_diff1 value: 60.78655153154219 - type: nauc_ndcg_at_5_max value: 49.54194799556953 - type: nauc_ndcg_at_5_std value: -29.467910172913413 - type: nauc_precision_at_1000_diff1 value: -34.35027108027456 - type: nauc_precision_at_1000_max value: 23.762671066858815 - type: nauc_precision_at_1000_std value: 16.1704780298982 - type: nauc_precision_at_100_diff1 value: -32.66610016754961 - type: nauc_precision_at_100_max value: 25.504044603109588 - type: nauc_precision_at_100_std value: 16.932402988816786 - type: nauc_precision_at_10_diff1 value: -25.720903145017342 - type: nauc_precision_at_10_max value: 30.37029690599926 - type: nauc_precision_at_10_std value: 10.560753160200314 - type: nauc_precision_at_1_diff1 value: 65.15855770999057 - type: nauc_precision_at_1_max value: 53.213286738515066 - type: nauc_precision_at_1_std value: -24.683178252901943 - type: nauc_precision_at_20_diff1 value: -29.577582332619084 - type: nauc_precision_at_20_max value: 27.984145595920417 - type: nauc_precision_at_20_std value: 15.083711704044727 - type: nauc_precision_at_3_diff1 value: -14.736267532892697 - type: nauc_precision_at_3_max value: 36.12211021824307 - type: nauc_precision_at_3_std value: 3.068643876519412 - type: nauc_precision_at_5_diff1 value: -19.846707283120825 - type: nauc_precision_at_5_max value: 33.573804532177896 - type: nauc_precision_at_5_std value: 5.700545622744924 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: 68.24749796604452 - type: nauc_recall_at_100_max value: 83.30024864929815 - type: nauc_recall_at_100_std value: 21.23763053711522 - type: nauc_recall_at_10_diff1 value: 50.704049683241436 - type: nauc_recall_at_10_max value: 57.64578984555556 - type: nauc_recall_at_10_std value: -26.632759037746073 - type: nauc_recall_at_1_diff1 value: 71.2906657099758 - type: nauc_recall_at_1_max value: 18.970399251589 - type: nauc_recall_at_1_std value: -27.260776614286602 - type: nauc_recall_at_20_diff1 value: 54.124480837579505 - type: nauc_recall_at_20_max value: 66.4641515433479 - type: nauc_recall_at_20_std value: -14.615911455379393 - type: nauc_recall_at_3_diff1 value: 56.54358788321059 - type: nauc_recall_at_3_max value: 37.765735322465744 - type: nauc_recall_at_3_std value: -30.824147408598574 - type: nauc_recall_at_5_diff1 value: 56.392894535029214 - type: nauc_recall_at_5_max value: 45.959268387521554 - type: nauc_recall_at_5_std value: -33.58175576925282 - type: ndcg_at_1 value: 74.28200000000001 - type: ndcg_at_10 value: 82.149 - type: ndcg_at_100 value: 84.129 - 
type: ndcg_at_1000 value: 84.307 - type: ndcg_at_20 value: 83.39999999999999 - type: ndcg_at_3 value: 78.583 - type: ndcg_at_5 value: 80.13900000000001 - type: precision_at_1 value: 74.28200000000001 - type: precision_at_10 value: 14.960999999999999 - type: precision_at_100 value: 1.6119999999999999 - type: precision_at_1000 value: 0.163 - type: precision_at_20 value: 7.813000000000001 - type: precision_at_3 value: 41.819 - type: precision_at_5 value: 27.911 - type: recall_at_1 value: 56.277 - type: recall_at_10 value: 90.729 - type: recall_at_100 value: 98.792 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 95.148 - type: recall_at_3 value: 79.989 - type: recall_at_5 value: 85.603 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (eng-deu) type: jinaai/xpqa config: eng-deu split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 60.428000000000004 - type: map_at_1 value: 33.453 - type: map_at_10 value: 54.217000000000006 - type: map_at_100 value: 55.832 - type: map_at_1000 value: 55.884 - type: map_at_20 value: 55.236 - type: map_at_3 value: 48.302 - type: map_at_5 value: 51.902 - type: mrr_at_1 value: 53.916449086161876 - type: mrr_at_10 value: 61.4685647975465 - type: mrr_at_100 value: 62.13718159287348 - type: mrr_at_1000 value: 62.15799113826325 - type: mrr_at_20 value: 61.885388764243544 - type: mrr_at_3 value: 59.44299390774582 - type: mrr_at_5 value: 60.26544821583981 - type: nauc_map_at_1000_diff1 value: 39.824412602121804 - type: nauc_map_at_1000_max value: 39.49332709959374 - type: nauc_map_at_1000_std value: -17.27462623749702 - type: nauc_map_at_100_diff1 value: 39.80528910003463 - type: nauc_map_at_100_max value: 39.51471609156093 - type: nauc_map_at_100_std value: -17.275536933094937 - type: nauc_map_at_10_diff1 value: 39.28558292349772 - type: nauc_map_at_10_max value: 38.13220294838968 - type: nauc_map_at_10_std value: -18.235985574392863 - type: nauc_map_at_1_diff1 value: 43.68892397816937 - type: nauc_map_at_1_max value: 14.478978190224353 - type: nauc_map_at_1_std value: -18.435031919225477 - type: nauc_map_at_20_diff1 value: 39.8733530971344 - type: nauc_map_at_20_max value: 39.30513202591992 - type: nauc_map_at_20_std value: -17.62362848144766 - type: nauc_map_at_3_diff1 value: 40.31116611188815 - type: nauc_map_at_3_max value: 31.107314675202165 - type: nauc_map_at_3_std value: -19.52930881946966 - type: nauc_map_at_5_diff1 value: 39.1241499095765 - type: nauc_map_at_5_max value: 37.330543901034055 - type: nauc_map_at_5_std value: -17.893862772447548 - type: nauc_mrr_at_1000_diff1 value: 43.07490530140024 - type: nauc_mrr_at_1000_max value: 42.28469195779226 - type: nauc_mrr_at_1000_std value: -15.583217110180737 - type: nauc_mrr_at_100_diff1 value: 43.068836494603886 - type: nauc_mrr_at_100_max value: 42.29612450479168 - type: nauc_mrr_at_100_std value: -15.57218089438229 - type: nauc_mrr_at_10_diff1 value: 42.88685919151777 - type: nauc_mrr_at_10_max value: 41.89944452003811 - type: nauc_mrr_at_10_std value: -15.909673572763165 - type: nauc_mrr_at_1_diff1 value: 45.67646898532131 - type: nauc_mrr_at_1_max value: 43.0541870425035 - type: nauc_mrr_at_1_std value: -15.597124291613563 - type: nauc_mrr_at_20_diff1 value: 43.14141873150977 - type: nauc_mrr_at_20_max value: 42.33063543184022 - type: nauc_mrr_at_20_std value: -15.607612016107304 - type: nauc_mrr_at_3_diff1 value: 43.18370928261982 - type: nauc_mrr_at_3_max value: 42.18529980773961 - type: nauc_mrr_at_3_std value: -15.900151400673629 - type: 
nauc_mrr_at_5_diff1 value: 42.43443044877765 - type: nauc_mrr_at_5_max value: 42.05818605278972 - type: nauc_mrr_at_5_std value: -15.436502733299893 - type: nauc_ndcg_at_1000_diff1 value: 40.60606676178781 - type: nauc_ndcg_at_1000_max value: 41.71923393878376 - type: nauc_ndcg_at_1000_std value: -15.694740326899556 - type: nauc_ndcg_at_100_diff1 value: 40.15270376312309 - type: nauc_ndcg_at_100_max value: 42.234126305709225 - type: nauc_ndcg_at_100_std value: -15.436051984708952 - type: nauc_ndcg_at_10_diff1 value: 39.142259831299455 - type: nauc_ndcg_at_10_max value: 38.61470104273746 - type: nauc_ndcg_at_10_std value: -18.577452829132742 - type: nauc_ndcg_at_1_diff1 value: 45.67646898532131 - type: nauc_ndcg_at_1_max value: 43.0541870425035 - type: nauc_ndcg_at_1_std value: -15.597124291613563 - type: nauc_ndcg_at_20_diff1 value: 40.805159395901306 - type: nauc_ndcg_at_20_max value: 41.58685629374952 - type: nauc_ndcg_at_20_std value: -16.862408156222592 - type: nauc_ndcg_at_3_diff1 value: 39.12028215488432 - type: nauc_ndcg_at_3_max value: 39.70580596343164 - type: nauc_ndcg_at_3_std value: -16.705546903936213 - type: nauc_ndcg_at_5_diff1 value: 38.42075404927361 - type: nauc_ndcg_at_5_max value: 38.064219879504385 - type: nauc_ndcg_at_5_std value: -17.20282111665876 - type: nauc_precision_at_1000_diff1 value: -4.419224540552891 - type: nauc_precision_at_1000_max value: 35.686022591225246 - type: nauc_precision_at_1000_std value: 15.023520191032972 - type: nauc_precision_at_100_diff1 value: -2.9027602601603895 - type: nauc_precision_at_100_max value: 39.99864013028808 - type: nauc_precision_at_100_std value: 13.863497117255525 - type: nauc_precision_at_10_diff1 value: 5.539104839809501 - type: nauc_precision_at_10_max value: 42.41625740557432 - type: nauc_precision_at_10_std value: 1.0894693748662556 - type: nauc_precision_at_1_diff1 value: 45.67646898532131 - type: nauc_precision_at_1_max value: 43.0541870425035 - type: nauc_precision_at_1_std value: -15.597124291613563 - type: nauc_precision_at_20_diff1 value: 4.734562571681868 - type: nauc_precision_at_20_max value: 44.35081213316202 - type: nauc_precision_at_20_std value: 6.642891478284595 - type: nauc_precision_at_3_diff1 value: 13.936559341472101 - type: nauc_precision_at_3_max value: 45.426668552497524 - type: nauc_precision_at_3_std value: -5.219785419247125 - type: nauc_precision_at_5_diff1 value: 8.366706789546015 - type: nauc_precision_at_5_max value: 46.161942989326896 - type: nauc_precision_at_5_std value: -0.193140343545876 - type: nauc_recall_at_1000_diff1 value: 45.61785312444842 - type: nauc_recall_at_1000_max value: 75.68258976531774 - type: nauc_recall_at_1000_std value: 37.469059422121575 - type: nauc_recall_at_100_diff1 value: 26.798748531805096 - type: nauc_recall_at_100_max value: 54.72134095197765 - type: nauc_recall_at_100_std value: -1.5967608233799417 - type: nauc_recall_at_10_diff1 value: 32.13211696200521 - type: nauc_recall_at_10_max value: 31.13866254975895 - type: nauc_recall_at_10_std value: -22.31404161136118 - type: nauc_recall_at_1_diff1 value: 43.68892397816937 - type: nauc_recall_at_1_max value: 14.478978190224353 - type: nauc_recall_at_1_std value: -18.435031919225477 - type: nauc_recall_at_20_diff1 value: 38.597996930461385 - type: nauc_recall_at_20_max value: 42.49849027366794 - type: nauc_recall_at_20_std value: -16.536471900752154 - type: nauc_recall_at_3_diff1 value: 35.343730012759266 - type: nauc_recall_at_3_max value: 26.898722085043392 - type: nauc_recall_at_3_std value: -19.4459792273884 
- type: nauc_recall_at_5_diff1 value: 31.8310298012186 - type: nauc_recall_at_5_max value: 32.67800489655844 - type: nauc_recall_at_5_std value: -16.800929103347283 - type: ndcg_at_1 value: 53.916 - type: ndcg_at_10 value: 60.428000000000004 - type: ndcg_at_100 value: 65.95 - type: ndcg_at_1000 value: 66.88 - type: ndcg_at_20 value: 62.989 - type: ndcg_at_3 value: 55.204 - type: ndcg_at_5 value: 56.42700000000001 - type: precision_at_1 value: 53.916 - type: precision_at_10 value: 14.346999999999998 - type: precision_at_100 value: 1.849 - type: precision_at_1000 value: 0.196 - type: precision_at_20 value: 8.022 - type: precision_at_3 value: 34.552 - type: precision_at_5 value: 24.569 - type: recall_at_1 value: 33.453 - type: recall_at_10 value: 71.07900000000001 - type: recall_at_100 value: 93.207 - type: recall_at_1000 value: 99.60799999999999 - type: recall_at_20 value: 79.482 - type: recall_at_3 value: 53.98 - type: recall_at_5 value: 60.781 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (eng-pol) type: jinaai/xpqa config: eng-pol split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 34.042 - type: map_at_1 value: 13.236 - type: map_at_10 value: 27.839999999999996 - type: map_at_100 value: 30.171999999999997 - type: map_at_1000 value: 30.349999999999998 - type: map_at_20 value: 29.044999999999998 - type: map_at_3 value: 22.58 - type: map_at_5 value: 25.83 - type: mrr_at_1 value: 30.318471337579616 - type: mrr_at_10 value: 37.4983823678091 - type: mrr_at_100 value: 38.5784523175009 - type: mrr_at_1000 value: 38.63608698968148 - type: mrr_at_20 value: 38.02996157871825 - type: mrr_at_3 value: 34.798301486199584 - type: mrr_at_5 value: 36.39702760084925 - type: nauc_map_at_1000_diff1 value: 21.07199789609177 - type: nauc_map_at_1000_max value: 25.959233507893277 - type: nauc_map_at_1000_std value: -28.011925372852826 - type: nauc_map_at_100_diff1 value: 21.086788412737548 - type: nauc_map_at_100_max value: 25.8611620203686 - type: nauc_map_at_100_std value: -28.179239912057515 - type: nauc_map_at_10_diff1 value: 21.23841745922078 - type: nauc_map_at_10_max value: 25.44290342378288 - type: nauc_map_at_10_std value: -28.75578689110275 - type: nauc_map_at_1_diff1 value: 28.87454015638211 - type: nauc_map_at_1_max value: 17.50681123879997 - type: nauc_map_at_1_std value: -30.382831850562432 - type: nauc_map_at_20_diff1 value: 21.076559713540455 - type: nauc_map_at_20_max value: 25.538154202494535 - type: nauc_map_at_20_std value: -28.518764617658555 - type: nauc_map_at_3_diff1 value: 22.159185358766468 - type: nauc_map_at_3_max value: 23.01652660927249 - type: nauc_map_at_3_std value: -29.567722713221862 - type: nauc_map_at_5_diff1 value: 21.35578810370897 - type: nauc_map_at_5_max value: 25.550550437767395 - type: nauc_map_at_5_std value: -28.7889035461355 - type: nauc_mrr_at_1000_diff1 value: 22.28633009221923 - type: nauc_mrr_at_1000_max value: 26.920205393136392 - type: nauc_mrr_at_1000_std value: -25.887791634977642 - type: nauc_mrr_at_100_diff1 value: 22.2754975739755 - type: nauc_mrr_at_100_max value: 26.90235716615346 - type: nauc_mrr_at_100_std value: -25.891596020584345 - type: nauc_mrr_at_10_diff1 value: 22.415076305593534 - type: nauc_mrr_at_10_max value: 26.504643796222222 - type: nauc_mrr_at_10_std value: -26.6046081215833 - type: nauc_mrr_at_1_diff1 value: 23.406748619244368 - type: nauc_mrr_at_1_max value: 29.058228240823553 - type: nauc_mrr_at_1_std value: -26.450169820901078 - type: nauc_mrr_at_20_diff1 value: 
22.29233141817678 - type: nauc_mrr_at_20_max value: 26.69021351064081 - type: nauc_mrr_at_20_std value: -26.086596227376656 - type: nauc_mrr_at_3_diff1 value: 22.20746187500145 - type: nauc_mrr_at_3_max value: 27.143725946169457 - type: nauc_mrr_at_3_std value: -26.7017708594376 - type: nauc_mrr_at_5_diff1 value: 22.71898965233195 - type: nauc_mrr_at_5_max value: 26.932386658571662 - type: nauc_mrr_at_5_std value: -26.725541058780234 - type: nauc_ndcg_at_1000_diff1 value: 20.541734305148466 - type: nauc_ndcg_at_1000_max value: 27.180534238090758 - type: nauc_ndcg_at_1000_std value: -23.74197745177845 - type: nauc_ndcg_at_100_diff1 value: 20.570052839937468 - type: nauc_ndcg_at_100_max value: 26.21605034405486 - type: nauc_ndcg_at_100_std value: -25.359817188805028 - type: nauc_ndcg_at_10_diff1 value: 21.241423075073467 - type: nauc_ndcg_at_10_max value: 24.599199195239475 - type: nauc_ndcg_at_10_std value: -28.404540333309008 - type: nauc_ndcg_at_1_diff1 value: 23.406748619244368 - type: nauc_ndcg_at_1_max value: 29.058228240823553 - type: nauc_ndcg_at_1_std value: -26.450169820901078 - type: nauc_ndcg_at_20_diff1 value: 20.740460046196873 - type: nauc_ndcg_at_20_max value: 24.82380195169634 - type: nauc_ndcg_at_20_std value: -27.376298834244313 - type: nauc_ndcg_at_3_diff1 value: 19.994948682426504 - type: nauc_ndcg_at_3_max value: 26.153790759405105 - type: nauc_ndcg_at_3_std value: -27.194548404540885 - type: nauc_ndcg_at_5_diff1 value: 21.48414272096384 - type: nauc_ndcg_at_5_max value: 25.239652015076373 - type: nauc_ndcg_at_5_std value: -28.2620160957961 - type: nauc_precision_at_1000_diff1 value: -0.7557639926687744 - type: nauc_precision_at_1000_max value: 24.265591636994436 - type: nauc_precision_at_1000_std value: 16.833104654292654 - type: nauc_precision_at_100_diff1 value: 4.647847665941115 - type: nauc_precision_at_100_max value: 24.42192644844434 - type: nauc_precision_at_100_std value: 0.2718848568876648 - type: nauc_precision_at_10_diff1 value: 9.465969286722654 - type: nauc_precision_at_10_max value: 27.448993150448043 - type: nauc_precision_at_10_std value: -16.519099596502212 - type: nauc_precision_at_1_diff1 value: 23.406748619244368 - type: nauc_precision_at_1_max value: 29.058228240823553 - type: nauc_precision_at_1_std value: -26.450169820901078 - type: nauc_precision_at_20_diff1 value: 8.021421615668114 - type: nauc_precision_at_20_max value: 26.18556481398635 - type: nauc_precision_at_20_std value: -12.207152108668367 - type: nauc_precision_at_3_diff1 value: 11.783572803634241 - type: nauc_precision_at_3_max value: 29.259715774978893 - type: nauc_precision_at_3_std value: -20.407524967717425 - type: nauc_precision_at_5_diff1 value: 10.371728615220821 - type: nauc_precision_at_5_max value: 30.270642833482864 - type: nauc_precision_at_5_std value: -18.407334880575494 - type: nauc_recall_at_1000_diff1 value: 6.008969959111555 - type: nauc_recall_at_1000_max value: 39.79691734058127 - type: nauc_recall_at_1000_std value: 32.43591825510109 - type: nauc_recall_at_100_diff1 value: 15.2374566058917 - type: nauc_recall_at_100_max value: 23.058785539503717 - type: nauc_recall_at_100_std value: -15.962888794058165 - type: nauc_recall_at_10_diff1 value: 19.46184821807753 - type: nauc_recall_at_10_max value: 19.001003513986866 - type: nauc_recall_at_10_std value: -27.753332786663876 - type: nauc_recall_at_1_diff1 value: 28.87454015638211 - type: nauc_recall_at_1_max value: 17.50681123879997 - type: nauc_recall_at_1_std value: -30.382831850562432 - type: nauc_recall_at_20_diff1 
value: 17.237090858517405 - type: nauc_recall_at_20_max value: 18.42118474134871 - type: nauc_recall_at_20_std value: -24.862787724031957 - type: nauc_recall_at_3_diff1 value: 18.813019521758577 - type: nauc_recall_at_3_max value: 19.198572333053544 - type: nauc_recall_at_3_std value: -28.5644958605618 - type: nauc_recall_at_5_diff1 value: 20.247501986329482 - type: nauc_recall_at_5_max value: 21.121526202170358 - type: nauc_recall_at_5_std value: -27.220378617864853 - type: ndcg_at_1 value: 30.318 - type: ndcg_at_10 value: 34.042 - type: ndcg_at_100 value: 42.733 - type: ndcg_at_1000 value: 46.015 - type: ndcg_at_20 value: 37.053999999999995 - type: ndcg_at_3 value: 29.254 - type: ndcg_at_5 value: 30.514000000000003 - type: precision_at_1 value: 30.318 - type: precision_at_10 value: 10.981 - type: precision_at_100 value: 1.889 - type: precision_at_1000 value: 0.234 - type: precision_at_20 value: 6.643000000000001 - type: precision_at_3 value: 22.166 - type: precision_at_5 value: 17.477999999999998 - type: recall_at_1 value: 13.236 - type: recall_at_10 value: 41.461 - type: recall_at_100 value: 75.008 - type: recall_at_1000 value: 96.775 - type: recall_at_20 value: 50.754 - type: recall_at_3 value: 26.081 - type: recall_at_5 value: 33.168 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (eng-cmn) type: jinaai/xpqa config: eng-cmn split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 37.504 - type: map_at_1 value: 16.019 - type: map_at_10 value: 30.794 - type: map_at_100 value: 33.157 - type: map_at_1000 value: 33.324999999999996 - type: map_at_20 value: 32.161 - type: map_at_3 value: 25.372 - type: map_at_5 value: 28.246 - type: mrr_at_1 value: 30.461165048543688 - type: mrr_at_10 value: 39.393107566651224 - type: mrr_at_100 value: 40.570039540602295 - type: mrr_at_1000 value: 40.6306116407744 - type: mrr_at_20 value: 40.09428159978876 - type: mrr_at_3 value: 37.176375404530745 - type: mrr_at_5 value: 38.09870550161812 - type: nauc_map_at_1000_diff1 value: 30.82306881892873 - type: nauc_map_at_1000_max value: 5.877636000666466 - type: nauc_map_at_1000_std value: -30.7140513386797 - type: nauc_map_at_100_diff1 value: 30.85192449151961 - type: nauc_map_at_100_max value: 5.809195131550909 - type: nauc_map_at_100_std value: -30.838556702972063 - type: nauc_map_at_10_diff1 value: 30.50359163635058 - type: nauc_map_at_10_max value: 6.373491595869303 - type: nauc_map_at_10_std value: -29.89368007827676 - type: nauc_map_at_1_diff1 value: 38.60240510083884 - type: nauc_map_at_1_max value: 10.407392664609139 - type: nauc_map_at_1_std value: -17.76327278732833 - type: nauc_map_at_20_diff1 value: 30.897489125753598 - type: nauc_map_at_20_max value: 5.9303381898248 - type: nauc_map_at_20_std value: -30.863345188760515 - type: nauc_map_at_3_diff1 value: 32.8150951852729 - type: nauc_map_at_3_max value: 7.671931402215177 - type: nauc_map_at_3_std value: -25.654809758216533 - type: nauc_map_at_5_diff1 value: 31.19558194781019 - type: nauc_map_at_5_max value: 6.426885613116939 - type: nauc_map_at_5_std value: -28.609027858850016 - type: nauc_mrr_at_1000_diff1 value: 30.7596332048733 - type: nauc_mrr_at_1000_max value: 1.1970748115580212 - type: nauc_mrr_at_1000_std value: -34.647570668150216 - type: nauc_mrr_at_100_diff1 value: 30.74693370788581 - type: nauc_mrr_at_100_max value: 1.1673272262754841 - type: nauc_mrr_at_100_std value: -34.67761028542745 - type: nauc_mrr_at_10_diff1 value: 30.537820575183076 - type: nauc_mrr_at_10_max value: 
1.0261868725502707 - type: nauc_mrr_at_10_std value: -34.999990560631204 - type: nauc_mrr_at_1_diff1 value: 35.51868580113285 - type: nauc_mrr_at_1_max value: 5.117103773147307 - type: nauc_mrr_at_1_std value: -30.633913466736956 - type: nauc_mrr_at_20_diff1 value: 30.67318175430903 - type: nauc_mrr_at_20_max value: 1.0979983974981327 - type: nauc_mrr_at_20_std value: -34.8388339739997 - type: nauc_mrr_at_3_diff1 value: 30.884642006045702 - type: nauc_mrr_at_3_max value: 1.7970996544095983 - type: nauc_mrr_at_3_std value: -34.290172894906085 - type: nauc_mrr_at_5_diff1 value: 30.89687518368571 - type: nauc_mrr_at_5_max value: 1.2123714988495347 - type: nauc_mrr_at_5_std value: -35.01704580471926 - type: nauc_ndcg_at_1000_diff1 value: 29.214476799077342 - type: nauc_ndcg_at_1000_max value: 3.6379035546112872 - type: nauc_ndcg_at_1000_std value: -32.35757522049194 - type: nauc_ndcg_at_100_diff1 value: 29.130004541376298 - type: nauc_ndcg_at_100_max value: 2.9580589185293045 - type: nauc_ndcg_at_100_std value: -33.26884643871724 - type: nauc_ndcg_at_10_diff1 value: 28.521001084366393 - type: nauc_ndcg_at_10_max value: 3.630223957267483 - type: nauc_ndcg_at_10_std value: -33.14524140940815 - type: nauc_ndcg_at_1_diff1 value: 35.51868580113285 - type: nauc_ndcg_at_1_max value: 5.117103773147307 - type: nauc_ndcg_at_1_std value: -30.633913466736956 - type: nauc_ndcg_at_20_diff1 value: 29.194462756848782 - type: nauc_ndcg_at_20_max value: 2.61162903136461 - type: nauc_ndcg_at_20_std value: -34.59161403211834 - type: nauc_ndcg_at_3_diff1 value: 30.183555327135203 - type: nauc_ndcg_at_3_max value: 5.61949040917093 - type: nauc_ndcg_at_3_std value: -30.350117794058175 - type: nauc_ndcg_at_5_diff1 value: 29.74420394139971 - type: nauc_ndcg_at_5_max value: 3.952183813937688 - type: nauc_ndcg_at_5_std value: -31.807833795302038 - type: nauc_precision_at_1000_diff1 value: -5.467049121617333 - type: nauc_precision_at_1000_max value: -3.993986884198271 - type: nauc_precision_at_1000_std value: -13.703967324212224 - type: nauc_precision_at_100_diff1 value: 1.5585428307943647 - type: nauc_precision_at_100_max value: -4.250455723613214 - type: nauc_precision_at_100_std value: -22.294689856776493 - type: nauc_precision_at_10_diff1 value: 11.076036917255259 - type: nauc_precision_at_10_max value: -1.5859394644365377 - type: nauc_precision_at_10_std value: -34.94912594413202 - type: nauc_precision_at_1_diff1 value: 35.51868580113285 - type: nauc_precision_at_1_max value: 5.117103773147307 - type: nauc_precision_at_1_std value: -30.633913466736956 - type: nauc_precision_at_20_diff1 value: 9.311484455773828 - type: nauc_precision_at_20_max value: -3.678383428592432 - type: nauc_precision_at_20_std value: -33.700002761401635 - type: nauc_precision_at_3_diff1 value: 19.2787260874381 - type: nauc_precision_at_3_max value: 0.18292109396940018 - type: nauc_precision_at_3_std value: -35.23939824276542 - type: nauc_precision_at_5_diff1 value: 14.97930592298584 - type: nauc_precision_at_5_max value: -1.63540635880963 - type: nauc_precision_at_5_std value: -35.908283558321315 - type: nauc_recall_at_1000_diff1 value: 26.63056473607804 - type: nauc_recall_at_1000_max value: 62.7304558520689 - type: nauc_recall_at_1000_std value: 58.12421701377561 - type: nauc_recall_at_100_diff1 value: 21.42127379898579 - type: nauc_recall_at_100_max value: 1.4748203516921914 - type: nauc_recall_at_100_std value: -27.56467339041136 - type: nauc_recall_at_10_diff1 value: 21.20479652609812 - type: nauc_recall_at_10_max value: 
1.7394881489709888 - type: nauc_recall_at_10_std value: -32.15116902585072 - type: nauc_recall_at_1_diff1 value: 38.60240510083884 - type: nauc_recall_at_1_max value: 10.407392664609139 - type: nauc_recall_at_1_std value: -17.76327278732833 - type: nauc_recall_at_20_diff1 value: 23.049652721582632 - type: nauc_recall_at_20_max value: -1.7715787106286838 - type: nauc_recall_at_20_std value: -36.14203686002867 - type: nauc_recall_at_3_diff1 value: 26.522179829461873 - type: nauc_recall_at_3_max value: 6.078208732431124 - type: nauc_recall_at_3_std value: -25.02625711226274 - type: nauc_recall_at_5_diff1 value: 24.19538553561693 - type: nauc_recall_at_5_max value: 2.4963810785503524 - type: nauc_recall_at_5_std value: -30.449635496921257 - type: ndcg_at_1 value: 30.461 - type: ndcg_at_10 value: 37.504 - type: ndcg_at_100 value: 46.156000000000006 - type: ndcg_at_1000 value: 48.985 - type: ndcg_at_20 value: 41.025 - type: ndcg_at_3 value: 32.165 - type: ndcg_at_5 value: 33.072 - type: precision_at_1 value: 30.461 - type: precision_at_10 value: 11.032 - type: precision_at_100 value: 1.8870000000000002 - type: precision_at_1000 value: 0.22499999999999998 - type: precision_at_20 value: 6.833 - type: precision_at_3 value: 22.532 - type: precision_at_5 value: 16.966 - type: recall_at_1 value: 16.019 - type: recall_at_10 value: 47.557 - type: recall_at_100 value: 80.376 - type: recall_at_1000 value: 98.904 - type: recall_at_20 value: 58.48100000000001 - type: recall_at_3 value: 30.682 - type: recall_at_5 value: 36.714999999999996 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (eng-spa) type: jinaai/xpqa config: eng-spa split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 53.359 - type: map_at_1 value: 22.892000000000003 - type: map_at_10 value: 45.773 - type: map_at_100 value: 47.778999999999996 - type: map_at_1000 value: 47.882999999999996 - type: map_at_20 value: 46.869 - type: map_at_3 value: 37.643 - type: map_at_5 value: 43.120999999999995 - type: mrr_at_1 value: 47.28877679697352 - type: mrr_at_10 value: 56.95890630316857 - type: mrr_at_100 value: 57.71103367009639 - type: mrr_at_1000 value: 57.73661441948852 - type: mrr_at_20 value: 57.37701091311334 - type: mrr_at_3 value: 54.74989491382929 - type: mrr_at_5 value: 56.08659100462372 - type: nauc_map_at_1000_diff1 value: 27.8347129954991 - type: nauc_map_at_1000_max value: 38.04300600762859 - type: nauc_map_at_1000_std value: -18.294653328262868 - type: nauc_map_at_100_diff1 value: 27.818449297770858 - type: nauc_map_at_100_max value: 38.03533462156633 - type: nauc_map_at_100_std value: -18.332989980880644 - type: nauc_map_at_10_diff1 value: 27.520664180018358 - type: nauc_map_at_10_max value: 37.67109855753314 - type: nauc_map_at_10_std value: -18.496721673888683 - type: nauc_map_at_1_diff1 value: 37.56020148060502 - type: nauc_map_at_1_max value: 10.298394230150745 - type: nauc_map_at_1_std value: -20.41359936101547 - type: nauc_map_at_20_diff1 value: 27.615023038189722 - type: nauc_map_at_20_max value: 37.808525116320254 - type: nauc_map_at_20_std value: -18.49235775420803 - type: nauc_map_at_3_diff1 value: 30.797347567428424 - type: nauc_map_at_3_max value: 29.374407828869497 - type: nauc_map_at_3_std value: -19.75905772914969 - type: nauc_map_at_5_diff1 value: 28.431802888884803 - type: nauc_map_at_5_max value: 35.57723911610521 - type: nauc_map_at_5_std value: -19.093588845366824 - type: nauc_mrr_at_1000_diff1 value: 33.263611009054586 - type: nauc_mrr_at_1000_max value: 
40.620639901613664 - type: nauc_mrr_at_1000_std value: -17.083016011032036 - type: nauc_mrr_at_100_diff1 value: 33.25375012559163 - type: nauc_mrr_at_100_max value: 40.62376205172005 - type: nauc_mrr_at_100_std value: -17.091930575226684 - type: nauc_mrr_at_10_diff1 value: 33.05787202690095 - type: nauc_mrr_at_10_max value: 40.4516362611674 - type: nauc_mrr_at_10_std value: -17.088910666499892 - type: nauc_mrr_at_1_diff1 value: 36.424151087824555 - type: nauc_mrr_at_1_max value: 40.955715626650445 - type: nauc_mrr_at_1_std value: -16.56636409111209 - type: nauc_mrr_at_20_diff1 value: 33.12029456858138 - type: nauc_mrr_at_20_max value: 40.56409347292635 - type: nauc_mrr_at_20_std value: -17.102034817242068 - type: nauc_mrr_at_3_diff1 value: 33.52377926814156 - type: nauc_mrr_at_3_max value: 40.824911575046876 - type: nauc_mrr_at_3_std value: -16.855935748811092 - type: nauc_mrr_at_5_diff1 value: 33.08646471768442 - type: nauc_mrr_at_5_max value: 40.59323589955881 - type: nauc_mrr_at_5_std value: -16.77829710500156 - type: nauc_ndcg_at_1000_diff1 value: 28.741186244590207 - type: nauc_ndcg_at_1000_max value: 40.0113825410539 - type: nauc_ndcg_at_1000_std value: -17.15655081742458 - type: nauc_ndcg_at_100_diff1 value: 28.680521359782972 - type: nauc_ndcg_at_100_max value: 39.94751899984445 - type: nauc_ndcg_at_100_std value: -17.82813814043932 - type: nauc_ndcg_at_10_diff1 value: 27.22858072673168 - type: nauc_ndcg_at_10_max value: 38.600188968554725 - type: nauc_ndcg_at_10_std value: -18.517203924893614 - type: nauc_ndcg_at_1_diff1 value: 36.424151087824555 - type: nauc_ndcg_at_1_max value: 40.955715626650445 - type: nauc_ndcg_at_1_std value: -16.56636409111209 - type: nauc_ndcg_at_20_diff1 value: 27.56875900623774 - type: nauc_ndcg_at_20_max value: 38.95264310199067 - type: nauc_ndcg_at_20_std value: -18.709973965688445 - type: nauc_ndcg_at_3_diff1 value: 28.682842749851574 - type: nauc_ndcg_at_3_max value: 38.361215408395964 - type: nauc_ndcg_at_3_std value: -16.800291231827515 - type: nauc_ndcg_at_5_diff1 value: 28.178239259093484 - type: nauc_ndcg_at_5_max value: 36.77096292606479 - type: nauc_ndcg_at_5_std value: -18.718861696641145 - type: nauc_precision_at_1000_diff1 value: -7.3686253252869305 - type: nauc_precision_at_1000_max value: 31.98896996987639 - type: nauc_precision_at_1000_std value: 13.125659676392267 - type: nauc_precision_at_100_diff1 value: -2.8239113056969156 - type: nauc_precision_at_100_max value: 36.95062472971812 - type: nauc_precision_at_100_std value: 7.230228733647562 - type: nauc_precision_at_10_diff1 value: 2.5515545798843555 - type: nauc_precision_at_10_max value: 45.46146019314904 - type: nauc_precision_at_10_std value: -1.3249340536211553 - type: nauc_precision_at_1_diff1 value: 36.424151087824555 - type: nauc_precision_at_1_max value: 40.955715626650445 - type: nauc_precision_at_1_std value: -16.56636409111209 - type: nauc_precision_at_20_diff1 value: 0.7202861770489576 - type: nauc_precision_at_20_max value: 41.9937596214609 - type: nauc_precision_at_20_std value: 0.2756400069730064 - type: nauc_precision_at_3_diff1 value: 12.89221206929447 - type: nauc_precision_at_3_max value: 48.57775126381142 - type: nauc_precision_at_3_std value: -8.042242254131068 - type: nauc_precision_at_5_diff1 value: 7.063616193387763 - type: nauc_precision_at_5_max value: 47.26496887331675 - type: nauc_precision_at_5_std value: -4.735805200913049 - type: nauc_recall_at_1000_diff1 value: 2.6650052980682224 - type: nauc_recall_at_1000_max value: 81.94826279951472 - type: 
nauc_recall_at_1000_std value: 48.46012388224573 - type: nauc_recall_at_100_diff1 value: 24.516371948375827 - type: nauc_recall_at_100_max value: 39.17639620389552 - type: nauc_recall_at_100_std value: -17.884197602579533 - type: nauc_recall_at_10_diff1 value: 19.93892097640112 - type: nauc_recall_at_10_max value: 33.079079440022106 - type: nauc_recall_at_10_std value: -20.22227622801884 - type: nauc_recall_at_1_diff1 value: 37.56020148060502 - type: nauc_recall_at_1_max value: 10.298394230150745 - type: nauc_recall_at_1_std value: -20.41359936101547 - type: nauc_recall_at_20_diff1 value: 20.363784035670633 - type: nauc_recall_at_20_max value: 33.39352971625336 - type: nauc_recall_at_20_std value: -21.712050932168875 - type: nauc_recall_at_3_diff1 value: 26.220072121604655 - type: nauc_recall_at_3_max value: 25.853218030218507 - type: nauc_recall_at_3_std value: -17.830613372910907 - type: nauc_recall_at_5_diff1 value: 22.25850162680252 - type: nauc_recall_at_5_max value: 30.89620539042785 - type: nauc_recall_at_5_std value: -19.16786434439169 - type: ndcg_at_1 value: 47.288999999999994 - type: ndcg_at_10 value: 53.359 - type: ndcg_at_100 value: 60.25899999999999 - type: ndcg_at_1000 value: 61.902 - type: ndcg_at_20 value: 56.025000000000006 - type: ndcg_at_3 value: 47.221999999999994 - type: ndcg_at_5 value: 49.333 - type: precision_at_1 value: 47.288999999999994 - type: precision_at_10 value: 16.003 - type: precision_at_100 value: 2.221 - type: precision_at_1000 value: 0.246 - type: precision_at_20 value: 8.985 - type: precision_at_3 value: 34.510000000000005 - type: precision_at_5 value: 26.961000000000002 - type: recall_at_1 value: 22.892000000000003 - type: recall_at_10 value: 62.928 - type: recall_at_100 value: 89.105 - type: recall_at_1000 value: 99.319 - type: recall_at_20 value: 71.387 - type: recall_at_3 value: 43.492999999999995 - type: recall_at_5 value: 53.529 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (eng-fra) type: jinaai/xpqa config: eng-fra split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 54.888000000000005 - type: map_at_1 value: 26.079 - type: map_at_10 value: 47.434 - type: map_at_100 value: 49.376 - type: map_at_1000 value: 49.461 - type: map_at_20 value: 48.634 - type: map_at_3 value: 40.409 - type: map_at_5 value: 44.531 - type: mrr_at_1 value: 46.86248331108144 - type: mrr_at_10 value: 56.45506177548896 - type: mrr_at_100 value: 57.20360629445577 - type: mrr_at_1000 value: 57.227004696897986 - type: mrr_at_20 value: 56.905302765737865 - type: mrr_at_3 value: 54.09434801958164 - type: mrr_at_5 value: 55.40943480195811 - type: nauc_map_at_1000_diff1 value: 37.739936045535885 - type: nauc_map_at_1000_max value: 35.92625003516368 - type: nauc_map_at_1000_std value: -15.825119611638398 - type: nauc_map_at_100_diff1 value: 37.71697833661983 - type: nauc_map_at_100_max value: 35.91174068136317 - type: nauc_map_at_100_std value: -15.838841891589006 - type: nauc_map_at_10_diff1 value: 37.52309268219689 - type: nauc_map_at_10_max value: 35.4887130483351 - type: nauc_map_at_10_std value: -16.61132378136234 - type: nauc_map_at_1_diff1 value: 42.705087329207984 - type: nauc_map_at_1_max value: 12.047671550242974 - type: nauc_map_at_1_std value: -17.156030827065834 - type: nauc_map_at_20_diff1 value: 37.59446680137666 - type: nauc_map_at_20_max value: 35.80559546695052 - type: nauc_map_at_20_std value: -16.158338316249786 - type: nauc_map_at_3_diff1 value: 38.618415267131816 - type: nauc_map_at_3_max value: 
27.030227996183925 - type: nauc_map_at_3_std value: -18.962500694157857 - type: nauc_map_at_5_diff1 value: 37.980845601534256 - type: nauc_map_at_5_max value: 32.82374761283266 - type: nauc_map_at_5_std value: -17.856875825229565 - type: nauc_mrr_at_1000_diff1 value: 40.26059509279346 - type: nauc_mrr_at_1000_max value: 39.28453752990871 - type: nauc_mrr_at_1000_std value: -13.306217279524212 - type: nauc_mrr_at_100_diff1 value: 40.23390833398881 - type: nauc_mrr_at_100_max value: 39.26041461025653 - type: nauc_mrr_at_100_std value: -13.317700798873153 - type: nauc_mrr_at_10_diff1 value: 40.163737640180145 - type: nauc_mrr_at_10_max value: 39.27138538165913 - type: nauc_mrr_at_10_std value: -13.472971360323038 - type: nauc_mrr_at_1_diff1 value: 42.95339241383707 - type: nauc_mrr_at_1_max value: 40.62982307619158 - type: nauc_mrr_at_1_std value: -10.429597045942748 - type: nauc_mrr_at_20_diff1 value: 40.23703505923782 - type: nauc_mrr_at_20_max value: 39.27051308063652 - type: nauc_mrr_at_20_std value: -13.390197643922038 - type: nauc_mrr_at_3_diff1 value: 40.5721313555661 - type: nauc_mrr_at_3_max value: 39.254774354468594 - type: nauc_mrr_at_3_std value: -13.773803807863827 - type: nauc_mrr_at_5_diff1 value: 40.41081287079734 - type: nauc_mrr_at_5_max value: 39.515241132077335 - type: nauc_mrr_at_5_std value: -13.306544090087336 - type: nauc_ndcg_at_1000_diff1 value: 38.04772268296103 - type: nauc_ndcg_at_1000_max value: 38.03364565521176 - type: nauc_ndcg_at_1000_std value: -14.203182726102263 - type: nauc_ndcg_at_100_diff1 value: 37.51752795463643 - type: nauc_ndcg_at_100_max value: 37.809671511710604 - type: nauc_ndcg_at_100_std value: -13.880578225081408 - type: nauc_ndcg_at_10_diff1 value: 36.78438984005559 - type: nauc_ndcg_at_10_max value: 36.98105155993232 - type: nauc_ndcg_at_10_std value: -16.886308645939113 - type: nauc_ndcg_at_1_diff1 value: 42.95339241383707 - type: nauc_ndcg_at_1_max value: 40.62982307619158 - type: nauc_ndcg_at_1_std value: -10.429597045942748 - type: nauc_ndcg_at_20_diff1 value: 36.94164323893683 - type: nauc_ndcg_at_20_max value: 37.333583379288285 - type: nauc_ndcg_at_20_std value: -15.853318071434716 - type: nauc_ndcg_at_3_diff1 value: 36.905604845477384 - type: nauc_ndcg_at_3_max value: 35.10252586688781 - type: nauc_ndcg_at_3_std value: -17.128435988977742 - type: nauc_ndcg_at_5_diff1 value: 37.96742463612705 - type: nauc_ndcg_at_5_max value: 34.65945109443365 - type: nauc_ndcg_at_5_std value: -17.916428667861183 - type: nauc_precision_at_1000_diff1 value: -3.740861894117653 - type: nauc_precision_at_1000_max value: 31.993854396874177 - type: nauc_precision_at_1000_std value: 17.445629474196448 - type: nauc_precision_at_100_diff1 value: -0.4825948747911606 - type: nauc_precision_at_100_max value: 35.834638448782954 - type: nauc_precision_at_100_std value: 16.82718796079511 - type: nauc_precision_at_10_diff1 value: 8.285949866268147 - type: nauc_precision_at_10_max value: 45.3292519726866 - type: nauc_precision_at_10_std value: 4.5574850748441555 - type: nauc_precision_at_1_diff1 value: 42.95339241383707 - type: nauc_precision_at_1_max value: 40.62982307619158 - type: nauc_precision_at_1_std value: -10.429597045942748 - type: nauc_precision_at_20_diff1 value: 4.890590733611442 - type: nauc_precision_at_20_max value: 41.83051757078859 - type: nauc_precision_at_20_std value: 9.197347125630467 - type: nauc_precision_at_3_diff1 value: 17.79940075411976 - type: nauc_precision_at_3_max value: 45.224103632426946 - type: nauc_precision_at_3_std value: 
-5.017203435609909 - type: nauc_precision_at_5_diff1 value: 13.548063145911929 - type: nauc_precision_at_5_max value: 46.84837547409909 - type: nauc_precision_at_5_std value: -0.8925939386354484 - type: nauc_recall_at_1000_diff1 value: 74.48441717138078 - type: nauc_recall_at_1000_max value: 74.66717137705027 - type: nauc_recall_at_1000_std value: 0.24030117471512125 - type: nauc_recall_at_100_diff1 value: 22.553777341988656 - type: nauc_recall_at_100_max value: 31.67861029246527 - type: nauc_recall_at_100_std value: 0.2707450517253687 - type: nauc_recall_at_10_diff1 value: 28.490866614443235 - type: nauc_recall_at_10_max value: 31.722970141434352 - type: nauc_recall_at_10_std value: -21.97893365028007 - type: nauc_recall_at_1_diff1 value: 42.705087329207984 - type: nauc_recall_at_1_max value: 12.047671550242974 - type: nauc_recall_at_1_std value: -17.156030827065834 - type: nauc_recall_at_20_diff1 value: 27.44043454173112 - type: nauc_recall_at_20_max value: 31.454281772040716 - type: nauc_recall_at_20_std value: -20.1735695305415 - type: nauc_recall_at_3_diff1 value: 34.08447534706394 - type: nauc_recall_at_3_max value: 21.793973773840865 - type: nauc_recall_at_3_std value: -22.753978372378906 - type: nauc_recall_at_5_diff1 value: 33.59686526199479 - type: nauc_recall_at_5_max value: 29.188889073761302 - type: nauc_recall_at_5_std value: -21.96156333744562 - type: ndcg_at_1 value: 46.861999999999995 - type: ndcg_at_10 value: 54.888000000000005 - type: ndcg_at_100 value: 61.477000000000004 - type: ndcg_at_1000 value: 62.768 - type: ndcg_at_20 value: 57.812 - type: ndcg_at_3 value: 48.721 - type: ndcg_at_5 value: 50.282000000000004 - type: precision_at_1 value: 46.861999999999995 - type: precision_at_10 value: 15.167 - type: precision_at_100 value: 2.072 - type: precision_at_1000 value: 0.22499999999999998 - type: precision_at_20 value: 8.672 - type: precision_at_3 value: 33.066 - type: precision_at_5 value: 24.726 - type: recall_at_1 value: 26.079 - type: recall_at_10 value: 66.095 - type: recall_at_100 value: 91.65299999999999 - type: recall_at_1000 value: 99.83999999999999 - type: recall_at_20 value: 75.28 - type: recall_at_3 value: 46.874 - type: recall_at_5 value: 55.062 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (pol-eng) type: jinaai/xpqa config: pol-eng split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 50.831 - type: map_at_1 value: 25.549 - type: map_at_10 value: 44.432 - type: map_at_100 value: 46.431 - type: map_at_1000 value: 46.525 - type: map_at_20 value: 45.595 - type: map_at_3 value: 38.574000000000005 - type: map_at_5 value: 42.266999999999996 - type: mrr_at_1 value: 43.5006435006435 - type: mrr_at_10 value: 51.561255132683684 - type: mrr_at_100 value: 52.59912482635216 - type: mrr_at_1000 value: 52.631337587043056 - type: mrr_at_20 value: 52.23234440063273 - type: mrr_at_3 value: 48.97039897039895 - type: mrr_at_5 value: 50.31531531531527 - type: nauc_map_at_1000_diff1 value: 35.907901295900174 - type: nauc_map_at_1000_max value: 24.573763602041687 - type: nauc_map_at_1000_std value: -29.524077960309313 - type: nauc_map_at_100_diff1 value: 35.86869121827827 - type: nauc_map_at_100_max value: 24.532343818487494 - type: nauc_map_at_100_std value: -29.613979124488864 - type: nauc_map_at_10_diff1 value: 35.90171794022391 - type: nauc_map_at_10_max value: 23.90914892943268 - type: nauc_map_at_10_std value: -30.43698820061533 - type: nauc_map_at_1_diff1 value: 50.80313333312038 - type: nauc_map_at_1_max value: 
16.649890421888156 - type: nauc_map_at_1_std value: -22.323989416471683 - type: nauc_map_at_20_diff1 value: 35.77755470212964 - type: nauc_map_at_20_max value: 24.199895270297034 - type: nauc_map_at_20_std value: -30.223411960170647 - type: nauc_map_at_3_diff1 value: 38.964124882315936 - type: nauc_map_at_3_max value: 21.187432510177167 - type: nauc_map_at_3_std value: -28.976663506389887 - type: nauc_map_at_5_diff1 value: 36.04644236616672 - type: nauc_map_at_5_max value: 23.501186429317094 - type: nauc_map_at_5_std value: -30.068144596060748 - type: nauc_mrr_at_1000_diff1 value: 41.36555452105447 - type: nauc_mrr_at_1000_max value: 26.376799280402867 - type: nauc_mrr_at_1000_std value: -30.008603028757424 - type: nauc_mrr_at_100_diff1 value: 41.35523965220727 - type: nauc_mrr_at_100_max value: 26.402612115967706 - type: nauc_mrr_at_100_std value: -29.991754627128024 - type: nauc_mrr_at_10_diff1 value: 41.001395127259315 - type: nauc_mrr_at_10_max value: 26.104860505051384 - type: nauc_mrr_at_10_std value: -30.38420449487516 - type: nauc_mrr_at_1_diff1 value: 44.882846373248206 - type: nauc_mrr_at_1_max value: 26.61905322890808 - type: nauc_mrr_at_1_std value: -28.724565662206153 - type: nauc_mrr_at_20_diff1 value: 41.278009142648834 - type: nauc_mrr_at_20_max value: 26.284565529087295 - type: nauc_mrr_at_20_std value: -30.19549140549242 - type: nauc_mrr_at_3_diff1 value: 41.74663893951077 - type: nauc_mrr_at_3_max value: 26.263048464325884 - type: nauc_mrr_at_3_std value: -30.676733442965688 - type: nauc_mrr_at_5_diff1 value: 41.11461477846568 - type: nauc_mrr_at_5_max value: 25.94713927964926 - type: nauc_mrr_at_5_std value: -30.317066480767817 - type: nauc_ndcg_at_1000_diff1 value: 36.34161052445199 - type: nauc_ndcg_at_1000_max value: 26.321036033696206 - type: nauc_ndcg_at_1000_std value: -27.59146917115399 - type: nauc_ndcg_at_100_diff1 value: 35.66557800007035 - type: nauc_ndcg_at_100_max value: 26.282211208336136 - type: nauc_ndcg_at_100_std value: -27.905634124461333 - type: nauc_ndcg_at_10_diff1 value: 35.34872687407275 - type: nauc_ndcg_at_10_max value: 24.018561915792272 - type: nauc_ndcg_at_10_std value: -31.57712772869015 - type: nauc_ndcg_at_1_diff1 value: 44.882846373248206 - type: nauc_ndcg_at_1_max value: 26.865602442152554 - type: nauc_ndcg_at_1_std value: -28.509295454329152 - type: nauc_ndcg_at_20_diff1 value: 35.46177768045546 - type: nauc_ndcg_at_20_max value: 24.921273675141542 - type: nauc_ndcg_at_20_std value: -30.84348812979793 - type: nauc_ndcg_at_3_diff1 value: 36.84688489063923 - type: nauc_ndcg_at_3_max value: 24.088513229463736 - type: nauc_ndcg_at_3_std value: -30.05640995379297 - type: nauc_ndcg_at_5_diff1 value: 35.623143276796185 - type: nauc_ndcg_at_5_max value: 23.76654250474061 - type: nauc_ndcg_at_5_std value: -30.87847710074466 - type: nauc_precision_at_1000_diff1 value: -16.270532533886932 - type: nauc_precision_at_1000_max value: 17.37365042394671 - type: nauc_precision_at_1000_std value: 16.27166715693082 - type: nauc_precision_at_100_diff1 value: -13.175264889436313 - type: nauc_precision_at_100_max value: 19.488571046893963 - type: nauc_precision_at_100_std value: 9.055429698007798 - type: nauc_precision_at_10_diff1 value: 0.6806938753592942 - type: nauc_precision_at_10_max value: 21.933083960522616 - type: nauc_precision_at_10_std value: -18.2147036942157 - type: nauc_precision_at_1_diff1 value: 44.882846373248206 - type: nauc_precision_at_1_max value: 26.865602442152554 - type: nauc_precision_at_1_std value: -28.509295454329152 - type: 
nauc_precision_at_20_diff1 value: -4.318119150162302 - type: nauc_precision_at_20_max value: 21.089702301041687 - type: nauc_precision_at_20_std value: -10.333077681479546 - type: nauc_precision_at_3_diff1 value: 11.496076462671107 - type: nauc_precision_at_3_max value: 23.018301549827008 - type: nauc_precision_at_3_std value: -23.98652995416454 - type: nauc_precision_at_5_diff1 value: 4.271050668117355 - type: nauc_precision_at_5_max value: 23.61051327966779 - type: nauc_precision_at_5_std value: -21.557618503107847 - type: nauc_recall_at_1000_diff1 value: 62.23955911850697 - type: nauc_recall_at_1000_max value: 83.20491723365542 - type: nauc_recall_at_1000_std value: 66.5173462601958 - type: nauc_recall_at_100_diff1 value: 20.503778602988177 - type: nauc_recall_at_100_max value: 29.379026288767506 - type: nauc_recall_at_100_std value: -16.139120874540573 - type: nauc_recall_at_10_diff1 value: 27.659110249896557 - type: nauc_recall_at_10_max value: 19.69557968026332 - type: nauc_recall_at_10_std value: -33.95657132767551 - type: nauc_recall_at_1_diff1 value: 50.80313333312038 - type: nauc_recall_at_1_max value: 16.649890421888156 - type: nauc_recall_at_1_std value: -22.323989416471683 - type: nauc_recall_at_20_diff1 value: 27.084453724565176 - type: nauc_recall_at_20_max value: 21.40080632474994 - type: nauc_recall_at_20_std value: -32.83683639340239 - type: nauc_recall_at_3_diff1 value: 34.32950941333572 - type: nauc_recall_at_3_max value: 18.55616615958199 - type: nauc_recall_at_3_std value: -30.375983327454076 - type: nauc_recall_at_5_diff1 value: 29.44516734974564 - type: nauc_recall_at_5_max value: 20.630543534300312 - type: nauc_recall_at_5_std value: -31.30763062499127 - type: ndcg_at_1 value: 43.501 - type: ndcg_at_10 value: 50.831 - type: ndcg_at_100 value: 58.17099999999999 - type: ndcg_at_1000 value: 59.705 - type: ndcg_at_20 value: 54.047999999999995 - type: ndcg_at_3 value: 44.549 - type: ndcg_at_5 value: 46.861000000000004 - type: precision_at_1 value: 43.501 - type: precision_at_10 value: 12.895999999999999 - type: precision_at_100 value: 1.9 - type: precision_at_1000 value: 0.21 - type: precision_at_20 value: 7.593 - type: precision_at_3 value: 29.215000000000003 - type: precision_at_5 value: 21.57 - type: recall_at_1 value: 25.549 - type: recall_at_10 value: 61.795 - type: recall_at_100 value: 90.019 - type: recall_at_1000 value: 99.807 - type: recall_at_20 value: 72.096 - type: recall_at_3 value: 43.836999999999996 - type: recall_at_5 value: 51.714000000000006 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (pol-pol) type: jinaai/xpqa config: pol-pol split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 53.70399999999999 - type: map_at_1 value: 27.739000000000004 - type: map_at_10 value: 47.469 - type: map_at_100 value: 49.392 - type: map_at_1000 value: 49.483 - type: map_at_20 value: 48.646 - type: map_at_3 value: 41.467 - type: map_at_5 value: 45.467 - type: mrr_at_1 value: 47.00636942675159 - type: mrr_at_10 value: 54.63699322616519 - type: mrr_at_100 value: 55.54525182833755 - type: mrr_at_1000 value: 55.581331515356155 - type: mrr_at_20 value: 55.22918377451415 - type: mrr_at_3 value: 52.03821656050952 - type: mrr_at_5 value: 53.38216560509549 - type: nauc_map_at_1000_diff1 value: 45.03530825034854 - type: nauc_map_at_1000_max value: 34.22740272603397 - type: nauc_map_at_1000_std value: -30.428880484199244 - type: nauc_map_at_100_diff1 value: 44.978704455592805 - type: nauc_map_at_100_max value: 34.20908357964765 
- type: nauc_map_at_100_std value: -30.47325365059666 - type: nauc_map_at_10_diff1 value: 44.9560579177672 - type: nauc_map_at_10_max value: 33.70097588985278 - type: nauc_map_at_10_std value: -31.205563222357885 - type: nauc_map_at_1_diff1 value: 57.94711780881773 - type: nauc_map_at_1_max value: 21.60278071836319 - type: nauc_map_at_1_std value: -23.273741268035923 - type: nauc_map_at_20_diff1 value: 44.97859054699532 - type: nauc_map_at_20_max value: 34.153729150181846 - type: nauc_map_at_20_std value: -30.97482545902907 - type: nauc_map_at_3_diff1 value: 47.52016138686765 - type: nauc_map_at_3_max value: 30.176197065298417 - type: nauc_map_at_3_std value: -29.90628984041898 - type: nauc_map_at_5_diff1 value: 45.36581638257985 - type: nauc_map_at_5_max value: 33.697200263698036 - type: nauc_map_at_5_std value: -31.165331120088453 - type: nauc_mrr_at_1000_diff1 value: 53.32889526818364 - type: nauc_mrr_at_1000_max value: 36.104118340589736 - type: nauc_mrr_at_1000_std value: -31.321132494516984 - type: nauc_mrr_at_100_diff1 value: 53.30695875258367 - type: nauc_mrr_at_100_max value: 36.114890079024455 - type: nauc_mrr_at_100_std value: -31.291749322117447 - type: nauc_mrr_at_10_diff1 value: 53.189084772141435 - type: nauc_mrr_at_10_max value: 35.939061062282484 - type: nauc_mrr_at_10_std value: -31.502185884653645 - type: nauc_mrr_at_1_diff1 value: 56.89368291041337 - type: nauc_mrr_at_1_max value: 36.07581125496313 - type: nauc_mrr_at_1_std value: -29.703764232519475 - type: nauc_mrr_at_20_diff1 value: 53.23955737199497 - type: nauc_mrr_at_20_max value: 36.068824838215676 - type: nauc_mrr_at_20_std value: -31.420039428197594 - type: nauc_mrr_at_3_diff1 value: 53.74385074861207 - type: nauc_mrr_at_3_max value: 35.57054587735015 - type: nauc_mrr_at_3_std value: -32.356894834537684 - type: nauc_mrr_at_5_diff1 value: 53.66669556981826 - type: nauc_mrr_at_5_max value: 36.02102289605049 - type: nauc_mrr_at_5_std value: -32.030437067359124 - type: nauc_ndcg_at_1000_diff1 value: 46.34900536768847 - type: nauc_ndcg_at_1000_max value: 35.6314995837715 - type: nauc_ndcg_at_1000_std value: -28.965103958822624 - type: nauc_ndcg_at_100_diff1 value: 45.1587893788861 - type: nauc_ndcg_at_100_max value: 35.62430753595297 - type: nauc_ndcg_at_100_std value: -28.77303405812772 - type: nauc_ndcg_at_10_diff1 value: 44.928781590765965 - type: nauc_ndcg_at_10_max value: 34.315200006430366 - type: nauc_ndcg_at_10_std value: -32.05164097076614 - type: nauc_ndcg_at_1_diff1 value: 57.228262350455125 - type: nauc_ndcg_at_1_max value: 35.645285703387366 - type: nauc_ndcg_at_1_std value: -29.893553821348718 - type: nauc_ndcg_at_20_diff1 value: 44.959903633039865 - type: nauc_ndcg_at_20_max value: 35.493022926282755 - type: nauc_ndcg_at_20_std value: -31.54989291850644 - type: nauc_ndcg_at_3_diff1 value: 46.65266185996905 - type: nauc_ndcg_at_3_max value: 33.74458119579594 - type: nauc_ndcg_at_3_std value: -31.493683304534176 - type: nauc_ndcg_at_5_diff1 value: 46.08707037187612 - type: nauc_ndcg_at_5_max value: 34.7401426055243 - type: nauc_ndcg_at_5_std value: -32.44390676345172 - type: nauc_precision_at_1000_diff1 value: -12.11355300492561 - type: nauc_precision_at_1000_max value: 14.490738062121233 - type: nauc_precision_at_1000_std value: 14.448811005059097 - type: nauc_precision_at_100_diff1 value: -9.742085657181239 - type: nauc_precision_at_100_max value: 18.030305489251223 - type: nauc_precision_at_100_std value: 8.213089709529765 - type: nauc_precision_at_10_diff1 value: 5.153466672774969 - type: 
nauc_precision_at_10_max value: 27.29412644661678 - type: nauc_precision_at_10_std value: -15.505053884112355 - type: nauc_precision_at_1_diff1 value: 57.228262350455125 - type: nauc_precision_at_1_max value: 35.645285703387366 - type: nauc_precision_at_1_std value: -29.893553821348718 - type: nauc_precision_at_20_diff1 value: -0.6812430761066635 - type: nauc_precision_at_20_max value: 25.81911286466295 - type: nauc_precision_at_20_std value: -8.388506222482595 - type: nauc_precision_at_3_diff1 value: 18.263873866510576 - type: nauc_precision_at_3_max value: 30.879576105862345 - type: nauc_precision_at_3_std value: -24.0342929870108 - type: nauc_precision_at_5_diff1 value: 10.9905804265327 - type: nauc_precision_at_5_max value: 30.88468087429045 - type: nauc_precision_at_5_std value: -20.458684056213507 - type: nauc_recall_at_1000_diff1 value: -64.887668417171 - type: nauc_recall_at_1000_max value: 52.25501730358092 - type: nauc_recall_at_1000_std value: 85.13647916200132 - type: nauc_recall_at_100_diff1 value: 18.956777346127655 - type: nauc_recall_at_100_max value: 36.10473493564588 - type: nauc_recall_at_100_std value: -10.007474558899949 - type: nauc_recall_at_10_diff1 value: 33.810344497568046 - type: nauc_recall_at_10_max value: 31.395430183214245 - type: nauc_recall_at_10_std value: -33.12920524433795 - type: nauc_recall_at_1_diff1 value: 57.94711780881773 - type: nauc_recall_at_1_max value: 21.60278071836319 - type: nauc_recall_at_1_std value: -23.273741268035923 - type: nauc_recall_at_20_diff1 value: 31.449657437065397 - type: nauc_recall_at_20_max value: 34.519574934321945 - type: nauc_recall_at_20_std value: -33.43406862055647 - type: nauc_recall_at_3_diff1 value: 42.07841848382365 - type: nauc_recall_at_3_max value: 28.7648772833266 - type: nauc_recall_at_3_std value: -31.56367736320086 - type: nauc_recall_at_5_diff1 value: 39.21392858246301 - type: nauc_recall_at_5_max value: 34.28338202081927 - type: nauc_recall_at_5_std value: -33.725680523721906 - type: ndcg_at_1 value: 46.879 - type: ndcg_at_10 value: 53.70399999999999 - type: ndcg_at_100 value: 60.532 - type: ndcg_at_1000 value: 61.997 - type: ndcg_at_20 value: 56.818999999999996 - type: ndcg_at_3 value: 47.441 - type: ndcg_at_5 value: 49.936 - type: precision_at_1 value: 46.879 - type: precision_at_10 value: 13.376 - type: precision_at_100 value: 1.8980000000000001 - type: precision_at_1000 value: 0.208 - type: precision_at_20 value: 7.771 - type: precision_at_3 value: 30.658 - type: precision_at_5 value: 22.828 - type: recall_at_1 value: 27.739000000000004 - type: recall_at_10 value: 64.197 - type: recall_at_100 value: 90.54100000000001 - type: recall_at_1000 value: 99.90400000000001 - type: recall_at_20 value: 74.178 - type: recall_at_3 value: 46.312 - type: recall_at_5 value: 54.581999999999994 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (cmn-eng) type: jinaai/xpqa config: cmn-eng split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 64.64 - type: map_at_1 value: 35.858000000000004 - type: map_at_10 value: 58.547000000000004 - type: map_at_100 value: 60.108 - type: map_at_1000 value: 60.153999999999996 - type: map_at_20 value: 59.528000000000006 - type: map_at_3 value: 51.578 - type: map_at_5 value: 56.206999999999994 - type: mrr_at_1 value: 56.95121951219512 - type: mrr_at_10 value: 64.93975029036001 - type: mrr_at_100 value: 65.63357055718294 - type: mrr_at_1000 value: 65.64844109026834 - type: mrr_at_20 value: 65.41280668715439 - type: mrr_at_3 value: 
62.68292682926826 - type: mrr_at_5 value: 64.1585365853658 - type: nauc_map_at_1000_diff1 value: 45.82740870907091 - type: nauc_map_at_1000_max value: 21.9696540066807 - type: nauc_map_at_1000_std value: -32.028262356639495 - type: nauc_map_at_100_diff1 value: 45.802053117616396 - type: nauc_map_at_100_max value: 21.946002070290966 - type: nauc_map_at_100_std value: -32.06190418866229 - type: nauc_map_at_10_diff1 value: 46.017774155748945 - type: nauc_map_at_10_max value: 21.876909086095544 - type: nauc_map_at_10_std value: -32.13913568843985 - type: nauc_map_at_1_diff1 value: 56.34671160956164 - type: nauc_map_at_1_max value: 17.6796949796236 - type: nauc_map_at_1_std value: -13.741140688066045 - type: nauc_map_at_20_diff1 value: 46.027469176858716 - type: nauc_map_at_20_max value: 21.80738432042703 - type: nauc_map_at_20_std value: -32.430379634015395 - type: nauc_map_at_3_diff1 value: 48.40096725254027 - type: nauc_map_at_3_max value: 21.15442803574233 - type: nauc_map_at_3_std value: -26.205850292181417 - type: nauc_map_at_5_diff1 value: 45.77800041356389 - type: nauc_map_at_5_max value: 22.11718771798752 - type: nauc_map_at_5_std value: -30.32876338031471 - type: nauc_mrr_at_1000_diff1 value: 49.748274798877944 - type: nauc_mrr_at_1000_max value: 24.547774167219906 - type: nauc_mrr_at_1000_std value: -32.728447209433504 - type: nauc_mrr_at_100_diff1 value: 49.734549290377856 - type: nauc_mrr_at_100_max value: 24.536933315055222 - type: nauc_mrr_at_100_std value: -32.74076335880697 - type: nauc_mrr_at_10_diff1 value: 49.82827711456392 - type: nauc_mrr_at_10_max value: 24.536773657485075 - type: nauc_mrr_at_10_std value: -33.05707547166962 - type: nauc_mrr_at_1_diff1 value: 51.954289992321044 - type: nauc_mrr_at_1_max value: 26.336255074856886 - type: nauc_mrr_at_1_std value: -29.042962019692446 - type: nauc_mrr_at_20_diff1 value: 49.70938465628863 - type: nauc_mrr_at_20_max value: 24.433219849576947 - type: nauc_mrr_at_20_std value: -32.94123791846049 - type: nauc_mrr_at_3_diff1 value: 50.289486880347134 - type: nauc_mrr_at_3_max value: 24.978796972860142 - type: nauc_mrr_at_3_std value: -32.11305594784892 - type: nauc_mrr_at_5_diff1 value: 49.95013396316144 - type: nauc_mrr_at_5_max value: 24.514452761198303 - type: nauc_mrr_at_5_std value: -32.865859962984146 - type: nauc_ndcg_at_1000_diff1 value: 45.73806489233998 - type: nauc_ndcg_at_1000_max value: 22.404941391043867 - type: nauc_ndcg_at_1000_std value: -33.063445720849685 - type: nauc_ndcg_at_100_diff1 value: 45.1046206923062 - type: nauc_ndcg_at_100_max value: 22.081133719684658 - type: nauc_ndcg_at_100_std value: -33.299291459450146 - type: nauc_ndcg_at_10_diff1 value: 46.140608688357496 - type: nauc_ndcg_at_10_max value: 21.442489279388916 - type: nauc_ndcg_at_10_std value: -35.115870342856006 - type: nauc_ndcg_at_1_diff1 value: 51.954289992321044 - type: nauc_ndcg_at_1_max value: 26.336255074856886 - type: nauc_ndcg_at_1_std value: -29.042962019692446 - type: nauc_ndcg_at_20_diff1 value: 45.966784725457046 - type: nauc_ndcg_at_20_max value: 21.166632858613145 - type: nauc_ndcg_at_20_std value: -35.65112890375392 - type: nauc_ndcg_at_3_diff1 value: 46.7404863978999 - type: nauc_ndcg_at_3_max value: 22.701743709129456 - type: nauc_ndcg_at_3_std value: -30.907633466983192 - type: nauc_ndcg_at_5_diff1 value: 45.86487199083486 - type: nauc_ndcg_at_5_max value: 22.088804840002513 - type: nauc_ndcg_at_5_std value: -32.3853481632832 - type: nauc_precision_at_1000_diff1 value: -25.69710612774455 - type: nauc_precision_at_1000_max 
value: 1.3964400247388091 - type: nauc_precision_at_1000_std value: -8.873947511634814 - type: nauc_precision_at_100_diff1 value: -24.013497191077978 - type: nauc_precision_at_100_max value: 2.0197725715909343 - type: nauc_precision_at_100_std value: -11.387423148770633 - type: nauc_precision_at_10_diff1 value: -6.47728645242781 - type: nauc_precision_at_10_max value: 6.815261443768304 - type: nauc_precision_at_10_std value: -26.825062292855943 - type: nauc_precision_at_1_diff1 value: 51.954289992321044 - type: nauc_precision_at_1_max value: 26.336255074856886 - type: nauc_precision_at_1_std value: -29.042962019692446 - type: nauc_precision_at_20_diff1 value: -12.355232044747511 - type: nauc_precision_at_20_max value: 4.022126850949725 - type: nauc_precision_at_20_std value: -23.688935769326772 - type: nauc_precision_at_3_diff1 value: 7.662671665835864 - type: nauc_precision_at_3_max value: 14.372394760986248 - type: nauc_precision_at_3_std value: -28.635125665532453 - type: nauc_precision_at_5_diff1 value: -1.4592476425511611 - type: nauc_precision_at_5_max value: 11.124310161474174 - type: nauc_precision_at_5_std value: -27.89526669318053 - type: nauc_recall_at_1000_diff1 value: -19.58450046684932 - type: nauc_recall_at_1000_max value: 70.71661998133165 - type: nauc_recall_at_1000_std value: 93.05555555556315 - type: nauc_recall_at_100_diff1 value: 15.06356457571853 - type: nauc_recall_at_100_max value: 14.051414749344806 - type: nauc_recall_at_100_std value: -29.461874235153008 - type: nauc_recall_at_10_diff1 value: 41.29842726117901 - type: nauc_recall_at_10_max value: 15.768699673830898 - type: nauc_recall_at_10_std value: -42.11585661287712 - type: nauc_recall_at_1_diff1 value: 56.34671160956164 - type: nauc_recall_at_1_max value: 17.6796949796236 - type: nauc_recall_at_1_std value: -13.741140688066045 - type: nauc_recall_at_20_diff1 value: 38.8078283585263 - type: nauc_recall_at_20_max value: 12.06816084005326 - type: nauc_recall_at_20_std value: -48.20956170056591 - type: nauc_recall_at_3_diff1 value: 44.71028758038993 - type: nauc_recall_at_3_max value: 19.1059093689162 - type: nauc_recall_at_3_std value: -26.795164453784253 - type: nauc_recall_at_5_diff1 value: 41.06320797773054 - type: nauc_recall_at_5_max value: 19.117028272530998 - type: nauc_recall_at_5_std value: -33.985747504612156 - type: ndcg_at_1 value: 56.95099999999999 - type: ndcg_at_10 value: 64.64 - type: ndcg_at_100 value: 70.017 - type: ndcg_at_1000 value: 70.662 - type: ndcg_at_20 value: 67.256 - type: ndcg_at_3 value: 58.269000000000005 - type: ndcg_at_5 value: 60.94199999999999 - type: precision_at_1 value: 56.95099999999999 - type: precision_at_10 value: 15.671 - type: precision_at_100 value: 2.002 - type: precision_at_1000 value: 0.208 - type: precision_at_20 value: 8.689 - type: precision_at_3 value: 36.341 - type: precision_at_5 value: 26.854 - type: recall_at_1 value: 35.858000000000004 - type: recall_at_10 value: 75.02 - type: recall_at_100 value: 95.76 - type: recall_at_1000 value: 99.837 - type: recall_at_20 value: 83.732 - type: recall_at_3 value: 57.093 - type: recall_at_5 value: 66.193 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (cmn-cmn) type: jinaai/xpqa config: cmn-cmn split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 69.446 - type: map_at_1 value: 39.995999999999995 - type: map_at_10 value: 64.033 - type: map_at_100 value: 65.51599999999999 - type: map_at_1000 value: 65.545 - type: map_at_20 value: 64.958 - type: map_at_3 value: 57.767 
- type: map_at_5 value: 61.998 - type: mrr_at_1 value: 63.3495145631068 - type: mrr_at_10 value: 70.21146363075978 - type: mrr_at_100 value: 70.82810974202124 - type: mrr_at_1000 value: 70.83816803303915 - type: mrr_at_20 value: 70.60140248428802 - type: mrr_at_3 value: 68.66909385113267 - type: mrr_at_5 value: 69.56108414239482 - type: nauc_map_at_1000_diff1 value: 51.649897072831465 - type: nauc_map_at_1000_max value: 38.25222728655331 - type: nauc_map_at_1000_std value: -39.10327919949334 - type: nauc_map_at_100_diff1 value: 51.644205886401465 - type: nauc_map_at_100_max value: 38.23611154355255 - type: nauc_map_at_100_std value: -39.1677073977285 - type: nauc_map_at_10_diff1 value: 51.81444145636039 - type: nauc_map_at_10_max value: 38.03382104326485 - type: nauc_map_at_10_std value: -38.999395639812015 - type: nauc_map_at_1_diff1 value: 59.785298201044704 - type: nauc_map_at_1_max value: 23.273537759937785 - type: nauc_map_at_1_std value: -17.838712689290194 - type: nauc_map_at_20_diff1 value: 51.680208795601004 - type: nauc_map_at_20_max value: 38.23334583518634 - type: nauc_map_at_20_std value: -39.24344495939061 - type: nauc_map_at_3_diff1 value: 52.180913298194056 - type: nauc_map_at_3_max value: 33.45482478000481 - type: nauc_map_at_3_std value: -31.682911030586297 - type: nauc_map_at_5_diff1 value: 50.804900676175436 - type: nauc_map_at_5_max value: 37.68924816012326 - type: nauc_map_at_5_std value: -36.85016896616712 - type: nauc_mrr_at_1000_diff1 value: 56.371477471577535 - type: nauc_mrr_at_1000_max value: 42.773877962050086 - type: nauc_mrr_at_1000_std value: -40.41765081873682 - type: nauc_mrr_at_100_diff1 value: 56.3619751528192 - type: nauc_mrr_at_100_max value: 42.76298794859916 - type: nauc_mrr_at_100_std value: -40.44070582448831 - type: nauc_mrr_at_10_diff1 value: 56.33810523477712 - type: nauc_mrr_at_10_max value: 42.76591937795783 - type: nauc_mrr_at_10_std value: -40.69339583030244 - type: nauc_mrr_at_1_diff1 value: 58.90399906884378 - type: nauc_mrr_at_1_max value: 43.38806571165292 - type: nauc_mrr_at_1_std value: -38.224015285584 - type: nauc_mrr_at_20_diff1 value: 56.32629070537032 - type: nauc_mrr_at_20_max value: 42.79615263472604 - type: nauc_mrr_at_20_std value: -40.496777397603076 - type: nauc_mrr_at_3_diff1 value: 55.96989454480743 - type: nauc_mrr_at_3_max value: 42.49832220744744 - type: nauc_mrr_at_3_std value: -39.883799467132384 - type: nauc_mrr_at_5_diff1 value: 56.003080766475755 - type: nauc_mrr_at_5_max value: 42.73308051011805 - type: nauc_mrr_at_5_std value: -39.87179511166683 - type: nauc_ndcg_at_1000_diff1 value: 52.49054229225255 - type: nauc_ndcg_at_1000_max value: 39.61644750719859 - type: nauc_ndcg_at_1000_std value: -40.89845763194674 - type: nauc_ndcg_at_100_diff1 value: 52.33511250864434 - type: nauc_ndcg_at_100_max value: 39.25530146124452 - type: nauc_ndcg_at_100_std value: -41.92444498004374 - type: nauc_ndcg_at_10_diff1 value: 52.62031505931842 - type: nauc_ndcg_at_10_max value: 38.667195545396766 - type: nauc_ndcg_at_10_std value: -42.59503924641507 - type: nauc_ndcg_at_1_diff1 value: 58.90399906884378 - type: nauc_ndcg_at_1_max value: 43.38806571165292 - type: nauc_ndcg_at_1_std value: -38.224015285584 - type: nauc_ndcg_at_20_diff1 value: 52.15061629809436 - type: nauc_ndcg_at_20_max value: 39.09332400054708 - type: nauc_ndcg_at_20_std value: -42.80018671618001 - type: nauc_ndcg_at_3_diff1 value: 51.04210728138207 - type: nauc_ndcg_at_3_max value: 38.19034802567046 - type: nauc_ndcg_at_3_std value: -38.179821090765216 - type: 
nauc_ndcg_at_5_diff1 value: 51.04399574045204 - type: nauc_ndcg_at_5_max value: 38.42492210204548 - type: nauc_ndcg_at_5_std value: -38.868073241617715 - type: nauc_precision_at_1000_diff1 value: -25.151369907213734 - type: nauc_precision_at_1000_max value: 9.012549147054989 - type: nauc_precision_at_1000_std value: -9.319786589947698 - type: nauc_precision_at_100_diff1 value: -23.20945211843088 - type: nauc_precision_at_100_max value: 9.860701593969862 - type: nauc_precision_at_100_std value: -13.073877818347231 - type: nauc_precision_at_10_diff1 value: -6.970781124246847 - type: nauc_precision_at_10_max value: 19.392675322254487 - type: nauc_precision_at_10_std value: -26.74943490717657 - type: nauc_precision_at_1_diff1 value: 58.90399906884378 - type: nauc_precision_at_1_max value: 43.38806571165292 - type: nauc_precision_at_1_std value: -38.224015285584 - type: nauc_precision_at_20_diff1 value: -13.046456108081102 - type: nauc_precision_at_20_max value: 15.69439950383875 - type: nauc_precision_at_20_std value: -23.836004512018093 - type: nauc_precision_at_3_diff1 value: 3.5444232965528846 - type: nauc_precision_at_3_max value: 27.08858445453865 - type: nauc_precision_at_3_std value: -29.12757283665593 - type: nauc_precision_at_5_diff1 value: -3.6853986353320267 - type: nauc_precision_at_5_max value: 24.32059689571271 - type: nauc_precision_at_5_std value: -27.46188072134163 - type: nauc_recall_at_1000_diff1 value: 86.93515141907919 - type: nauc_recall_at_1000_max value: 100.0 - type: nauc_recall_at_1000_std value: 100.0 - type: nauc_recall_at_100_diff1 value: 39.7052887613879 - type: nauc_recall_at_100_max value: 18.40943977796887 - type: nauc_recall_at_100_std value: -88.74014854144974 - type: nauc_recall_at_10_diff1 value: 48.85342500870892 - type: nauc_recall_at_10_max value: 32.69617204234419 - type: nauc_recall_at_10_std value: -51.9937231860804 - type: nauc_recall_at_1_diff1 value: 59.785298201044704 - type: nauc_recall_at_1_max value: 23.273537759937785 - type: nauc_recall_at_1_std value: -17.838712689290194 - type: nauc_recall_at_20_diff1 value: 45.40839773314378 - type: nauc_recall_at_20_max value: 33.02458321493215 - type: nauc_recall_at_20_std value: -55.97800739448166 - type: nauc_recall_at_3_diff1 value: 47.05565693416531 - type: nauc_recall_at_3_max value: 28.743850400344297 - type: nauc_recall_at_3_std value: -32.436470486397475 - type: nauc_recall_at_5_diff1 value: 45.30223758669577 - type: nauc_recall_at_5_max value: 33.6567274747059 - type: nauc_recall_at_5_std value: -39.946712017948514 - type: ndcg_at_1 value: 63.349999999999994 - type: ndcg_at_10 value: 69.446 - type: ndcg_at_100 value: 74.439 - type: ndcg_at_1000 value: 74.834 - type: ndcg_at_20 value: 71.763 - type: ndcg_at_3 value: 64.752 - type: ndcg_at_5 value: 66.316 - type: precision_at_1 value: 63.349999999999994 - type: precision_at_10 value: 16.286 - type: precision_at_100 value: 2.024 - type: precision_at_1000 value: 0.207 - type: precision_at_20 value: 8.908000000000001 - type: precision_at_3 value: 40.655 - type: precision_at_5 value: 28.859 - type: recall_at_1 value: 39.995999999999995 - type: recall_at_10 value: 78.107 - type: recall_at_100 value: 97.538 - type: recall_at_1000 value: 99.96000000000001 - type: recall_at_20 value: 85.72 - type: recall_at_3 value: 63.291 - type: recall_at_5 value: 70.625 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (spa-eng) type: jinaai/xpqa config: spa-eng split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 
68.258 - type: map_at_1 value: 33.06 - type: map_at_10 value: 61.590999999999994 - type: map_at_100 value: 63.341 - type: map_at_1000 value: 63.385999999999996 - type: map_at_20 value: 62.77700000000001 - type: map_at_3 value: 52.547999999999995 - type: map_at_5 value: 58.824 - type: mrr_at_1 value: 63.80832282471627 - type: mrr_at_10 value: 70.76848015372607 - type: mrr_at_100 value: 71.33996704518061 - type: mrr_at_1000 value: 71.35368444388072 - type: mrr_at_20 value: 71.18191741103522 - type: mrr_at_3 value: 68.83144178226142 - type: mrr_at_5 value: 69.88440521227405 - type: nauc_map_at_1000_diff1 value: 41.59255746310511 - type: nauc_map_at_1000_max value: 42.064075373358065 - type: nauc_map_at_1000_std value: -25.130730194381723 - type: nauc_map_at_100_diff1 value: 41.56447648820406 - type: nauc_map_at_100_max value: 42.06711634651607 - type: nauc_map_at_100_std value: -25.14871585556968 - type: nauc_map_at_10_diff1 value: 41.28968387107058 - type: nauc_map_at_10_max value: 41.511538272139774 - type: nauc_map_at_10_std value: -25.99906440164276 - type: nauc_map_at_1_diff1 value: 51.09859596320021 - type: nauc_map_at_1_max value: 12.406789321338222 - type: nauc_map_at_1_std value: -18.227486548655076 - type: nauc_map_at_20_diff1 value: 41.39469672947315 - type: nauc_map_at_20_max value: 41.98309315808902 - type: nauc_map_at_20_std value: -25.44704720985219 - type: nauc_map_at_3_diff1 value: 43.16164995512842 - type: nauc_map_at_3_max value: 30.935400935562818 - type: nauc_map_at_3_std value: -23.53095555148866 - type: nauc_map_at_5_diff1 value: 41.23474352142375 - type: nauc_map_at_5_max value: 39.03088859147947 - type: nauc_map_at_5_std value: -26.046526443708366 - type: nauc_mrr_at_1000_diff1 value: 51.79649678213789 - type: nauc_mrr_at_1000_max value: 50.50340748045259 - type: nauc_mrr_at_1000_std value: -24.777183703493407 - type: nauc_mrr_at_100_diff1 value: 51.78609028166551 - type: nauc_mrr_at_100_max value: 50.51732896833555 - type: nauc_mrr_at_100_std value: -24.760054686874717 - type: nauc_mrr_at_10_diff1 value: 51.705268395036995 - type: nauc_mrr_at_10_max value: 50.35818415293149 - type: nauc_mrr_at_10_std value: -25.170367120250404 - type: nauc_mrr_at_1_diff1 value: 53.91475115581825 - type: nauc_mrr_at_1_max value: 49.122529616282016 - type: nauc_mrr_at_1_std value: -22.377647552937155 - type: nauc_mrr_at_20_diff1 value: 51.778984221197774 - type: nauc_mrr_at_20_max value: 50.5070957827813 - type: nauc_mrr_at_20_std value: -24.908935023607285 - type: nauc_mrr_at_3_diff1 value: 51.82683773090423 - type: nauc_mrr_at_3_max value: 50.77993196421369 - type: nauc_mrr_at_3_std value: -24.3925832021831 - type: nauc_mrr_at_5_diff1 value: 51.722232683543034 - type: nauc_mrr_at_5_max value: 50.334865493961864 - type: nauc_mrr_at_5_std value: -25.513593495703297 - type: nauc_ndcg_at_1000_diff1 value: 44.21851582991263 - type: nauc_ndcg_at_1000_max value: 45.73539068637836 - type: nauc_ndcg_at_1000_std value: -24.716522467580397 - type: nauc_ndcg_at_100_diff1 value: 43.8002401615357 - type: nauc_ndcg_at_100_max value: 45.801409410061915 - type: nauc_ndcg_at_100_std value: -24.73171742499903 - type: nauc_ndcg_at_10_diff1 value: 42.540922778755885 - type: nauc_ndcg_at_10_max value: 44.348836943874595 - type: nauc_ndcg_at_10_std value: -28.05403666494785 - type: nauc_ndcg_at_1_diff1 value: 53.91475115581825 - type: nauc_ndcg_at_1_max value: 49.122529616282016 - type: nauc_ndcg_at_1_std value: -22.377647552937155 - type: nauc_ndcg_at_20_diff1 value: 43.10347921163421 - type: 
nauc_ndcg_at_20_max value: 45.53253270265022 - type: nauc_ndcg_at_20_std value: -26.63902791862846 - type: nauc_ndcg_at_3_diff1 value: 42.41720274782384 - type: nauc_ndcg_at_3_max value: 42.91778219334943 - type: nauc_ndcg_at_3_std value: -24.793252033594076 - type: nauc_ndcg_at_5_diff1 value: 42.51515034945093 - type: nauc_ndcg_at_5_max value: 41.62080576508792 - type: nauc_ndcg_at_5_std value: -28.209669314955065 - type: nauc_precision_at_1000_diff1 value: -14.89794075433148 - type: nauc_precision_at_1000_max value: 27.85387929356412 - type: nauc_precision_at_1000_std value: 10.728618597190849 - type: nauc_precision_at_100_diff1 value: -13.075270046295856 - type: nauc_precision_at_100_max value: 29.77208946756632 - type: nauc_precision_at_100_std value: 8.491662697326039 - type: nauc_precision_at_10_diff1 value: -4.0826025188781205 - type: nauc_precision_at_10_max value: 39.04278085180075 - type: nauc_precision_at_10_std value: -5.925408651372333 - type: nauc_precision_at_1_diff1 value: 53.91475115581825 - type: nauc_precision_at_1_max value: 49.122529616282016 - type: nauc_precision_at_1_std value: -22.377647552937155 - type: nauc_precision_at_20_diff1 value: -7.93186440645135 - type: nauc_precision_at_20_max value: 35.81281308891365 - type: nauc_precision_at_20_std value: 0.1241277857515697 - type: nauc_precision_at_3_diff1 value: 7.563562511484409 - type: nauc_precision_at_3_max value: 43.43738862378524 - type: nauc_precision_at_3_std value: -11.958059731912615 - type: nauc_precision_at_5_diff1 value: -0.1801152449011624 - type: nauc_precision_at_5_max value: 41.32486715619513 - type: nauc_precision_at_5_std value: -10.088699021919552 - type: nauc_recall_at_1000_diff1 value: 86.93359696819986 - type: nauc_recall_at_1000_max value: 100.0 - type: nauc_recall_at_1000_std value: 72.21843645604022 - type: nauc_recall_at_100_diff1 value: 29.86050842714198 - type: nauc_recall_at_100_max value: 48.106658251136245 - type: nauc_recall_at_100_std value: -14.981886214880035 - type: nauc_recall_at_10_diff1 value: 33.67119240737528 - type: nauc_recall_at_10_max value: 39.271984859561414 - type: nauc_recall_at_10_std value: -35.6434883839217 - type: nauc_recall_at_1_diff1 value: 51.09859596320021 - type: nauc_recall_at_1_max value: 12.406789321338222 - type: nauc_recall_at_1_std value: -18.227486548655076 - type: nauc_recall_at_20_diff1 value: 33.211979983240724 - type: nauc_recall_at_20_max value: 43.47676074743184 - type: nauc_recall_at_20_std value: -33.88107138395349 - type: nauc_recall_at_3_diff1 value: 39.22513750146998 - type: nauc_recall_at_3_max value: 27.066674083840166 - type: nauc_recall_at_3_std value: -26.963282529629893 - type: nauc_recall_at_5_diff1 value: 36.53718917129459 - type: nauc_recall_at_5_max value: 35.40550013169686 - type: nauc_recall_at_5_std value: -34.209159379410806 - type: ndcg_at_1 value: 63.808 - type: ndcg_at_10 value: 68.258 - type: ndcg_at_100 value: 73.38799999999999 - type: ndcg_at_1000 value: 74.03 - type: ndcg_at_20 value: 70.968 - type: ndcg_at_3 value: 62.33 - type: ndcg_at_5 value: 64.096 - type: precision_at_1 value: 63.808 - type: precision_at_10 value: 19.243 - type: precision_at_100 value: 2.367 - type: precision_at_1000 value: 0.245 - type: precision_at_20 value: 10.599 - type: precision_at_3 value: 44.515 - type: precision_at_5 value: 33.467999999999996 - type: recall_at_1 value: 33.06 - type: recall_at_10 value: 77.423 - type: recall_at_100 value: 95.923 - type: recall_at_1000 value: 99.874 - type: recall_at_20 value: 85.782 - type: recall_at_3 
value: 57.098000000000006 - type: recall_at_5 value: 67.472 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (spa-spa) type: jinaai/xpqa config: spa-spa split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 72.004 - type: map_at_1 value: 36.248000000000005 - type: map_at_10 value: 65.679 - type: map_at_100 value: 67.22399999999999 - type: map_at_1000 value: 67.264 - type: map_at_20 value: 66.705 - type: map_at_3 value: 56.455 - type: map_at_5 value: 62.997 - type: mrr_at_1 value: 67.71752837326608 - type: mrr_at_10 value: 74.59782021257429 - type: mrr_at_100 value: 75.0640960767943 - type: mrr_at_1000 value: 75.07324799466076 - type: mrr_at_20 value: 74.9323963386884 - type: mrr_at_3 value: 72.95081967213115 - type: mrr_at_5 value: 73.82723833543506 - type: nauc_map_at_1000_diff1 value: 43.111810717567714 - type: nauc_map_at_1000_max value: 44.835247208972476 - type: nauc_map_at_1000_std value: -32.798405973931985 - type: nauc_map_at_100_diff1 value: 43.090223482932764 - type: nauc_map_at_100_max value: 44.83392441557943 - type: nauc_map_at_100_std value: -32.81149166676563 - type: nauc_map_at_10_diff1 value: 42.87841934951979 - type: nauc_map_at_10_max value: 43.9838653389494 - type: nauc_map_at_10_std value: -33.588084643627084 - type: nauc_map_at_1_diff1 value: 54.509245848379095 - type: nauc_map_at_1_max value: 10.05921648322742 - type: nauc_map_at_1_std value: -24.652326014826762 - type: nauc_map_at_20_diff1 value: 43.07468612984794 - type: nauc_map_at_20_max value: 44.75663122615032 - type: nauc_map_at_20_std value: -33.11788887878321 - type: nauc_map_at_3_diff1 value: 44.63272828938906 - type: nauc_map_at_3_max value: 32.1584369869227 - type: nauc_map_at_3_std value: -30.761662210142944 - type: nauc_map_at_5_diff1 value: 42.77296997803048 - type: nauc_map_at_5_max value: 41.78894616737652 - type: nauc_map_at_5_std value: -33.56459774477362 - type: nauc_mrr_at_1000_diff1 value: 53.097544131833494 - type: nauc_mrr_at_1000_max value: 50.61134979184588 - type: nauc_mrr_at_1000_std value: -35.6221191487669 - type: nauc_mrr_at_100_diff1 value: 53.096609856182106 - type: nauc_mrr_at_100_max value: 50.61951585642645 - type: nauc_mrr_at_100_std value: -35.62396157508327 - type: nauc_mrr_at_10_diff1 value: 52.771534471912304 - type: nauc_mrr_at_10_max value: 50.430863224435726 - type: nauc_mrr_at_10_std value: -36.027992076620365 - type: nauc_mrr_at_1_diff1 value: 55.05316238884337 - type: nauc_mrr_at_1_max value: 49.461858515275196 - type: nauc_mrr_at_1_std value: -31.87492636319712 - type: nauc_mrr_at_20_diff1 value: 53.083253469629746 - type: nauc_mrr_at_20_max value: 50.62156424256193 - type: nauc_mrr_at_20_std value: -35.879153692447154 - type: nauc_mrr_at_3_diff1 value: 52.98283109188415 - type: nauc_mrr_at_3_max value: 50.83561260429378 - type: nauc_mrr_at_3_std value: -35.30839538038797 - type: nauc_mrr_at_5_diff1 value: 52.93270510879709 - type: nauc_mrr_at_5_max value: 50.54595596761199 - type: nauc_mrr_at_5_std value: -35.84059376434395 - type: nauc_ndcg_at_1000_diff1 value: 45.343685089209416 - type: nauc_ndcg_at_1000_max value: 47.801141576669465 - type: nauc_ndcg_at_1000_std value: -33.512958862879195 - type: nauc_ndcg_at_100_diff1 value: 45.255590461515894 - type: nauc_ndcg_at_100_max value: 47.99240031881967 - type: nauc_ndcg_at_100_std value: -33.614465006695205 - type: nauc_ndcg_at_10_diff1 value: 43.93472511731019 - type: nauc_ndcg_at_10_max value: 45.92599752897053 - type: nauc_ndcg_at_10_std value: -36.43629114491574 - 
type: nauc_ndcg_at_1_diff1 value: 55.05316238884337 - type: nauc_ndcg_at_1_max value: 49.461858515275196 - type: nauc_ndcg_at_1_std value: -31.87492636319712 - type: nauc_ndcg_at_20_diff1 value: 44.93534591273201 - type: nauc_ndcg_at_20_max value: 47.55153940713458 - type: nauc_ndcg_at_20_std value: -35.56392448745206 - type: nauc_ndcg_at_3_diff1 value: 43.17916122133396 - type: nauc_ndcg_at_3_max value: 45.603634205103276 - type: nauc_ndcg_at_3_std value: -32.473227507181214 - type: nauc_ndcg_at_5_diff1 value: 44.10242961669216 - type: nauc_ndcg_at_5_max value: 43.61666669031808 - type: nauc_ndcg_at_5_std value: -35.98808321497782 - type: nauc_precision_at_1000_diff1 value: -23.264714449991146 - type: nauc_precision_at_1000_max value: 28.505729576735465 - type: nauc_precision_at_1000_std value: 11.987379232920926 - type: nauc_precision_at_100_diff1 value: -21.156119174614627 - type: nauc_precision_at_100_max value: 30.711646221646255 - type: nauc_precision_at_100_std value: 9.650486536340322 - type: nauc_precision_at_10_diff1 value: -10.98001328477502 - type: nauc_precision_at_10_max value: 39.25638073760597 - type: nauc_precision_at_10_std value: -4.3456859257488 - type: nauc_precision_at_1_diff1 value: 55.05316238884337 - type: nauc_precision_at_1_max value: 49.461858515275196 - type: nauc_precision_at_1_std value: -31.87492636319712 - type: nauc_precision_at_20_diff1 value: -14.97565390664424 - type: nauc_precision_at_20_max value: 36.383835295942355 - type: nauc_precision_at_20_std value: 1.525158880381114 - type: nauc_precision_at_3_diff1 value: 1.0448345623903483 - type: nauc_precision_at_3_max value: 45.69772060667404 - type: nauc_precision_at_3_std value: -13.002685018948293 - type: nauc_precision_at_5_diff1 value: -5.434185597628904 - type: nauc_precision_at_5_max value: 42.99162431099203 - type: nauc_precision_at_5_std value: -9.789308817624534 - type: nauc_recall_at_1000_diff1 value: 12.309303236094845 - type: nauc_recall_at_1000_max value: 100.0 - type: nauc_recall_at_1000_std value: 86.93359696819986 - type: nauc_recall_at_100_diff1 value: 39.093544920901415 - type: nauc_recall_at_100_max value: 55.62814395062938 - type: nauc_recall_at_100_std value: -22.6919033301514 - type: nauc_recall_at_10_diff1 value: 35.50100141633622 - type: nauc_recall_at_10_max value: 39.25750019586647 - type: nauc_recall_at_10_std value: -43.01273078031791 - type: nauc_recall_at_1_diff1 value: 54.509245848379095 - type: nauc_recall_at_1_max value: 10.05921648322742 - type: nauc_recall_at_1_std value: -24.652326014826762 - type: nauc_recall_at_20_diff1 value: 38.1281707132327 - type: nauc_recall_at_20_max value: 43.97950642900301 - type: nauc_recall_at_20_std value: -44.049952771307574 - type: nauc_recall_at_3_diff1 value: 40.01986938242728 - type: nauc_recall_at_3_max value: 27.517114421061173 - type: nauc_recall_at_3_std value: -32.99056780232045 - type: nauc_recall_at_5_diff1 value: 38.52035606499483 - type: nauc_recall_at_5_max value: 37.05834604678859 - type: nauc_recall_at_5_std value: -39.86196378897912 - type: ndcg_at_1 value: 67.718 - type: ndcg_at_10 value: 72.004 - type: ndcg_at_100 value: 76.554 - type: ndcg_at_1000 value: 77.07300000000001 - type: ndcg_at_20 value: 74.37899999999999 - type: ndcg_at_3 value: 66.379 - type: ndcg_at_5 value: 68.082 - type: precision_at_1 value: 67.718 - type: precision_at_10 value: 19.849 - type: precision_at_100 value: 2.3800000000000003 - type: precision_at_1000 value: 0.245 - type: precision_at_20 value: 10.813 - type: precision_at_3 value: 46.574 - 
type: precision_at_5 value: 34.83 - type: recall_at_1 value: 36.248000000000005 - type: recall_at_10 value: 80.252 - type: recall_at_100 value: 96.73 - type: recall_at_1000 value: 99.874 - type: recall_at_20 value: 87.703 - type: recall_at_3 value: 60.815 - type: recall_at_5 value: 71.16 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (fra-eng) type: jinaai/xpqa config: fra-eng split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 73.729 - type: map_at_1 value: 43.964999999999996 - type: map_at_10 value: 67.803 - type: map_at_100 value: 69.188 - type: map_at_1000 value: 69.21000000000001 - type: map_at_20 value: 68.747 - type: map_at_3 value: 60.972 - type: map_at_5 value: 65.39399999999999 - type: mrr_at_1 value: 68.4913217623498 - type: mrr_at_10 value: 75.2600822260368 - type: mrr_at_100 value: 75.6599169808848 - type: mrr_at_1000 value: 75.66720883727534 - type: mrr_at_20 value: 75.52375865860405 - type: mrr_at_3 value: 73.54250111259452 - type: mrr_at_5 value: 74.51713395638626 - type: nauc_map_at_1000_diff1 value: 46.81533703002097 - type: nauc_map_at_1000_max value: 46.30794757084772 - type: nauc_map_at_1000_std value: -14.953470500312335 - type: nauc_map_at_100_diff1 value: 46.82464740277745 - type: nauc_map_at_100_max value: 46.32852879948254 - type: nauc_map_at_100_std value: -14.950035098066172 - type: nauc_map_at_10_diff1 value: 46.31406143369831 - type: nauc_map_at_10_max value: 45.337593270786634 - type: nauc_map_at_10_std value: -16.011789445907876 - type: nauc_map_at_1_diff1 value: 57.097134715065835 - type: nauc_map_at_1_max value: 21.93931500350721 - type: nauc_map_at_1_std value: -15.134457251301637 - type: nauc_map_at_20_diff1 value: 46.47030891134173 - type: nauc_map_at_20_max value: 46.29169960276292 - type: nauc_map_at_20_std value: -15.14241106541829 - type: nauc_map_at_3_diff1 value: 50.27064228648596 - type: nauc_map_at_3_max value: 39.43058773971639 - type: nauc_map_at_3_std value: -16.16545993089126 - type: nauc_map_at_5_diff1 value: 46.974867679747426 - type: nauc_map_at_5_max value: 44.31091104855002 - type: nauc_map_at_5_std value: -16.50175337658926 - type: nauc_mrr_at_1000_diff1 value: 55.20294005110399 - type: nauc_mrr_at_1000_max value: 51.947725719119966 - type: nauc_mrr_at_1000_std value: -14.586112939597232 - type: nauc_mrr_at_100_diff1 value: 55.20426251109304 - type: nauc_mrr_at_100_max value: 51.95648725402534 - type: nauc_mrr_at_100_std value: -14.579769236539143 - type: nauc_mrr_at_10_diff1 value: 54.93870506205835 - type: nauc_mrr_at_10_max value: 51.89312772900638 - type: nauc_mrr_at_10_std value: -14.692635010092939 - type: nauc_mrr_at_1_diff1 value: 56.54945935175171 - type: nauc_mrr_at_1_max value: 51.28134504197991 - type: nauc_mrr_at_1_std value: -12.909042186563061 - type: nauc_mrr_at_20_diff1 value: 55.10667018041461 - type: nauc_mrr_at_20_max value: 51.98236870783707 - type: nauc_mrr_at_20_std value: -14.599377575198025 - type: nauc_mrr_at_3_diff1 value: 55.67124311746892 - type: nauc_mrr_at_3_max value: 51.77903236246767 - type: nauc_mrr_at_3_std value: -14.94452633860763 - type: nauc_mrr_at_5_diff1 value: 55.42849172366371 - type: nauc_mrr_at_5_max value: 51.76902965753959 - type: nauc_mrr_at_5_std value: -15.357993534727072 - type: nauc_ndcg_at_1000_diff1 value: 48.736844959280326 - type: nauc_ndcg_at_1000_max value: 48.92891159935398 - type: nauc_ndcg_at_1000_std value: -13.983968675611056 - type: nauc_ndcg_at_100_diff1 value: 48.73859328503975 - type: nauc_ndcg_at_100_max value: 
49.31867149556439 - type: nauc_ndcg_at_100_std value: -13.72387564912742 - type: nauc_ndcg_at_10_diff1 value: 46.50313862975287 - type: nauc_ndcg_at_10_max value: 47.13599793554596 - type: nauc_ndcg_at_10_std value: -16.317919977400113 - type: nauc_ndcg_at_1_diff1 value: 56.54945935175171 - type: nauc_ndcg_at_1_max value: 51.28134504197991 - type: nauc_ndcg_at_1_std value: -12.909042186563061 - type: nauc_ndcg_at_20_diff1 value: 47.01727117133912 - type: nauc_ndcg_at_20_max value: 49.121366036709105 - type: nauc_ndcg_at_20_std value: -14.411078677638775 - type: nauc_ndcg_at_3_diff1 value: 49.229581145458276 - type: nauc_ndcg_at_3_max value: 47.427609717032 - type: nauc_ndcg_at_3_std value: -16.52066627289908 - type: nauc_ndcg_at_5_diff1 value: 48.0152514127505 - type: nauc_ndcg_at_5_max value: 46.12152407850816 - type: nauc_ndcg_at_5_std value: -17.613295491954656 - type: nauc_precision_at_1000_diff1 value: -25.959006032642463 - type: nauc_precision_at_1000_max value: 12.81002362947137 - type: nauc_precision_at_1000_std value: 12.575312826061513 - type: nauc_precision_at_100_diff1 value: -24.35413527283394 - type: nauc_precision_at_100_max value: 14.878359236477303 - type: nauc_precision_at_100_std value: 12.384426050018428 - type: nauc_precision_at_10_diff1 value: -17.93220761770618 - type: nauc_precision_at_10_max value: 23.523485811847294 - type: nauc_precision_at_10_std value: 4.424456968716939 - type: nauc_precision_at_1_diff1 value: 56.54945935175171 - type: nauc_precision_at_1_max value: 51.28134504197991 - type: nauc_precision_at_1_std value: -12.909042186563061 - type: nauc_precision_at_20_diff1 value: -21.776871398686936 - type: nauc_precision_at_20_max value: 21.18436338264366 - type: nauc_precision_at_20_std value: 9.937274986573321 - type: nauc_precision_at_3_diff1 value: -1.2411845580934435 - type: nauc_precision_at_3_max value: 34.962281941875 - type: nauc_precision_at_3_std value: -2.447892908501237 - type: nauc_precision_at_5_diff1 value: -11.134164534114085 - type: nauc_precision_at_5_max value: 30.22079740070525 - type: nauc_precision_at_5_std value: -0.24232594421765946 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: 43.3647412452869 - type: nauc_recall_at_100_max value: 63.50094950500327 - type: nauc_recall_at_100_std value: 2.3911909633714044 - type: nauc_recall_at_10_diff1 value: 33.993445071666855 - type: nauc_recall_at_10_max value: 41.38694129134144 - type: nauc_recall_at_10_std value: -19.308698266099096 - type: nauc_recall_at_1_diff1 value: 57.097134715065835 - type: nauc_recall_at_1_max value: 21.93931500350721 - type: nauc_recall_at_1_std value: -15.134457251301637 - type: nauc_recall_at_20_diff1 value: 32.03888531880772 - type: nauc_recall_at_20_max value: 49.660787482562085 - type: nauc_recall_at_20_std value: -12.641456758778382 - type: nauc_recall_at_3_diff1 value: 47.94527082900579 - type: nauc_recall_at_3_max value: 36.51733131437679 - type: nauc_recall_at_3_std value: -18.65511713247495 - type: nauc_recall_at_5_diff1 value: 42.04545772092305 - type: nauc_recall_at_5_max value: 41.21440912972303 - type: nauc_recall_at_5_std value: -21.47386527081128 - type: ndcg_at_1 value: 68.491 - type: ndcg_at_10 value: 73.729 - type: ndcg_at_100 value: 77.684 - type: ndcg_at_1000 value: 78.084 - type: ndcg_at_20 value: 75.795 - type: ndcg_at_3 value: 68.568 - type: ndcg_at_5 value: 70.128 - type: precision_at_1 value: 68.491 - type: 
precision_at_10 value: 16.996 - type: precision_at_100 value: 2.023 - type: precision_at_1000 value: 0.207 - type: precision_at_20 value: 9.246 - type: precision_at_3 value: 41.923 - type: precision_at_5 value: 29.826000000000004 - type: recall_at_1 value: 43.964999999999996 - type: recall_at_10 value: 82.777 - type: recall_at_100 value: 97.287 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 89.183 - type: recall_at_3 value: 65.803 - type: recall_at_5 value: 74.119 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (fr) type: jinaai/xpqa config: fra-fra split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 77.581 - type: map_at_1 value: 46.444 - type: map_at_10 value: 72.084 - type: map_at_100 value: 73.175 - type: map_at_1000 value: 73.193 - type: map_at_20 value: 72.77799999999999 - type: map_at_3 value: 65.242 - type: map_at_5 value: 69.926 - type: mrr_at_1 value: 71.82910547396529 - type: mrr_at_10 value: 78.66594612923046 - type: mrr_at_100 value: 78.97334934049613 - type: mrr_at_1000 value: 78.97687021803557 - type: mrr_at_20 value: 78.85701141744282 - type: mrr_at_3 value: 76.96929238985311 - type: mrr_at_5 value: 77.99732977303067 - type: nauc_map_at_1000_diff1 value: 49.090956807097804 - type: nauc_map_at_1000_max value: 52.01095354889508 - type: nauc_map_at_1000_std value: -12.182870421711026 - type: nauc_map_at_100_diff1 value: 49.091664766684566 - type: nauc_map_at_100_max value: 52.017499797253755 - type: nauc_map_at_100_std value: -12.188342487271528 - type: nauc_map_at_10_diff1 value: 48.6619338205362 - type: nauc_map_at_10_max value: 50.93591260329888 - type: nauc_map_at_10_std value: -12.899399261673365 - type: nauc_map_at_1_diff1 value: 61.89699552471587 - type: nauc_map_at_1_max value: 22.387748207421946 - type: nauc_map_at_1_std value: -17.139518194308437 - type: nauc_map_at_20_diff1 value: 48.72828404686453 - type: nauc_map_at_20_max value: 51.781074586075434 - type: nauc_map_at_20_std value: -12.174270605093136 - type: nauc_map_at_3_diff1 value: 53.11509580126934 - type: nauc_map_at_3_max value: 42.1768380145106 - type: nauc_map_at_3_std value: -14.98340833032363 - type: nauc_map_at_5_diff1 value: 49.60521390803235 - type: nauc_map_at_5_max value: 49.80360562029127 - type: nauc_map_at_5_std value: -13.900652140457618 - type: nauc_mrr_at_1000_diff1 value: 58.10782478654255 - type: nauc_mrr_at_1000_max value: 61.31083013535486 - type: nauc_mrr_at_1000_std value: -9.624904298545921 - type: nauc_mrr_at_100_diff1 value: 58.11041683306092 - type: nauc_mrr_at_100_max value: 61.31590199755797 - type: nauc_mrr_at_100_std value: -9.625991053580865 - type: nauc_mrr_at_10_diff1 value: 57.883701815695375 - type: nauc_mrr_at_10_max value: 61.36276126424689 - type: nauc_mrr_at_10_std value: -9.495072468420386 - type: nauc_mrr_at_1_diff1 value: 60.18176977079093 - type: nauc_mrr_at_1_max value: 59.697615236642555 - type: nauc_mrr_at_1_std value: -9.396133077966779 - type: nauc_mrr_at_20_diff1 value: 57.964817434006754 - type: nauc_mrr_at_20_max value: 61.34073539502932 - type: nauc_mrr_at_20_std value: -9.602378876645131 - type: nauc_mrr_at_3_diff1 value: 58.44338049427257 - type: nauc_mrr_at_3_max value: 60.92272989411293 - type: nauc_mrr_at_3_std value: -9.928970439416162 - type: nauc_mrr_at_5_diff1 value: 58.01513016866578 - type: nauc_mrr_at_5_max value: 61.46805302986586 - type: nauc_mrr_at_5_std value: -9.842227002440984 - type: nauc_ndcg_at_1000_diff1 value: 50.99293152828167 - type: nauc_ndcg_at_1000_max 
value: 56.14232784664811 - type: nauc_ndcg_at_1000_std value: -10.529213072410288 - type: nauc_ndcg_at_100_diff1 value: 50.99385944312529 - type: nauc_ndcg_at_100_max value: 56.34825518954588 - type: nauc_ndcg_at_100_std value: -10.398943874846047 - type: nauc_ndcg_at_10_diff1 value: 48.51273364357823 - type: nauc_ndcg_at_10_max value: 53.77871849486298 - type: nauc_ndcg_at_10_std value: -11.82105972112472 - type: nauc_ndcg_at_1_diff1 value: 60.18176977079093 - type: nauc_ndcg_at_1_max value: 59.697615236642555 - type: nauc_ndcg_at_1_std value: -9.396133077966779 - type: nauc_ndcg_at_20_diff1 value: 49.04268319033412 - type: nauc_ndcg_at_20_max value: 55.47011381097071 - type: nauc_ndcg_at_20_std value: -10.486452945493042 - type: nauc_ndcg_at_3_diff1 value: 50.95112745400584 - type: nauc_ndcg_at_3_max value: 53.45473828705577 - type: nauc_ndcg_at_3_std value: -13.420699384045728 - type: nauc_ndcg_at_5_diff1 value: 50.313156212000074 - type: nauc_ndcg_at_5_max value: 52.78539129309866 - type: nauc_ndcg_at_5_std value: -13.586274096509122 - type: nauc_precision_at_1000_diff1 value: -31.13772049254778 - type: nauc_precision_at_1000_max value: 17.2847598361294 - type: nauc_precision_at_1000_std value: 15.497531773816887 - type: nauc_precision_at_100_diff1 value: -29.98812263553739 - type: nauc_precision_at_100_max value: 19.048620003227654 - type: nauc_precision_at_100_std value: 15.38499952171958 - type: nauc_precision_at_10_diff1 value: -25.33028097412579 - type: nauc_precision_at_10_max value: 26.077919168306853 - type: nauc_precision_at_10_std value: 11.35352933466097 - type: nauc_precision_at_1_diff1 value: 60.18176977079093 - type: nauc_precision_at_1_max value: 59.697615236642555 - type: nauc_precision_at_1_std value: -9.396133077966779 - type: nauc_precision_at_20_diff1 value: -28.417606311068905 - type: nauc_precision_at_20_max value: 23.958679828637692 - type: nauc_precision_at_20_std value: 14.442021499194205 - type: nauc_precision_at_3_diff1 value: -8.127396049790482 - type: nauc_precision_at_3_max value: 37.348067982957076 - type: nauc_precision_at_3_std value: 4.747913619596849 - type: nauc_precision_at_5_diff1 value: -16.902418446058395 - type: nauc_precision_at_5_max value: 32.73583852552014 - type: nauc_precision_at_5_std value: 7.031446423850052 - type: nauc_recall_at_1000_diff1 value: -14.485978369112514 - type: nauc_recall_at_1000_max value: 78.59123887333172 - type: nauc_recall_at_1000_std value: 90.7384575424963 - type: nauc_recall_at_100_diff1 value: 41.47842281590715 - type: nauc_recall_at_100_max value: 67.47271545727422 - type: nauc_recall_at_100_std value: 14.555561992253999 - type: nauc_recall_at_10_diff1 value: 33.05308907973924 - type: nauc_recall_at_10_max value: 45.49878918493155 - type: nauc_recall_at_10_std value: -11.560069806810926 - type: nauc_recall_at_1_diff1 value: 61.89699552471587 - type: nauc_recall_at_1_max value: 22.387748207421946 - type: nauc_recall_at_1_std value: -17.139518194308437 - type: nauc_recall_at_20_diff1 value: 31.305721376453754 - type: nauc_recall_at_20_max value: 51.24817763724019 - type: nauc_recall_at_20_std value: -5.0809908162023145 - type: nauc_recall_at_3_diff1 value: 49.27109038342917 - type: nauc_recall_at_3_max value: 37.69188317998447 - type: nauc_recall_at_3_std value: -17.119900758664336 - type: nauc_recall_at_5_diff1 value: 42.74501803377967 - type: nauc_recall_at_5_max value: 46.877008503354844 - type: nauc_recall_at_5_std value: -15.704892082115975 - type: ndcg_at_1 value: 71.829 - type: ndcg_at_10 value: 77.581 - 
type: ndcg_at_100 value: 80.75 - type: ndcg_at_1000 value: 81.026 - type: ndcg_at_20 value: 79.092 - type: ndcg_at_3 value: 72.81 - type: ndcg_at_5 value: 74.22999999999999 - type: precision_at_1 value: 71.829 - type: precision_at_10 value: 17.717 - type: precision_at_100 value: 2.031 - type: precision_at_1000 value: 0.207 - type: precision_at_20 value: 9.399000000000001 - type: precision_at_3 value: 44.458999999999996 - type: precision_at_5 value: 31.535000000000004 - type: recall_at_1 value: 46.444 - type: recall_at_10 value: 86.275 - type: recall_at_100 value: 98.017 - type: recall_at_1000 value: 99.8 - type: recall_at_20 value: 90.935 - type: recall_at_3 value: 70.167 - type: recall_at_5 value: 78.2 ---
<br><br>

<p align="center">
<img src="https://huggingface.co/datasets/jinaai/documentation-images/resolve/main/logo.webp" alt="Jina AI: Your Search Foundation, Supercharged!" width="150px">
</p>

<p align="center">
<b>The embedding model trained by <a href="https://jina.ai/"><b>Jina AI</b></a>.</b>
</p>

<p align="center">
<b>jina-embeddings-v3: Multilingual Embeddings With Task LoRA</b>
</p>

## Quick Start

[Blog](https://jina.ai/news/jina-embeddings-v3-a-frontier-multilingual-embedding-model/#parameter-dimensions) | [Azure](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/jinaai.jina-embeddings-v3-vm) | [AWS SageMaker](https://aws.amazon.com/marketplace/pp/prodview-kdi3xkt62lo32) | [API](https://jina.ai/embeddings)

## Intended Usage & Model Info

`jina-embeddings-v3` is a **multilingual multi-task text embedding model** designed for a variety of NLP applications. Based on the [Jina-XLM-RoBERTa architecture](https://huggingface.co/jinaai/xlm-roberta-flash-implementation), this model supports Rotary Position Embeddings to handle long input sequences up to **8192 tokens**. Additionally, it features 5 LoRA adapters to generate task-specific embeddings efficiently.

### Key Features:

- **Extended Sequence Length:** Supports up to 8192 tokens with RoPE.
- **Task-Specific Embedding:** Customize embeddings through the `task` argument with the following options:
  - `retrieval.query`: Used for query embeddings in asymmetric retrieval tasks
  - `retrieval.passage`: Used for passage embeddings in asymmetric retrieval tasks
  - `separation`: Used for embeddings in clustering and re-ranking applications
  - `classification`: Used for embeddings in classification tasks
  - `text-matching`: Used for embeddings in tasks that quantify similarity between two texts, such as STS or symmetric retrieval tasks
- **Matryoshka Embeddings**: Supports flexible embedding sizes (`32, 64, 128, 256, 512, 768, 1024`), allowing for truncating embeddings to fit your application.

### Supported Languages:

While the foundation model supports 100 languages, we've focused our tuning efforts on the following 30 languages: **Arabic, Bengali, Chinese, Danish, Dutch, English, Finnish, French, Georgian, German, Greek, Hindi, Indonesian, Italian, Japanese, Korean, Latvian, Norwegian, Polish, Portuguese, Romanian, Russian, Slovak, Spanish, Swedish, Thai, Turkish, Ukrainian, Urdu,** and **Vietnamese.**

> **⚠️ Important Notice:**
> We fixed a bug in the `encode` function [#60](https://huggingface.co/jinaai/jina-embeddings-v3/discussions/60) where **Matryoshka embedding truncation** occurred *after normalization*, leading to non-normalized truncated embeddings. This issue has been resolved in the latest code revision.
>
> If you have encoded data using the previous version and wish to maintain consistency, please use the specific code revision when loading the model: `AutoModel.from_pretrained('jinaai/jina-embeddings-v3', code_revision='da863dd04a4e5dce6814c6625adfba87b83838aa', ...)`

## Usage

**<details><summary>Apply mean pooling when integrating the model.</summary>**
<p>

### Why Use Mean Pooling?

Mean pooling takes all token embeddings from the model's output and averages them at the sentence or paragraph level. This approach has been shown to produce high-quality sentence embeddings.

We provide an `encode` function that handles this for you automatically.

However, if you're working with the model directly, outside of the `encode` function, you'll need to apply mean pooling manually. Here's how you can do it:

```python
import torch
import torch.nn.functional as F
from transformers import AutoTokenizer, AutoModel


def mean_pooling(model_output, attention_mask):
    token_embeddings = model_output[0]
    input_mask_expanded = (
        attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
    )
    return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(
        input_mask_expanded.sum(1), min=1e-9
    )


sentences = ["How is the weather today?", "What is the current weather like today?"]

tokenizer = AutoTokenizer.from_pretrained("jinaai/jina-embeddings-v3")
model = AutoModel.from_pretrained("jinaai/jina-embeddings-v3", trust_remote_code=True)

encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors="pt")

task = 'retrieval.query'
task_id = model._adaptation_map[task]
adapter_mask = torch.full((len(sentences),), task_id, dtype=torch.int32)

with torch.no_grad():
    model_output = model(**encoded_input, adapter_mask=adapter_mask)

embeddings = mean_pooling(model_output, encoded_input["attention_mask"])
embeddings = F.normalize(embeddings, p=2, dim=1)
```
</p>
</details>

The easiest way to start using `jina-embeddings-v3` is with the [Jina Embedding API](https://jina.ai/embeddings/).

Alternatively, you can use `jina-embeddings-v3` directly via the Transformers package:

```bash
!pip install transformers torch einops
!pip install 'numpy<2'
```

If you run it on a GPU that supports [FlashAttention-2](https://github.com/Dao-AILab/flash-attention) (as of 2024-09-12, that means Ampere, Ada, or Hopper GPUs, e.g., A100, RTX 3090, RTX 4090, H100), install it as well:

```bash
!pip install flash-attn --no-build-isolation
```

```python
from transformers import AutoModel

# Initialize the model
model = AutoModel.from_pretrained("jinaai/jina-embeddings-v3", trust_remote_code=True)

texts = [
    "Follow the white rabbit.",  # English
    "Sigue al conejo blanco.",  # Spanish
    "Suis le lapin blanc.",  # French
    "跟着白兔走。",  # Chinese
    "اتبع الأرنب الأبيض.",  # Arabic
    "Folge dem weißen Kaninchen.",  # German
]

# When calling the `encode` function, you can choose a `task` based on the use case:
# 'retrieval.query', 'retrieval.passage', 'separation', 'classification', 'text-matching'
# Alternatively, you can choose not to pass a `task`, and no specific LoRA adapter will be used.
embeddings = model.encode(texts, task="text-matching")

# Compute similarities
print(embeddings[0] @ embeddings[1].T)
```

By default, the model supports a maximum sequence length of 8192 tokens. However, if you want to truncate your input texts to a shorter length, you can pass the `max_length` parameter to the `encode` function:

```python
embeddings = model.encode(["Very long ... document"], max_length=2048)
```

In case you want to use **Matryoshka embeddings** and switch to a different dimension, you can adjust it by passing the `truncate_dim` parameter to the `encode` function:

```python
embeddings = model.encode(['Sample text'], truncate_dim=256)
```

The latest version (3.1.0) of [SentenceTransformers](https://github.com/UKPLab/sentence-transformers) also supports `jina-embeddings-v3`:

```bash
!pip install -U sentence-transformers
```

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("jinaai/jina-embeddings-v3", trust_remote_code=True)
task = "retrieval.query"
embeddings = model.encode(
    ["What is the weather like in Berlin today?"],
    task=task,
    prompt_name=task,
)
```

You can fine-tune `jina-embeddings-v3` using [SentenceTransformerTrainer](https://sbert.net/docs/package_reference/sentence_transformer/trainer.html). To fine-tune for a specific task, you should set the task before passing the model to the ST Trainer, either during initialization:

```python
model = SentenceTransformer("jinaai/jina-embeddings-v3", trust_remote_code=True, model_kwargs={'default_task': 'classification'})
```

Or afterwards:

```python
model = SentenceTransformer("jinaai/jina-embeddings-v3", trust_remote_code=True)
model[0].default_task = 'classification'
```

This way you can fine-tune the LoRA adapter for the chosen task (a minimal training sketch is included at the end of this card).

However, if you want to fine-tune the entire model, make sure the main parameters are set as trainable when loading the model:

```python
model = SentenceTransformer("jinaai/jina-embeddings-v3", trust_remote_code=True, model_kwargs={'lora_main_params_trainable': True})
```

This will allow fine-tuning the whole model instead of just the LoRA adapters.

**<details><summary>ONNX Inference.</summary>**
<p>

You can use ONNX for efficient inference with `jina-embeddings-v3`:

```python
import onnxruntime
import numpy as np
from transformers import AutoTokenizer, PretrainedConfig

# Mean pool function
def mean_pooling(model_output: np.ndarray, attention_mask: np.ndarray):
    token_embeddings = model_output
    input_mask_expanded = np.expand_dims(attention_mask, axis=-1)
    input_mask_expanded = np.broadcast_to(input_mask_expanded, token_embeddings.shape)
    sum_embeddings = np.sum(token_embeddings * input_mask_expanded, axis=1)
    sum_mask = np.clip(np.sum(input_mask_expanded, axis=1), a_min=1e-9, a_max=None)
    return sum_embeddings / sum_mask

# Load tokenizer and model config
tokenizer = AutoTokenizer.from_pretrained('jinaai/jina-embeddings-v3')
config = PretrainedConfig.from_pretrained('jinaai/jina-embeddings-v3')

# Tokenize input
input_text = tokenizer('sample text', return_tensors='np')

# ONNX session
model_path = 'jina-embeddings-v3/onnx/model.onnx'
session = onnxruntime.InferenceSession(model_path)

# Prepare inputs for ONNX model
task_type = 'text-matching'
task_id = np.array(config.lora_adaptations.index(task_type), dtype=np.int64)
inputs = {
    'input_ids': input_text['input_ids'],
    'attention_mask': input_text['attention_mask'],
    'task_id': task_id
}

# Run model
outputs = session.run(None, inputs)[0]

# Apply mean pooling and normalization to the model outputs
embeddings = mean_pooling(outputs, input_text["attention_mask"])
embeddings = embeddings / np.linalg.norm(embeddings, ord=2, axis=1, keepdims=True)
```
</p>
</details>

## Contact

Join our [Discord community](https://discord.jina.ai) and chat with other community members about ideas.

## License

`jina-embeddings-v3` is listed on AWS & Azure. If you need to use it beyond those platforms or on-premises within your company, note that the model is licensed under CC BY-NC 4.0. For commercial usage inquiries, feel free to [contact us](https://jina.ai/contact-sales/).

## Citation

If you find `jina-embeddings-v3` useful in your research, please cite the following paper:

```bibtex
@misc{sturua2024jinaembeddingsv3multilingualembeddingstask,
      title={jina-embeddings-v3: Multilingual Embeddings With Task LoRA},
      author={Saba Sturua and Isabelle Mohr and Mohammad Kalim Akram and Michael Günther and Bo Wang and Markus Krimmel and Feng Wang and Georgios Mastrapas and Andreas Koukounas and Nan Wang and Han Xiao},
      year={2024},
      eprint={2409.10173},
      archivePrefix={arXiv},
      primaryClass={cs.CL},
      url={https://arxiv.org/abs/2409.10173},
}
```
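As a companion to the fine-tuning notes in the Usage section above, here is a minimal training sketch. It is not an official recipe: the toy anchor/positive pairs, the `retrieval.query` adapter choice, and the use of `MultipleNegativesRankingLoss` are illustrative assumptions; substitute your own dataset, task, and loss.

```python
# Minimal fine-tuning sketch (illustrative assumptions: toy dataset, task choice, loss).
from datasets import Dataset
from sentence_transformers import SentenceTransformer, SentenceTransformerTrainer
from sentence_transformers.losses import MultipleNegativesRankingLoss

# Load the model with the LoRA adapter to be tuned set as the default task.
model = SentenceTransformer(
    "jinaai/jina-embeddings-v3",
    trust_remote_code=True,
    model_kwargs={"default_task": "retrieval.query"},
)

# Toy (anchor, positive) pairs; replace with your own training data.
train_dataset = Dataset.from_dict(
    {
        "anchor": ["How is the weather today?", "Where is Berlin located?"],
        "positive": [
            "What is the current weather like today?",
            "Berlin lies in north-eastern Germany.",
        ],
    }
)

# In-batch negatives contrastive loss over the (anchor, positive) pairs.
loss = MultipleNegativesRankingLoss(model)

trainer = SentenceTransformerTrainer(model=model, train_dataset=train_dataset, loss=loss)
trainer.train()
```

`MultipleNegativesRankingLoss` treats the other positives in a batch as negatives, which is a common default for training retrieval embeddings on (query, passage) pairs.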
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
moussaKam/barthez-orangesum-title
moussaKam
summarization
[ "transformers", "pytorch", "safetensors", "mbart", "text2text-generation", "summarization", "fr", "arxiv:2010.12321", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2022-03-02T23:29:05
2024-12-17T10:21:25
171
3
---
language:
- fr
license: apache-2.0
tags:
- summarization
widget:
- text: Citant les préoccupations de ses clients dénonçant des cas de censure après la suppression du compte de Trump, un fournisseur d'accès Internet de l'État de l'Idaho a décidé de bloquer Facebook et Twitter. La mesure ne concernera cependant que les clients mécontents de la politique de ces réseaux sociaux.
---

### Barthez model finetuned on OrangeSum (title generation)

finetuning: examples/seq2seq/ (as of Nov 06, 2020)

Metrics: ROUGE-2 > 23

paper: https://arxiv.org/abs/2010.12321 \
github: https://github.com/moussaKam/BARThez

```
@article{eddine2020barthez,
  title={BARThez: a Skilled Pretrained French Sequence-to-Sequence Model},
  author={Eddine, Moussa Kamal and Tixier, Antoine J-P and Vazirgiannis, Michalis},
  journal={arXiv preprint arXiv:2010.12321},
  year={2020}
}
```
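Since the card itself does not include a usage snippet, here is a minimal, hypothetical example (not from the original card) that generates a French title with the Transformers summarization pipeline; the sample text reuses the widget example above, and `max_length=32` is an illustrative choice.

```python
# Hypothetical usage sketch (not from the original card): French title generation
# with the Transformers summarization pipeline.
from transformers import pipeline

summarizer = pipeline("summarization", model="moussaKam/barthez-orangesum-title")

article = (
    "Citant les préoccupations de ses clients dénonçant des cas de censure après la "
    "suppression du compte de Trump, un fournisseur d'accès Internet de l'État de "
    "l'Idaho a décidé de bloquer Facebook et Twitter."
)

# The pipeline returns a list of dicts with a "summary_text" field.
print(summarizer(article, max_length=32)[0]["summary_text"])
```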
[ "SUMMARIZATION" ]
[ "CAS" ]
gety-ai/granite-embedding-107m-multilingual-onnx
gety-ai
sentence-similarity
[ "transformers", "onnx", "xlm-roberta", "feature-extraction", "language", "granite", "embeddings", "multilingual", "mteb", "sentence-similarity", "en", "ar", "cs", "de", "es", "fr", "it", "ja", "ko", "nl", "pt", "zh", "arxiv:0000.00000", "license:apache-2.0", "model-index", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
2025-02-19T08:42:07
2025-02-19T09:04:16
171
1
--- language: - en - ar - cs - de - es - fr - it - ja - ko - nl - pt - zh library_name: transformers license: apache-2.0 pipeline_tag: sentence-similarity tags: - language - granite - embeddings - multilingual - mteb model-index: - name: ibm-granite/granite-embedding-107m-multilingual results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en-ext) type: mteb/amazon_counterfactual config: en-ext split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 72.7136 - type: f1 value: 60.44540000000001 - type: f1_weighted value: 77.8541 - type: ap value: 22.4958 - type: ap_weighted value: 22.4958 - type: main_score value: 72.7136 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 71.6716 - type: f1 value: 65.4221 - type: f1_weighted value: 74.3533 - type: ap value: 33.7567 - type: ap_weighted value: 33.7567 - type: main_score value: 71.6716 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification (default) type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 66.5804 - type: f1 value: 66.2191 - type: f1_weighted value: 66.2191 - type: ap value: 61.340799999999994 - type: ap_weighted value: 61.340799999999994 - type: main_score value: 66.5804 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 36.412 - type: f1 value: 35.633199999999995 - type: f1_weighted value: 35.633199999999995 - type: main_score value: 36.412 - task: type: Retrieval dataset: name: MTEB AppsRetrieval (default) type: CoIR-Retrieval/apps config: default split: test revision: f22508f96b7a36c2415181ed8bb76f76e04ae2d5 metrics: - type: ndcg_at_1 value: 2.39 - type: ndcg_at_3 value: 3.527 - type: ndcg_at_5 value: 3.9759999999999995 - type: ndcg_at_10 value: 4.537 - type: ndcg_at_20 value: 5.140000000000001 - type: ndcg_at_100 value: 6.526 - type: ndcg_at_1000 value: 9.797 - type: map_at_1 value: 2.39 - type: map_at_3 value: 3.2489999999999997 - type: map_at_5 value: 3.499 - type: map_at_10 value: 3.7220000000000004 - type: map_at_20 value: 3.887 - type: map_at_100 value: 4.058 - type: map_at_1000 value: 4.146 - type: recall_at_1 value: 2.39 - type: recall_at_3 value: 4.329000000000001 - type: recall_at_5 value: 5.418 - type: recall_at_10 value: 7.198 - type: recall_at_20 value: 9.588000000000001 - type: recall_at_100 value: 17.371 - type: recall_at_1000 value: 45.206 - type: precision_at_1 value: 2.39 - type: precision_at_3 value: 1.443 - type: precision_at_5 value: 1.084 - type: precision_at_10 value: 0.72 - type: precision_at_20 value: 0.479 - type: precision_at_100 value: 0.174 - type: precision_at_1000 value: 0.045 - type: mrr_at_1 value: 2.3904 - type: mrr_at_3 value: 3.2492 - type: mrr_at_5 value: 3.4989 - type: mrr_at_10 value: 3.7220000000000004 - type: mrr_at_20 value: 3.8869000000000002 - type: mrr_at_100 value: 4.0578 - type: mrr_at_1000 value: 4.1463 - type: nauc_ndcg_at_1_max value: 37.599700000000006 - type: nauc_ndcg_at_1_std value: 20.302899999999998 - type: nauc_ndcg_at_1_diff1 value: 40.4987 - type: nauc_ndcg_at_3_max value: 31.119400000000002 - type: nauc_ndcg_at_3_std value: 11.7335 - type: 
nauc_ndcg_at_3_diff1 value: 28.788000000000004 - type: nauc_ndcg_at_5_max value: 28.505399999999998 - type: nauc_ndcg_at_5_std value: 12.1402 - type: nauc_ndcg_at_5_diff1 value: 25.730900000000002 - type: nauc_ndcg_at_10_max value: 27.0656 - type: nauc_ndcg_at_10_std value: 12.648699999999998 - type: nauc_ndcg_at_10_diff1 value: 22.0832 - type: nauc_ndcg_at_20_max value: 25.953599999999998 - type: nauc_ndcg_at_20_std value: 12.550500000000001 - type: nauc_ndcg_at_20_diff1 value: 19.3722 - type: nauc_ndcg_at_100_max value: 23.268 - type: nauc_ndcg_at_100_std value: 12.8176 - type: nauc_ndcg_at_100_diff1 value: 15.9275 - type: nauc_ndcg_at_1000_max value: 21.921499999999998 - type: nauc_ndcg_at_1000_std value: 12.656300000000002 - type: nauc_ndcg_at_1000_diff1 value: 13.9004 - type: nauc_map_at_1_max value: 37.599700000000006 - type: nauc_map_at_1_std value: 20.302899999999998 - type: nauc_map_at_1_diff1 value: 40.4987 - type: nauc_map_at_3_max value: 32.2818 - type: nauc_map_at_3_std value: 13.276399999999999 - type: nauc_map_at_3_diff1 value: 30.9064 - type: nauc_map_at_5_max value: 30.5166 - type: nauc_map_at_5_std value: 13.406 - type: nauc_map_at_5_diff1 value: 28.8213 - type: nauc_map_at_10_max value: 29.731999999999996 - type: nauc_map_at_10_std value: 13.5688 - type: nauc_map_at_10_diff1 value: 26.888499999999997 - type: nauc_map_at_20_max value: 29.211399999999998 - type: nauc_map_at_20_std value: 13.4739 - type: nauc_map_at_20_diff1 value: 25.6814 - type: nauc_map_at_100_max value: 28.578300000000002 - type: nauc_map_at_100_std value: 13.5385 - type: nauc_map_at_100_diff1 value: 24.793100000000003 - type: nauc_map_at_1000_max value: 28.3912 - type: nauc_map_at_1000_std value: 13.5039 - type: nauc_map_at_1000_diff1 value: 24.570600000000002 - type: nauc_recall_at_1_max value: 37.599700000000006 - type: nauc_recall_at_1_std value: 20.302899999999998 - type: nauc_recall_at_1_diff1 value: 40.4987 - type: nauc_recall_at_3_max value: 28.598000000000003 - type: nauc_recall_at_3_std value: 8.3847 - type: nauc_recall_at_3_diff1 value: 24.1871 - type: nauc_recall_at_5_max value: 24.5381 - type: nauc_recall_at_5_std value: 9.8274 - type: nauc_recall_at_5_diff1 value: 19.6821 - type: nauc_recall_at_10_max value: 22.5445 - type: nauc_recall_at_10_std value: 11.4415 - type: nauc_recall_at_10_diff1 value: 13.8268 - type: nauc_recall_at_20_max value: 21.3196 - type: nauc_recall_at_20_std value: 11.5932 - type: nauc_recall_at_20_diff1 value: 10.1991 - type: nauc_recall_at_100_max value: 16.9415 - type: nauc_recall_at_100_std value: 12.353200000000001 - type: nauc_recall_at_100_diff1 value: 5.7534 - type: nauc_recall_at_1000_max value: 15.9223 - type: nauc_recall_at_1000_std value: 12.2848 - type: nauc_recall_at_1000_diff1 value: 3.5477000000000003 - type: nauc_precision_at_1_max value: 37.599700000000006 - type: nauc_precision_at_1_std value: 20.302899999999998 - type: nauc_precision_at_1_diff1 value: 40.4987 - type: nauc_precision_at_3_max value: 28.598000000000003 - type: nauc_precision_at_3_std value: 8.3847 - type: nauc_precision_at_3_diff1 value: 24.1871 - type: nauc_precision_at_5_max value: 24.5381 - type: nauc_precision_at_5_std value: 9.8274 - type: nauc_precision_at_5_diff1 value: 19.6821 - type: nauc_precision_at_10_max value: 22.5445 - type: nauc_precision_at_10_std value: 11.4415 - type: nauc_precision_at_10_diff1 value: 13.8268 - type: nauc_precision_at_20_max value: 21.3196 - type: nauc_precision_at_20_std value: 11.5932 - type: nauc_precision_at_20_diff1 value: 10.1991 - type: 
nauc_precision_at_100_max value: 16.9415 - type: nauc_precision_at_100_std value: 12.353200000000001 - type: nauc_precision_at_100_diff1 value: 5.7534 - type: nauc_precision_at_1000_max value: 15.9223 - type: nauc_precision_at_1000_std value: 12.2848 - type: nauc_precision_at_1000_diff1 value: 3.5477000000000003 - type: nauc_mrr_at_1_max value: 37.599700000000006 - type: nauc_mrr_at_1_std value: 20.302899999999998 - type: nauc_mrr_at_1_diff1 value: 40.4987 - type: nauc_mrr_at_3_max value: 32.2818 - type: nauc_mrr_at_3_std value: 13.276399999999999 - type: nauc_mrr_at_3_diff1 value: 30.9064 - type: nauc_mrr_at_5_max value: 30.5166 - type: nauc_mrr_at_5_std value: 13.406 - type: nauc_mrr_at_5_diff1 value: 28.8213 - type: nauc_mrr_at_10_max value: 29.731999999999996 - type: nauc_mrr_at_10_std value: 13.5688 - type: nauc_mrr_at_10_diff1 value: 26.888499999999997 - type: nauc_mrr_at_20_max value: 29.211399999999998 - type: nauc_mrr_at_20_std value: 13.4739 - type: nauc_mrr_at_20_diff1 value: 25.6814 - type: nauc_mrr_at_100_max value: 28.578300000000002 - type: nauc_mrr_at_100_std value: 13.5385 - type: nauc_mrr_at_100_diff1 value: 24.793100000000003 - type: nauc_mrr_at_1000_max value: 28.3912 - type: nauc_mrr_at_1000_std value: 13.5039 - type: nauc_mrr_at_1000_diff1 value: 24.570600000000002 - type: main_score value: 4.537 - task: type: Retrieval dataset: name: MTEB ArguAna (default) type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: ndcg_at_1 value: 27.168999999999997 - type: ndcg_at_3 value: 41.08 - type: ndcg_at_5 value: 46.375 - type: ndcg_at_10 value: 51.663000000000004 - type: ndcg_at_20 value: 54.339999999999996 - type: ndcg_at_100 value: 55.656000000000006 - type: ndcg_at_1000 value: 55.875 - type: map_at_1 value: 27.168999999999997 - type: map_at_3 value: 37.482 - type: map_at_5 value: 40.416000000000004 - type: map_at_10 value: 42.624 - type: map_at_20 value: 43.376999999999995 - type: map_at_100 value: 43.578 - type: map_at_1000 value: 43.588 - type: recall_at_1 value: 27.168999999999997 - type: recall_at_3 value: 51.565000000000005 - type: recall_at_5 value: 64.43799999999999 - type: recall_at_10 value: 80.654 - type: recall_at_20 value: 91.11 - type: recall_at_100 value: 97.937 - type: recall_at_1000 value: 99.57300000000001 - type: precision_at_1 value: 27.168999999999997 - type: precision_at_3 value: 17.188 - type: precision_at_5 value: 12.888 - type: precision_at_10 value: 8.065 - type: precision_at_20 value: 4.555 - type: precision_at_100 value: 0.979 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 27.6671 - type: mrr_at_3 value: 37.6245 - type: mrr_at_5 value: 40.6188 - type: mrr_at_10 value: 42.8016 - type: mrr_at_20 value: 43.5582 - type: mrr_at_100 value: 43.7551 - type: mrr_at_1000 value: 43.765 - type: nauc_ndcg_at_1_max value: -4.3233 - type: nauc_ndcg_at_1_std value: -3.5458000000000003 - type: nauc_ndcg_at_1_diff1 value: 10.8118 - type: nauc_ndcg_at_3_max value: -1.1566 - type: nauc_ndcg_at_3_std value: -2.5897 - type: nauc_ndcg_at_3_diff1 value: 8.3298 - type: nauc_ndcg_at_5_max value: -1.399 - type: nauc_ndcg_at_5_std value: -1.9604 - type: nauc_ndcg_at_5_diff1 value: 7.6803 - type: nauc_ndcg_at_10_max value: 0.7746000000000001 - type: nauc_ndcg_at_10_std value: -0.9521 - type: nauc_ndcg_at_10_diff1 value: 9.1107 - type: nauc_ndcg_at_20_max value: 1.0111999999999999 - type: nauc_ndcg_at_20_std value: 0.1519 - type: nauc_ndcg_at_20_diff1 value: 9.5802 - type: nauc_ndcg_at_100_max value: 
-0.3616 - type: nauc_ndcg_at_100_std value: -0.6704 - type: nauc_ndcg_at_100_diff1 value: 9.2401 - type: nauc_ndcg_at_1000_max value: -0.6766 - type: nauc_ndcg_at_1000_std value: -1.0513 - type: nauc_ndcg_at_1000_diff1 value: 9.0561 - type: nauc_map_at_1_max value: -4.3233 - type: nauc_map_at_1_std value: -3.5458000000000003 - type: nauc_map_at_1_diff1 value: 10.8118 - type: nauc_map_at_3_max value: -1.9845000000000002 - type: nauc_map_at_3_std value: -2.6683 - type: nauc_map_at_3_diff1 value: 8.7329 - type: nauc_map_at_5_max value: -2.1342 - type: nauc_map_at_5_std value: -2.3612 - type: nauc_map_at_5_diff1 value: 8.4139 - type: nauc_map_at_10_max value: -1.331 - type: nauc_map_at_10_std value: -1.982 - type: nauc_map_at_10_diff1 value: 9.004199999999999 - type: nauc_map_at_20_max value: -1.3376000000000001 - type: nauc_map_at_20_std value: -1.7424 - type: nauc_map_at_20_diff1 value: 9.1012 - type: nauc_map_at_100_max value: -1.5152 - type: nauc_map_at_100_std value: -1.8418 - type: nauc_map_at_100_diff1 value: 9.0513 - type: nauc_map_at_1000_max value: -1.5264 - type: nauc_map_at_1000_std value: -1.8530000000000002 - type: nauc_map_at_1000_diff1 value: 9.043800000000001 - type: nauc_recall_at_1_max value: -4.3233 - type: nauc_recall_at_1_std value: -3.5458000000000003 - type: nauc_recall_at_1_diff1 value: 10.8118 - type: nauc_recall_at_3_max value: 1.2361 - type: nauc_recall_at_3_std value: -2.4248 - type: nauc_recall_at_3_diff1 value: 7.2543 - type: nauc_recall_at_5_max value: 0.9835999999999999 - type: nauc_recall_at_5_std value: -0.5726 - type: nauc_recall_at_5_diff1 value: 5.2376 - type: nauc_recall_at_10_max value: 12.7099 - type: nauc_recall_at_10_std value: 4.9688 - type: nauc_recall_at_10_diff1 value: 10.5016 - type: nauc_recall_at_20_max value: 28.2615 - type: nauc_recall_at_20_std value: 23.7662 - type: nauc_recall_at_20_diff1 value: 17.6392 - type: nauc_recall_at_100_max value: 31.295099999999998 - type: nauc_recall_at_100_std value: 47.1556 - type: nauc_recall_at_100_diff1 value: 24.055699999999998 - type: nauc_recall_at_1000_max value: 14.418000000000001 - type: nauc_recall_at_1000_std value: 56.899699999999996 - type: nauc_recall_at_1000_diff1 value: 3.7199999999999998 - type: nauc_precision_at_1_max value: -4.3233 - type: nauc_precision_at_1_std value: -3.5458000000000003 - type: nauc_precision_at_1_diff1 value: 10.8118 - type: nauc_precision_at_3_max value: 1.2361 - type: nauc_precision_at_3_std value: -2.4248 - type: nauc_precision_at_3_diff1 value: 7.2543 - type: nauc_precision_at_5_max value: 0.9835999999999999 - type: nauc_precision_at_5_std value: -0.5726 - type: nauc_precision_at_5_diff1 value: 5.2376 - type: nauc_precision_at_10_max value: 12.7099 - type: nauc_precision_at_10_std value: 4.9688 - type: nauc_precision_at_10_diff1 value: 10.5016 - type: nauc_precision_at_20_max value: 28.2615 - type: nauc_precision_at_20_std value: 23.7662 - type: nauc_precision_at_20_diff1 value: 17.6392 - type: nauc_precision_at_100_max value: 31.295099999999998 - type: nauc_precision_at_100_std value: 47.1556 - type: nauc_precision_at_100_diff1 value: 24.055699999999998 - type: nauc_precision_at_1000_max value: 14.418000000000001 - type: nauc_precision_at_1000_std value: 56.899699999999996 - type: nauc_precision_at_1000_diff1 value: 3.7199999999999998 - type: nauc_mrr_at_1_max value: -4.2285 - type: nauc_mrr_at_1_std value: -2.9951 - type: nauc_mrr_at_1_diff1 value: 9.2226 - type: nauc_mrr_at_3_max value: -2.8361 - type: nauc_mrr_at_3_std value: -2.5372 - type: nauc_mrr_at_3_diff1 
value: 7.205 - type: nauc_mrr_at_5_max value: -2.827 - type: nauc_mrr_at_5_std value: -2.1469 - type: nauc_mrr_at_5_diff1 value: 6.9564 - type: nauc_mrr_at_10_max value: -2.0531 - type: nauc_mrr_at_10_std value: -1.8227 - type: nauc_mrr_at_10_diff1 value: 7.500500000000001 - type: nauc_mrr_at_20_max value: -2.0823 - type: nauc_mrr_at_20_std value: -1.585 - type: nauc_mrr_at_20_diff1 value: 7.5577000000000005 - type: nauc_mrr_at_100_max value: -2.2609 - type: nauc_mrr_at_100_std value: -1.6787 - type: nauc_mrr_at_100_diff1 value: 7.500500000000001 - type: nauc_mrr_at_1000_max value: -2.2721999999999998 - type: nauc_mrr_at_1000_std value: -1.6898 - type: nauc_mrr_at_1000_diff1 value: 7.492400000000001 - type: main_score value: 51.663000000000004 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P (default) type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 41.4944 - type: v_measure_std value: 13.6458 - type: main_score value: 41.4944 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S (default) type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 30.6155 - type: v_measure_std value: 14.377999999999998 - type: main_score value: 30.6155 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions (default) type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 61.9001 - type: mrr value: 77.0427 - type: nAUC_map_max value: 27.7273 - type: nAUC_map_std value: 14.369299999999999 - type: nAUC_map_diff1 value: 10.7899 - type: nAUC_mrr_max value: 35.606100000000005 - type: nAUC_mrr_std value: 20.2621 - type: nAUC_mrr_diff1 value: 17.814 - type: main_score value: 61.9001 - task: type: STS dataset: name: MTEB BIOSSES (default) type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: pearson value: 81.5558 - type: spearman value: 79.2952 - type: cosine_pearson value: 81.5558 - type: cosine_spearman value: 79.2952 - type: manhattan_pearson value: 79.4434 - type: manhattan_spearman value: 78.803 - type: euclidean_pearson value: 80.0336 - type: euclidean_spearman value: 79.2952 - type: main_score value: 79.2952 - task: type: Classification dataset: name: MTEB Banking77Classification (default) type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 75.9481 - type: f1 value: 74.9851 - type: f1_weighted value: 74.9851 - type: main_score value: 75.9481 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P (default) type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 35.6038 - type: v_measure_std value: 0.5428999999999999 - type: main_score value: 35.6038 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S (default) type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 28.3652 - type: v_measure_std value: 1.0767 - type: main_score value: 28.3652 - task: type: Retrieval dataset: name: MTEB COIRCodeSearchNetRetrieval (python) type: CoIR-Retrieval/CodeSearchNet config: python split: test revision: 4adc7bc41202b5c13543c9c886a25f340634dab3 metrics: - type: 
ndcg_at_1 value: 78.69699999999999 - type: ndcg_at_3 value: 82.666 - type: ndcg_at_5 value: 83.607 - type: ndcg_at_10 value: 84.407 - type: ndcg_at_20 value: 84.92699999999999 - type: ndcg_at_100 value: 85.641 - type: ndcg_at_1000 value: 85.978 - type: map_at_1 value: 78.69699999999999 - type: map_at_3 value: 81.723 - type: map_at_5 value: 82.245 - type: map_at_10 value: 82.577 - type: map_at_20 value: 82.722 - type: map_at_100 value: 82.821 - type: map_at_1000 value: 82.834 - type: recall_at_1 value: 78.69699999999999 - type: recall_at_3 value: 85.38 - type: recall_at_5 value: 87.666 - type: recall_at_10 value: 90.133 - type: recall_at_20 value: 92.171 - type: recall_at_100 value: 96.012 - type: recall_at_1000 value: 98.68599999999999 - type: precision_at_1 value: 78.69699999999999 - type: precision_at_3 value: 28.46 - type: precision_at_5 value: 17.533 - type: precision_at_10 value: 9.013 - type: precision_at_20 value: 4.609 - type: precision_at_100 value: 0.96 - type: precision_at_1000 value: 0.099 - type: mrr_at_1 value: 78.7036 - type: mrr_at_3 value: 81.7223 - type: mrr_at_5 value: 82.24719999999999 - type: mrr_at_10 value: 82.5792 - type: mrr_at_20 value: 82.72460000000001 - type: mrr_at_100 value: 82.82350000000001 - type: mrr_at_1000 value: 82.8357 - type: nauc_ndcg_at_1_max value: 84.22319999999999 - type: nauc_ndcg_at_1_std value: 23.538999999999998 - type: nauc_ndcg_at_1_diff1 value: 90.73750000000001 - type: nauc_ndcg_at_3_max value: 85.0914 - type: nauc_ndcg_at_3_std value: 25.0172 - type: nauc_ndcg_at_3_diff1 value: 89.3858 - type: nauc_ndcg_at_5_max value: 84.9112 - type: nauc_ndcg_at_5_std value: 25.732899999999997 - type: nauc_ndcg_at_5_diff1 value: 89.1327 - type: nauc_ndcg_at_10_max value: 84.6806 - type: nauc_ndcg_at_10_std value: 26.488 - type: nauc_ndcg_at_10_diff1 value: 88.83879999999999 - type: nauc_ndcg_at_20_max value: 84.8315 - type: nauc_ndcg_at_20_std value: 26.9453 - type: nauc_ndcg_at_20_diff1 value: 88.9755 - type: nauc_ndcg_at_100_max value: 84.924 - type: nauc_ndcg_at_100_std value: 26.9297 - type: nauc_ndcg_at_100_diff1 value: 89.1861 - type: nauc_ndcg_at_1000_max value: 84.9058 - type: nauc_ndcg_at_1000_std value: 26.5904 - type: nauc_ndcg_at_1000_diff1 value: 89.2659 - type: nauc_map_at_1_max value: 84.22319999999999 - type: nauc_map_at_1_std value: 23.538999999999998 - type: nauc_map_at_1_diff1 value: 90.73750000000001 - type: nauc_map_at_3_max value: 84.9005 - type: nauc_map_at_3_std value: 24.622 - type: nauc_map_at_3_diff1 value: 89.74069999999999 - type: nauc_map_at_5_max value: 84.8017 - type: nauc_map_at_5_std value: 24.9739 - type: nauc_map_at_5_diff1 value: 89.61970000000001 - type: nauc_map_at_10_max value: 84.7091 - type: nauc_map_at_10_std value: 25.223699999999997 - type: nauc_map_at_10_diff1 value: 89.51639999999999 - type: nauc_map_at_20_max value: 84.7458 - type: nauc_map_at_20_std value: 25.3151 - type: nauc_map_at_20_diff1 value: 89.5589 - type: nauc_map_at_100_max value: 84.75930000000001 - type: nauc_map_at_100_std value: 25.318099999999998 - type: nauc_map_at_100_diff1 value: 89.58850000000001 - type: nauc_map_at_1000_max value: 84.75880000000001 - type: nauc_map_at_1000_std value: 25.3086 - type: nauc_map_at_1000_diff1 value: 89.591 - type: nauc_recall_at_1_max value: 84.22319999999999 - type: nauc_recall_at_1_std value: 23.538999999999998 - type: nauc_recall_at_1_diff1 value: 90.73750000000001 - type: nauc_recall_at_3_max value: 85.7389 - type: nauc_recall_at_3_std value: 26.4015 - type: nauc_recall_at_3_diff1 value: 88.1462 - 
type: nauc_recall_at_5_max value: 85.2854 - type: nauc_recall_at_5_std value: 28.9065 - type: nauc_recall_at_5_diff1 value: 87.15039999999999 - type: nauc_recall_at_10_max value: 84.3391 - type: nauc_recall_at_10_std value: 33.2602 - type: nauc_recall_at_10_diff1 value: 85.3733 - type: nauc_recall_at_20_max value: 85.3385 - type: nauc_recall_at_20_std value: 38.4429 - type: nauc_recall_at_20_diff1 value: 85.40299999999999 - type: nauc_recall_at_100_max value: 87.3325 - type: nauc_recall_at_100_std value: 48.357 - type: nauc_recall_at_100_diff1 value: 85.7283 - type: nauc_recall_at_1000_max value: 90.63419999999999 - type: nauc_recall_at_1000_std value: 63.489399999999996 - type: nauc_recall_at_1000_diff1 value: 85.1443 - type: nauc_precision_at_1_max value: 84.22319999999999 - type: nauc_precision_at_1_std value: 23.538999999999998 - type: nauc_precision_at_1_diff1 value: 90.73750000000001 - type: nauc_precision_at_3_max value: 85.7389 - type: nauc_precision_at_3_std value: 26.4015 - type: nauc_precision_at_3_diff1 value: 88.1462 - type: nauc_precision_at_5_max value: 85.2854 - type: nauc_precision_at_5_std value: 28.9065 - type: nauc_precision_at_5_diff1 value: 87.15039999999999 - type: nauc_precision_at_10_max value: 84.3391 - type: nauc_precision_at_10_std value: 33.2602 - type: nauc_precision_at_10_diff1 value: 85.3733 - type: nauc_precision_at_20_max value: 85.3385 - type: nauc_precision_at_20_std value: 38.4429 - type: nauc_precision_at_20_diff1 value: 85.40299999999999 - type: nauc_precision_at_100_max value: 87.3325 - type: nauc_precision_at_100_std value: 48.357 - type: nauc_precision_at_100_diff1 value: 85.7283 - type: nauc_precision_at_1000_max value: 90.63419999999999 - type: nauc_precision_at_1000_std value: 63.489399999999996 - type: nauc_precision_at_1000_diff1 value: 85.1443 - type: nauc_mrr_at_1_max value: 84.1909 - type: nauc_mrr_at_1_std value: 23.5506 - type: nauc_mrr_at_1_diff1 value: 90.7257 - type: nauc_mrr_at_3_max value: 84.883 - type: nauc_mrr_at_3_std value: 24.630499999999998 - type: nauc_mrr_at_3_diff1 value: 89.7361 - type: nauc_mrr_at_5_max value: 84.783 - type: nauc_mrr_at_5_std value: 24.9813 - type: nauc_mrr_at_5_diff1 value: 89.6132 - type: nauc_mrr_at_10_max value: 84.6899 - type: nauc_mrr_at_10_std value: 25.230200000000004 - type: nauc_mrr_at_10_diff1 value: 89.5099 - type: nauc_mrr_at_20_max value: 84.7264 - type: nauc_mrr_at_20_std value: 25.3216 - type: nauc_mrr_at_20_diff1 value: 89.5523 - type: nauc_mrr_at_100_max value: 84.7398 - type: nauc_mrr_at_100_std value: 25.324799999999996 - type: nauc_mrr_at_100_diff1 value: 89.5818 - type: nauc_mrr_at_1000_max value: 84.7393 - type: nauc_mrr_at_1000_std value: 25.315199999999997 - type: nauc_mrr_at_1000_diff1 value: 89.5843 - type: main_score value: 84.407 - task: type: Retrieval dataset: name: MTEB COIRCodeSearchNetRetrieval (javascript) type: CoIR-Retrieval/CodeSearchNet config: javascript split: test revision: 4adc7bc41202b5c13543c9c886a25f340634dab3 metrics: - type: ndcg_at_1 value: 22.03 - type: ndcg_at_3 value: 27.577 - type: ndcg_at_5 value: 29.465000000000003 - type: ndcg_at_10 value: 31.297000000000004 - type: ndcg_at_20 value: 32.666000000000004 - type: ndcg_at_100 value: 34.905 - type: ndcg_at_1000 value: 37.126 - type: map_at_1 value: 22.03 - type: map_at_3 value: 26.208 - type: map_at_5 value: 27.255000000000003 - type: map_at_10 value: 28.014 - type: map_at_20 value: 28.394000000000002 - type: map_at_100 value: 28.676000000000002 - type: map_at_1000 value: 28.747 - type: recall_at_1 value: 
22.03 - type: recall_at_3 value: 31.541000000000004 - type: recall_at_5 value: 36.129 - type: recall_at_10 value: 41.781 - type: recall_at_20 value: 47.159 - type: recall_at_100 value: 59.648 - type: recall_at_1000 value: 77.818 - type: precision_at_1 value: 22.03 - type: precision_at_3 value: 10.514 - type: precision_at_5 value: 7.226000000000001 - type: precision_at_10 value: 4.178 - type: precision_at_20 value: 2.358 - type: precision_at_100 value: 0.596 - type: precision_at_1000 value: 0.078 - type: mrr_at_1 value: 22.029799999999998 - type: mrr_at_3 value: 26.2078 - type: mrr_at_5 value: 27.2546 - type: mrr_at_10 value: 28.0138 - type: mrr_at_20 value: 28.393800000000002 - type: mrr_at_100 value: 28.6755 - type: mrr_at_1000 value: 28.7473 - type: nauc_ndcg_at_1_max value: 43.7913 - type: nauc_ndcg_at_1_std value: 5.8822 - type: nauc_ndcg_at_1_diff1 value: 57.5892 - type: nauc_ndcg_at_3_max value: 43.6608 - type: nauc_ndcg_at_3_std value: 7.308199999999999 - type: nauc_ndcg_at_3_diff1 value: 50.727199999999996 - type: nauc_ndcg_at_5_max value: 43.540099999999995 - type: nauc_ndcg_at_5_std value: 8.2882 - type: nauc_ndcg_at_5_diff1 value: 49.7273 - type: nauc_ndcg_at_10_max value: 43.290800000000004 - type: nauc_ndcg_at_10_std value: 9.177 - type: nauc_ndcg_at_10_diff1 value: 48.6902 - type: nauc_ndcg_at_20_max value: 43.1726 - type: nauc_ndcg_at_20_std value: 9.9537 - type: nauc_ndcg_at_20_diff1 value: 48.2511 - type: nauc_ndcg_at_100_max value: 43.0801 - type: nauc_ndcg_at_100_std value: 11.2629 - type: nauc_ndcg_at_100_diff1 value: 47.7496 - type: nauc_ndcg_at_1000_max value: 43.0087 - type: nauc_ndcg_at_1000_std value: 11.3454 - type: nauc_ndcg_at_1000_diff1 value: 47.7628 - type: nauc_map_at_1_max value: 43.7913 - type: nauc_map_at_1_std value: 5.8822 - type: nauc_map_at_1_diff1 value: 57.5892 - type: nauc_map_at_3_max value: 43.623200000000004 - type: nauc_map_at_3_std value: 6.9021 - type: nauc_map_at_3_diff1 value: 52.296600000000005 - type: nauc_map_at_5_max value: 43.567099999999996 - type: nauc_map_at_5_std value: 7.4779 - type: nauc_map_at_5_diff1 value: 51.7259 - type: nauc_map_at_10_max value: 43.4204 - type: nauc_map_at_10_std value: 7.82 - type: nauc_map_at_10_diff1 value: 51.266 - type: nauc_map_at_20_max value: 43.3827 - type: nauc_map_at_20_std value: 8.0332 - type: nauc_map_at_20_diff1 value: 51.139599999999994 - type: nauc_map_at_100_max value: 43.3681 - type: nauc_map_at_100_std value: 8.205400000000001 - type: nauc_map_at_100_diff1 value: 51.054 - type: nauc_map_at_1000_max value: 43.3619 - type: nauc_map_at_1000_std value: 8.2009 - type: nauc_map_at_1000_diff1 value: 51.0517 - type: nauc_recall_at_1_max value: 43.7913 - type: nauc_recall_at_1_std value: 5.8822 - type: nauc_recall_at_1_diff1 value: 57.5892 - type: nauc_recall_at_3_max value: 43.7869 - type: nauc_recall_at_3_std value: 8.4252 - type: nauc_recall_at_3_diff1 value: 46.5268 - type: nauc_recall_at_5_max value: 43.4642 - type: nauc_recall_at_5_std value: 10.5594 - type: nauc_recall_at_5_diff1 value: 44.329800000000006 - type: nauc_recall_at_10_max value: 42.9497 - type: nauc_recall_at_10_std value: 13.252 - type: nauc_recall_at_10_diff1 value: 41.5027 - type: nauc_recall_at_20_max value: 42.5357 - type: nauc_recall_at_20_std value: 16.2323 - type: nauc_recall_at_20_diff1 value: 39.7814 - type: nauc_recall_at_100_max value: 41.963899999999995 - type: nauc_recall_at_100_std value: 24.3312 - type: nauc_recall_at_100_diff1 value: 36.321 - type: nauc_recall_at_1000_max value: 40.839999999999996 - type: 
nauc_recall_at_1000_std value: 32.861000000000004 - type: nauc_recall_at_1000_diff1 value: 30.7145 - type: nauc_precision_at_1_max value: 43.7913 - type: nauc_precision_at_1_std value: 5.8822 - type: nauc_precision_at_1_diff1 value: 57.5892 - type: nauc_precision_at_3_max value: 43.7869 - type: nauc_precision_at_3_std value: 8.4252 - type: nauc_precision_at_3_diff1 value: 46.5268 - type: nauc_precision_at_5_max value: 43.4642 - type: nauc_precision_at_5_std value: 10.5594 - type: nauc_precision_at_5_diff1 value: 44.329800000000006 - type: nauc_precision_at_10_max value: 42.9497 - type: nauc_precision_at_10_std value: 13.252 - type: nauc_precision_at_10_diff1 value: 41.5027 - type: nauc_precision_at_20_max value: 42.5357 - type: nauc_precision_at_20_std value: 16.2323 - type: nauc_precision_at_20_diff1 value: 39.7814 - type: nauc_precision_at_100_max value: 41.963899999999995 - type: nauc_precision_at_100_std value: 24.3312 - type: nauc_precision_at_100_diff1 value: 36.321 - type: nauc_precision_at_1000_max value: 40.839999999999996 - type: nauc_precision_at_1000_std value: 32.861000000000004 - type: nauc_precision_at_1000_diff1 value: 30.7145 - type: nauc_mrr_at_1_max value: 43.7913 - type: nauc_mrr_at_1_std value: 5.8822 - type: nauc_mrr_at_1_diff1 value: 57.5892 - type: nauc_mrr_at_3_max value: 43.623200000000004 - type: nauc_mrr_at_3_std value: 6.9021 - type: nauc_mrr_at_3_diff1 value: 52.296600000000005 - type: nauc_mrr_at_5_max value: 43.567099999999996 - type: nauc_mrr_at_5_std value: 7.4779 - type: nauc_mrr_at_5_diff1 value: 51.7259 - type: nauc_mrr_at_10_max value: 43.4204 - type: nauc_mrr_at_10_std value: 7.82 - type: nauc_mrr_at_10_diff1 value: 51.266 - type: nauc_mrr_at_20_max value: 43.3827 - type: nauc_mrr_at_20_std value: 8.0332 - type: nauc_mrr_at_20_diff1 value: 51.139599999999994 - type: nauc_mrr_at_100_max value: 43.3681 - type: nauc_mrr_at_100_std value: 8.2055 - type: nauc_mrr_at_100_diff1 value: 51.054100000000005 - type: nauc_mrr_at_1000_max value: 43.3619 - type: nauc_mrr_at_1000_std value: 8.2009 - type: nauc_mrr_at_1000_diff1 value: 51.0518 - type: main_score value: 31.297000000000004 - task: type: Retrieval dataset: name: MTEB COIRCodeSearchNetRetrieval (go) type: CoIR-Retrieval/CodeSearchNet config: go split: test revision: 4adc7bc41202b5c13543c9c886a25f340634dab3 metrics: - type: ndcg_at_1 value: 36.58 - type: ndcg_at_3 value: 44.633 - type: ndcg_at_5 value: 46.766000000000005 - type: ndcg_at_10 value: 48.921 - type: ndcg_at_20 value: 50.52100000000001 - type: ndcg_at_100 value: 52.795 - type: ndcg_at_1000 value: 54.291 - type: map_at_1 value: 36.58 - type: map_at_3 value: 42.666 - type: map_at_5 value: 43.852000000000004 - type: map_at_10 value: 44.744 - type: map_at_20 value: 45.188 - type: map_at_100 value: 45.493 - type: map_at_1000 value: 45.544000000000004 - type: recall_at_1 value: 36.58 - type: recall_at_3 value: 50.32 - type: recall_at_5 value: 55.491 - type: recall_at_10 value: 62.13999999999999 - type: recall_at_20 value: 68.431 - type: recall_at_100 value: 80.83 - type: recall_at_1000 value: 92.896 - type: precision_at_1 value: 36.58 - type: precision_at_3 value: 16.773 - type: precision_at_5 value: 11.097999999999999 - type: precision_at_10 value: 6.214 - type: precision_at_20 value: 3.422 - type: precision_at_100 value: 0.808 - type: precision_at_1000 value: 0.093 - type: mrr_at_1 value: 36.579699999999995 - type: mrr_at_3 value: 42.666 - type: mrr_at_5 value: 43.8517 - type: mrr_at_10 value: 44.7436 - type: mrr_at_20 value: 45.1875 - type: 
mrr_at_100 value: 45.493 - type: mrr_at_1000 value: 45.544200000000004 - type: nauc_ndcg_at_1_max value: 41.7601 - type: nauc_ndcg_at_1_std value: 4.5455000000000005 - type: nauc_ndcg_at_1_diff1 value: 58.6454 - type: nauc_ndcg_at_3_max value: 42.3992 - type: nauc_ndcg_at_3_std value: 6.3083 - type: nauc_ndcg_at_3_diff1 value: 52.4271 - type: nauc_ndcg_at_5_max value: 42.2462 - type: nauc_ndcg_at_5_std value: 6.8773 - type: nauc_ndcg_at_5_diff1 value: 51.75880000000001 - type: nauc_ndcg_at_10_max value: 41.7943 - type: nauc_ndcg_at_10_std value: 7.2982000000000005 - type: nauc_ndcg_at_10_diff1 value: 51.0016 - type: nauc_ndcg_at_20_max value: 41.5875 - type: nauc_ndcg_at_20_std value: 7.8825 - type: nauc_ndcg_at_20_diff1 value: 50.7648 - type: nauc_ndcg_at_100_max value: 41.6971 - type: nauc_ndcg_at_100_std value: 8.4077 - type: nauc_ndcg_at_100_diff1 value: 50.9386 - type: nauc_ndcg_at_1000_max value: 41.7837 - type: nauc_ndcg_at_1000_std value: 8.250300000000001 - type: nauc_ndcg_at_1000_diff1 value: 51.4691 - type: nauc_map_at_1_max value: 41.7601 - type: nauc_map_at_1_std value: 4.5455000000000005 - type: nauc_map_at_1_diff1 value: 58.6454 - type: nauc_map_at_3_max value: 42.2864 - type: nauc_map_at_3_std value: 5.8461 - type: nauc_map_at_3_diff1 value: 53.9381 - type: nauc_map_at_5_max value: 42.1957 - type: nauc_map_at_5_std value: 6.142 - type: nauc_map_at_5_diff1 value: 53.600300000000004 - type: nauc_map_at_10_max value: 42.005900000000004 - type: nauc_map_at_10_std value: 6.2986 - type: nauc_map_at_10_diff1 value: 53.296200000000006 - type: nauc_map_at_20_max value: 41.946099999999994 - type: nauc_map_at_20_std value: 6.452299999999999 - type: nauc_map_at_20_diff1 value: 53.2485 - type: nauc_map_at_100_max value: 41.9563 - type: nauc_map_at_100_std value: 6.511 - type: nauc_map_at_100_diff1 value: 53.2816 - type: nauc_map_at_1000_max value: 41.9598 - type: nauc_map_at_1000_std value: 6.5069 - type: nauc_map_at_1000_diff1 value: 53.3008 - type: nauc_recall_at_1_max value: 41.7601 - type: nauc_recall_at_1_std value: 4.5455000000000005 - type: nauc_recall_at_1_diff1 value: 58.6454 - type: nauc_recall_at_3_max value: 42.7117 - type: nauc_recall_at_3_std value: 7.674799999999999 - type: nauc_recall_at_3_diff1 value: 48.0061 - type: nauc_recall_at_5_max value: 42.365399999999994 - type: nauc_recall_at_5_std value: 9.2378 - type: nauc_recall_at_5_diff1 value: 46.0218 - type: nauc_recall_at_10_max value: 40.8705 - type: nauc_recall_at_10_std value: 10.9253 - type: nauc_recall_at_10_diff1 value: 43.0092 - type: nauc_recall_at_20_max value: 39.818599999999996 - type: nauc_recall_at_20_std value: 14.1425 - type: nauc_recall_at_20_diff1 value: 40.8455 - type: nauc_recall_at_100_max value: 40.1229 - type: nauc_recall_at_100_std value: 22.0804 - type: nauc_recall_at_100_diff1 value: 37.6538 - type: nauc_recall_at_1000_max value: 40.4194 - type: nauc_recall_at_1000_std value: 36.7051 - type: nauc_recall_at_1000_diff1 value: 35.3088 - type: nauc_precision_at_1_max value: 41.7601 - type: nauc_precision_at_1_std value: 4.5455000000000005 - type: nauc_precision_at_1_diff1 value: 58.6454 - type: nauc_precision_at_3_max value: 42.7117 - type: nauc_precision_at_3_std value: 7.674799999999999 - type: nauc_precision_at_3_diff1 value: 48.0061 - type: nauc_precision_at_5_max value: 42.365399999999994 - type: nauc_precision_at_5_std value: 9.2378 - type: nauc_precision_at_5_diff1 value: 46.0218 - type: nauc_precision_at_10_max value: 40.8705 - type: nauc_precision_at_10_std value: 10.9253 - type: 
nauc_precision_at_10_diff1 value: 43.0092 - type: nauc_precision_at_20_max value: 39.818599999999996 - type: nauc_precision_at_20_std value: 14.1425 - type: nauc_precision_at_20_diff1 value: 40.8455 - type: nauc_precision_at_100_max value: 40.1229 - type: nauc_precision_at_100_std value: 22.0804 - type: nauc_precision_at_100_diff1 value: 37.6538 - type: nauc_precision_at_1000_max value: 40.4194 - type: nauc_precision_at_1000_std value: 36.7051 - type: nauc_precision_at_1000_diff1 value: 35.3088 - type: nauc_mrr_at_1_max value: 41.7601 - type: nauc_mrr_at_1_std value: 4.5455000000000005 - type: nauc_mrr_at_1_diff1 value: 58.6454 - type: nauc_mrr_at_3_max value: 42.2864 - type: nauc_mrr_at_3_std value: 5.8461 - type: nauc_mrr_at_3_diff1 value: 53.9381 - type: nauc_mrr_at_5_max value: 42.1957 - type: nauc_mrr_at_5_std value: 6.142 - type: nauc_mrr_at_5_diff1 value: 53.600300000000004 - type: nauc_mrr_at_10_max value: 42.005900000000004 - type: nauc_mrr_at_10_std value: 6.2986 - type: nauc_mrr_at_10_diff1 value: 53.296200000000006 - type: nauc_mrr_at_20_max value: 41.946099999999994 - type: nauc_mrr_at_20_std value: 6.452299999999999 - type: nauc_mrr_at_20_diff1 value: 53.2485 - type: nauc_mrr_at_100_max value: 41.9563 - type: nauc_mrr_at_100_std value: 6.511 - type: nauc_mrr_at_100_diff1 value: 53.2816 - type: nauc_mrr_at_1000_max value: 41.9598 - type: nauc_mrr_at_1000_std value: 6.5069 - type: nauc_mrr_at_1000_diff1 value: 53.3008 - type: main_score value: 48.921 - task: type: Retrieval dataset: name: MTEB COIRCodeSearchNetRetrieval (ruby) type: CoIR-Retrieval/CodeSearchNet config: ruby split: test revision: 4adc7bc41202b5c13543c9c886a25f340634dab3 metrics: - type: ndcg_at_1 value: 27.359 - type: ndcg_at_3 value: 33.405 - type: ndcg_at_5 value: 35.111 - type: ndcg_at_10 value: 37.124 - type: ndcg_at_20 value: 38.637 - type: ndcg_at_100 value: 40.809 - type: ndcg_at_1000 value: 43.206 - type: map_at_1 value: 27.359 - type: map_at_3 value: 31.906000000000002 - type: map_at_5 value: 32.838 - type: map_at_10 value: 33.677 - type: map_at_20 value: 34.086 - type: map_at_100 value: 34.379 - type: map_at_1000 value: 34.458 - type: recall_at_1 value: 27.359 - type: recall_at_3 value: 37.748 - type: recall_at_5 value: 41.951 - type: recall_at_10 value: 48.136 - type: recall_at_20 value: 54.163 - type: recall_at_100 value: 65.979 - type: recall_at_1000 value: 85.488 - type: precision_at_1 value: 27.359 - type: precision_at_3 value: 12.583 - type: precision_at_5 value: 8.39 - type: precision_at_10 value: 4.814 - type: precision_at_20 value: 2.708 - type: precision_at_100 value: 0.66 - type: precision_at_1000 value: 0.08499999999999999 - type: mrr_at_1 value: 27.3592 - type: mrr_at_3 value: 31.9059 - type: mrr_at_5 value: 32.8377 - type: mrr_at_10 value: 33.677099999999996 - type: mrr_at_20 value: 34.086 - type: mrr_at_100 value: 34.3787 - type: mrr_at_1000 value: 34.4575 - type: nauc_ndcg_at_1_max value: 41.336 - type: nauc_ndcg_at_1_std value: 4.9167000000000005 - type: nauc_ndcg_at_1_diff1 value: 59.489599999999996 - type: nauc_ndcg_at_3_max value: 42.3939 - type: nauc_ndcg_at_3_std value: 9.324200000000001 - type: nauc_ndcg_at_3_diff1 value: 53.886 - type: nauc_ndcg_at_5_max value: 41.523700000000005 - type: nauc_ndcg_at_5_std value: 8.7661 - type: nauc_ndcg_at_5_diff1 value: 52.6116 - type: nauc_ndcg_at_10_max value: 40.7362 - type: nauc_ndcg_at_10_std value: 9.3454 - type: nauc_ndcg_at_10_diff1 value: 51.226000000000006 - type: nauc_ndcg_at_20_max value: 40.1284 - type: nauc_ndcg_at_20_std value: 
10.1067 - type: nauc_ndcg_at_20_diff1 value: 50.6354 - type: nauc_ndcg_at_100_max value: 40.109899999999996 - type: nauc_ndcg_at_100_std value: 11.125599999999999 - type: nauc_ndcg_at_100_diff1 value: 50.021499999999996 - type: nauc_ndcg_at_1000_max value: 39.9325 - type: nauc_ndcg_at_1000_std value: 10.9899 - type: nauc_ndcg_at_1000_diff1 value: 50.3713 - type: nauc_map_at_1_max value: 41.336 - type: nauc_map_at_1_std value: 4.9167000000000005 - type: nauc_map_at_1_diff1 value: 59.489599999999996 - type: nauc_map_at_3_max value: 42.1793 - type: nauc_map_at_3_std value: 8.149099999999999 - type: nauc_map_at_3_diff1 value: 55.1967 - type: nauc_map_at_5_max value: 41.6768 - type: nauc_map_at_5_std value: 7.8223 - type: nauc_map_at_5_diff1 value: 54.4705 - type: nauc_map_at_10_max value: 41.3395 - type: nauc_map_at_10_std value: 8.076 - type: nauc_map_at_10_diff1 value: 53.87929999999999 - type: nauc_map_at_20_max value: 41.1762 - type: nauc_map_at_20_std value: 8.2845 - type: nauc_map_at_20_diff1 value: 53.7144 - type: nauc_map_at_100_max value: 41.1731 - type: nauc_map_at_100_std value: 8.394 - type: nauc_map_at_100_diff1 value: 53.64919999999999 - type: nauc_map_at_1000_max value: 41.165600000000005 - type: nauc_map_at_1000_std value: 8.3923 - type: nauc_map_at_1000_diff1 value: 53.654199999999996 - type: nauc_recall_at_1_max value: 41.336 - type: nauc_recall_at_1_std value: 4.9167000000000005 - type: nauc_recall_at_1_diff1 value: 59.489599999999996 - type: nauc_recall_at_3_max value: 42.9746 - type: nauc_recall_at_3_std value: 12.632399999999999 - type: nauc_recall_at_3_diff1 value: 50.259100000000004 - type: nauc_recall_at_5_max value: 40.9855 - type: nauc_recall_at_5_std value: 11.368300000000001 - type: nauc_recall_at_5_diff1 value: 47.3165 - type: nauc_recall_at_10_max value: 38.6473 - type: nauc_recall_at_10_std value: 13.1083 - type: nauc_recall_at_10_diff1 value: 43.1086 - type: nauc_recall_at_20_max value: 36.0858 - type: nauc_recall_at_20_std value: 16.345100000000002 - type: nauc_recall_at_20_diff1 value: 40.3971 - type: nauc_recall_at_100_max value: 35.3344 - type: nauc_recall_at_100_std value: 24.4293 - type: nauc_recall_at_100_diff1 value: 34.4263 - type: nauc_recall_at_1000_max value: 27.814 - type: nauc_recall_at_1000_std value: 34.5865 - type: nauc_recall_at_1000_diff1 value: 26.621 - type: nauc_precision_at_1_max value: 41.336 - type: nauc_precision_at_1_std value: 4.9167000000000005 - type: nauc_precision_at_1_diff1 value: 59.489599999999996 - type: nauc_precision_at_3_max value: 42.9746 - type: nauc_precision_at_3_std value: 12.632399999999999 - type: nauc_precision_at_3_diff1 value: 50.259100000000004 - type: nauc_precision_at_5_max value: 40.9855 - type: nauc_precision_at_5_std value: 11.368300000000001 - type: nauc_precision_at_5_diff1 value: 47.3165 - type: nauc_precision_at_10_max value: 38.6473 - type: nauc_precision_at_10_std value: 13.1083 - type: nauc_precision_at_10_diff1 value: 43.1086 - type: nauc_precision_at_20_max value: 36.0858 - type: nauc_precision_at_20_std value: 16.345100000000002 - type: nauc_precision_at_20_diff1 value: 40.3971 - type: nauc_precision_at_100_max value: 35.3344 - type: nauc_precision_at_100_std value: 24.4293 - type: nauc_precision_at_100_diff1 value: 34.4263 - type: nauc_precision_at_1000_max value: 27.814 - type: nauc_precision_at_1000_std value: 34.5865 - type: nauc_precision_at_1000_diff1 value: 26.621 - type: nauc_mrr_at_1_max value: 41.336 - type: nauc_mrr_at_1_std value: 4.9167000000000005 - type: nauc_mrr_at_1_diff1 value: 
59.489599999999996 - type: nauc_mrr_at_3_max value: 42.1793 - type: nauc_mrr_at_3_std value: 8.149099999999999 - type: nauc_mrr_at_3_diff1 value: 55.1967 - type: nauc_mrr_at_5_max value: 41.6768 - type: nauc_mrr_at_5_std value: 7.8223 - type: nauc_mrr_at_5_diff1 value: 54.4705 - type: nauc_mrr_at_10_max value: 41.3395 - type: nauc_mrr_at_10_std value: 8.076 - type: nauc_mrr_at_10_diff1 value: 53.87929999999999 - type: nauc_mrr_at_20_max value: 41.1762 - type: nauc_mrr_at_20_std value: 8.2845 - type: nauc_mrr_at_20_diff1 value: 53.7144 - type: nauc_mrr_at_100_max value: 41.1731 - type: nauc_mrr_at_100_std value: 8.394 - type: nauc_mrr_at_100_diff1 value: 53.64919999999999 - type: nauc_mrr_at_1000_max value: 41.165600000000005 - type: nauc_mrr_at_1000_std value: 8.3923 - type: nauc_mrr_at_1000_diff1 value: 53.654199999999996 - type: main_score value: 37.124 - task: type: Retrieval dataset: name: MTEB COIRCodeSearchNetRetrieval (java) type: CoIR-Retrieval/CodeSearchNet config: java split: test revision: 4adc7bc41202b5c13543c9c886a25f340634dab3 metrics: - type: ndcg_at_1 value: 29.621 - type: ndcg_at_3 value: 36.388999999999996 - type: ndcg_at_5 value: 38.071 - type: ndcg_at_10 value: 39.856 - type: ndcg_at_20 value: 41.189 - type: ndcg_at_100 value: 43.391999999999996 - type: ndcg_at_1000 value: 45.080999999999996 - type: map_at_1 value: 29.621 - type: map_at_3 value: 34.733000000000004 - type: map_at_5 value: 35.668 - type: map_at_10 value: 36.411 - type: map_at_20 value: 36.778 - type: map_at_100 value: 37.077 - type: map_at_1000 value: 37.133 - type: recall_at_1 value: 29.621 - type: recall_at_3 value: 41.178 - type: recall_at_5 value: 45.257999999999996 - type: recall_at_10 value: 50.744 - type: recall_at_20 value: 56.001999999999995 - type: recall_at_100 value: 67.96 - type: recall_at_1000 value: 81.707 - type: precision_at_1 value: 29.621 - type: precision_at_3 value: 13.725999999999999 - type: precision_at_5 value: 9.052 - type: precision_at_10 value: 5.074 - type: precision_at_20 value: 2.8000000000000003 - type: precision_at_100 value: 0.6799999999999999 - type: precision_at_1000 value: 0.082 - type: mrr_at_1 value: 29.593799999999998 - type: mrr_at_3 value: 34.7254 - type: mrr_at_5 value: 35.6583 - type: mrr_at_10 value: 36.4022 - type: mrr_at_20 value: 36.7689 - type: mrr_at_100 value: 37.0681 - type: mrr_at_1000 value: 37.124 - type: nauc_ndcg_at_1_max value: 39.7113 - type: nauc_ndcg_at_1_std value: -1.3535 - type: nauc_ndcg_at_1_diff1 value: 57.7222 - type: nauc_ndcg_at_3_max value: 40.4493 - type: nauc_ndcg_at_3_std value: 1.4639 - type: nauc_ndcg_at_3_diff1 value: 52.145799999999994 - type: nauc_ndcg_at_5_max value: 40.1219 - type: nauc_ndcg_at_5_std value: 2.1448 - type: nauc_ndcg_at_5_diff1 value: 51.2694 - type: nauc_ndcg_at_10_max value: 39.4187 - type: nauc_ndcg_at_10_std value: 2.5085 - type: nauc_ndcg_at_10_diff1 value: 50.171699999999994 - type: nauc_ndcg_at_20_max value: 39.2822 - type: nauc_ndcg_at_20_std value: 3.1015 - type: nauc_ndcg_at_20_diff1 value: 49.8837 - type: nauc_ndcg_at_100_max value: 39.1352 - type: nauc_ndcg_at_100_std value: 3.8505 - type: nauc_ndcg_at_100_diff1 value: 49.7104 - type: nauc_ndcg_at_1000_max value: 39.1441 - type: nauc_ndcg_at_1000_std value: 4.1791 - type: nauc_ndcg_at_1000_diff1 value: 49.806200000000004 - type: nauc_map_at_1_max value: 39.7113 - type: nauc_map_at_1_std value: -1.3535 - type: nauc_map_at_1_diff1 value: 57.7222 - type: nauc_map_at_3_max value: 40.3518 - type: nauc_map_at_3_std value: 0.7879 - type: nauc_map_at_3_diff1 
value: 53.4756 - type: nauc_map_at_5_max value: 40.1793 - type: nauc_map_at_5_std value: 1.1596 - type: nauc_map_at_5_diff1 value: 52.993900000000004 - type: nauc_map_at_10_max value: 39.8893 - type: nauc_map_at_10_std value: 1.3074000000000001 - type: nauc_map_at_10_diff1 value: 52.53679999999999 - type: nauc_map_at_20_max value: 39.8583 - type: nauc_map_at_20_std value: 1.4666000000000001 - type: nauc_map_at_20_diff1 value: 52.4664 - type: nauc_map_at_100_max value: 39.8303 - type: nauc_map_at_100_std value: 1.5578 - type: nauc_map_at_100_diff1 value: 52.44950000000001 - type: nauc_map_at_1000_max value: 39.827400000000004 - type: nauc_map_at_1000_std value: 1.568 - type: nauc_map_at_1000_diff1 value: 52.452600000000004 - type: nauc_recall_at_1_max value: 39.7113 - type: nauc_recall_at_1_std value: -1.3535 - type: nauc_recall_at_1_diff1 value: 57.7222 - type: nauc_recall_at_3_max value: 40.6926 - type: nauc_recall_at_3_std value: 3.3686000000000003 - type: nauc_recall_at_3_diff1 value: 48.4023 - type: nauc_recall_at_5_max value: 39.8681 - type: nauc_recall_at_5_std value: 5.0524 - type: nauc_recall_at_5_diff1 value: 46.2361 - type: nauc_recall_at_10_max value: 37.6778 - type: nauc_recall_at_10_std value: 6.2486 - type: nauc_recall_at_10_diff1 value: 42.7533 - type: nauc_recall_at_20_max value: 36.9831 - type: nauc_recall_at_20_std value: 8.9021 - type: nauc_recall_at_20_diff1 value: 41.1453 - type: nauc_recall_at_100_max value: 35.6903 - type: nauc_recall_at_100_std value: 15.161 - type: nauc_recall_at_100_diff1 value: 38.1673 - type: nauc_recall_at_1000_max value: 34.2718 - type: nauc_recall_at_1000_std value: 26.3982 - type: nauc_recall_at_1000_diff1 value: 33.3322 - type: nauc_precision_at_1_max value: 39.7113 - type: nauc_precision_at_1_std value: -1.3535 - type: nauc_precision_at_1_diff1 value: 57.7222 - type: nauc_precision_at_3_max value: 40.6926 - type: nauc_precision_at_3_std value: 3.3686000000000003 - type: nauc_precision_at_3_diff1 value: 48.4023 - type: nauc_precision_at_5_max value: 39.8681 - type: nauc_precision_at_5_std value: 5.0524 - type: nauc_precision_at_5_diff1 value: 46.2361 - type: nauc_precision_at_10_max value: 37.6778 - type: nauc_precision_at_10_std value: 6.2486 - type: nauc_precision_at_10_diff1 value: 42.7533 - type: nauc_precision_at_20_max value: 36.9831 - type: nauc_precision_at_20_std value: 8.9021 - type: nauc_precision_at_20_diff1 value: 41.1453 - type: nauc_precision_at_100_max value: 35.6903 - type: nauc_precision_at_100_std value: 15.161 - type: nauc_precision_at_100_diff1 value: 38.1673 - type: nauc_precision_at_1000_max value: 34.2718 - type: nauc_precision_at_1000_std value: 26.3982 - type: nauc_precision_at_1000_diff1 value: 33.3322 - type: nauc_mrr_at_1_max value: 39.6284 - type: nauc_mrr_at_1_std value: -1.345 - type: nauc_mrr_at_1_diff1 value: 57.828 - type: nauc_mrr_at_3_max value: 40.3036 - type: nauc_mrr_at_3_std value: 0.7952000000000001 - type: nauc_mrr_at_3_diff1 value: 53.524499999999996 - type: nauc_mrr_at_5_max value: 40.1366 - type: nauc_mrr_at_5_std value: 1.1708 - type: nauc_mrr_at_5_diff1 value: 53.0405 - type: nauc_mrr_at_10_max value: 39.848 - type: nauc_mrr_at_10_std value: 1.3195000000000001 - type: nauc_mrr_at_10_diff1 value: 52.5868 - type: nauc_mrr_at_20_max value: 39.815400000000004 - type: nauc_mrr_at_20_std value: 1.4787 - type: nauc_mrr_at_20_diff1 value: 52.513299999999994 - type: nauc_mrr_at_100_max value: 39.787299999999995 - type: nauc_mrr_at_100_std value: 1.5699999999999998 - type: nauc_mrr_at_100_diff1 value: 
52.496500000000005 - type: nauc_mrr_at_1000_max value: 39.7844 - type: nauc_mrr_at_1000_std value: 1.5803 - type: nauc_mrr_at_1000_diff1 value: 52.4996 - type: main_score value: 39.856 - task: type: Retrieval dataset: name: MTEB COIRCodeSearchNetRetrieval (php) type: CoIR-Retrieval/CodeSearchNet config: php split: test revision: 4adc7bc41202b5c13543c9c886a25f340634dab3 metrics: - type: ndcg_at_1 value: 25.211 - type: ndcg_at_3 value: 31.994 - type: ndcg_at_5 value: 33.986 - type: ndcg_at_10 value: 36.086 - type: ndcg_at_20 value: 37.638 - type: ndcg_at_100 value: 40.268 - type: ndcg_at_1000 value: 42.309999999999995 - type: map_at_1 value: 25.211 - type: map_at_3 value: 30.346 - type: map_at_5 value: 31.452 - type: map_at_10 value: 32.323 - type: map_at_20 value: 32.751000000000005 - type: map_at_100 value: 33.097 - type: map_at_1000 value: 33.165 - type: recall_at_1 value: 25.211 - type: recall_at_3 value: 36.756 - type: recall_at_5 value: 41.587 - type: recall_at_10 value: 48.059000000000005 - type: recall_at_20 value: 54.189 - type: recall_at_100 value: 68.61 - type: recall_at_1000 value: 85.172 - type: precision_at_1 value: 25.211 - type: precision_at_3 value: 12.252 - type: precision_at_5 value: 8.317 - type: precision_at_10 value: 4.806 - type: precision_at_20 value: 2.709 - type: precision_at_100 value: 0.6859999999999999 - type: precision_at_1000 value: 0.08499999999999999 - type: mrr_at_1 value: 25.1962 - type: mrr_at_3 value: 30.335099999999997 - type: mrr_at_5 value: 31.4426 - type: mrr_at_10 value: 32.3121 - type: mrr_at_20 value: 32.741 - type: mrr_at_100 value: 33.0877 - type: mrr_at_1000 value: 33.1558 - type: nauc_ndcg_at_1_max value: 38.358799999999995 - type: nauc_ndcg_at_1_std value: 4.3283000000000005 - type: nauc_ndcg_at_1_diff1 value: 53.33520000000001 - type: nauc_ndcg_at_3_max value: 38.0766 - type: nauc_ndcg_at_3_std value: 6.0852 - type: nauc_ndcg_at_3_diff1 value: 45.5009 - type: nauc_ndcg_at_5_max value: 37.788199999999996 - type: nauc_ndcg_at_5_std value: 7.0073 - type: nauc_ndcg_at_5_diff1 value: 44.3577 - type: nauc_ndcg_at_10_max value: 37.674 - type: nauc_ndcg_at_10_std value: 7.954700000000001 - type: nauc_ndcg_at_10_diff1 value: 43.6869 - type: nauc_ndcg_at_20_max value: 37.4368 - type: nauc_ndcg_at_20_std value: 8.4592 - type: nauc_ndcg_at_20_diff1 value: 43.3112 - type: nauc_ndcg_at_100_max value: 37.5955 - type: nauc_ndcg_at_100_std value: 9.5313 - type: nauc_ndcg_at_100_diff1 value: 42.9187 - type: nauc_ndcg_at_1000_max value: 37.8056 - type: nauc_ndcg_at_1000_std value: 9.7477 - type: nauc_ndcg_at_1000_diff1 value: 43.3862 - type: nauc_map_at_1_max value: 38.358799999999995 - type: nauc_map_at_1_std value: 4.3283000000000005 - type: nauc_map_at_1_diff1 value: 53.33520000000001 - type: nauc_map_at_3_max value: 38.1738 - type: nauc_map_at_3_std value: 5.6814 - type: nauc_map_at_3_diff1 value: 47.229 - type: nauc_map_at_5_max value: 38.005100000000006 - type: nauc_map_at_5_std value: 6.1966 - type: nauc_map_at_5_diff1 value: 46.559200000000004 - type: nauc_map_at_10_max value: 37.9741 - type: nauc_map_at_10_std value: 6.5971 - type: nauc_map_at_10_diff1 value: 46.285 - type: nauc_map_at_20_max value: 37.9009 - type: nauc_map_at_20_std value: 6.7273 - type: nauc_map_at_20_diff1 value: 46.1825 - type: nauc_map_at_100_max value: 37.9135 - type: nauc_map_at_100_std value: 6.8602 - type: nauc_map_at_100_diff1 value: 46.1376 - type: nauc_map_at_1000_max value: 37.918 - type: nauc_map_at_1000_std value: 6.8636 - type: nauc_map_at_1000_diff1 value: 46.1515 - 
type: nauc_recall_at_1_max value: 38.358799999999995 - type: nauc_recall_at_1_std value: 4.3283000000000005 - type: nauc_recall_at_1_diff1 value: 53.33520000000001 - type: nauc_recall_at_3_max value: 37.7993 - type: nauc_recall_at_3_std value: 7.1854000000000005 - type: nauc_recall_at_3_diff1 value: 40.8217 - type: nauc_recall_at_5_max value: 37.1564 - type: nauc_recall_at_5_std value: 9.3324 - type: nauc_recall_at_5_diff1 value: 38.2991 - type: nauc_recall_at_10_max value: 36.721399999999996 - type: nauc_recall_at_10_std value: 12.1836 - type: nauc_recall_at_10_diff1 value: 36.1617 - type: nauc_recall_at_20_max value: 35.7969 - type: nauc_recall_at_20_std value: 14.4368 - type: nauc_recall_at_20_diff1 value: 34.3383 - type: nauc_recall_at_100_max value: 36.6044 - type: nauc_recall_at_100_std value: 23.055500000000002 - type: nauc_recall_at_100_diff1 value: 29.555500000000002 - type: nauc_recall_at_1000_max value: 39.7315 - type: nauc_recall_at_1000_std value: 38.601600000000005 - type: nauc_recall_at_1000_diff1 value: 26.7047 - type: nauc_precision_at_1_max value: 38.358799999999995 - type: nauc_precision_at_1_std value: 4.3283000000000005 - type: nauc_precision_at_1_diff1 value: 53.33520000000001 - type: nauc_precision_at_3_max value: 37.7993 - type: nauc_precision_at_3_std value: 7.1854000000000005 - type: nauc_precision_at_3_diff1 value: 40.8217 - type: nauc_precision_at_5_max value: 37.1564 - type: nauc_precision_at_5_std value: 9.3324 - type: nauc_precision_at_5_diff1 value: 38.2991 - type: nauc_precision_at_10_max value: 36.721399999999996 - type: nauc_precision_at_10_std value: 12.1836 - type: nauc_precision_at_10_diff1 value: 36.1617 - type: nauc_precision_at_20_max value: 35.7969 - type: nauc_precision_at_20_std value: 14.4368 - type: nauc_precision_at_20_diff1 value: 34.3383 - type: nauc_precision_at_100_max value: 36.6044 - type: nauc_precision_at_100_std value: 23.055500000000002 - type: nauc_precision_at_100_diff1 value: 29.555500000000002 - type: nauc_precision_at_1000_max value: 39.7315 - type: nauc_precision_at_1000_std value: 38.601600000000005 - type: nauc_precision_at_1000_diff1 value: 26.7047 - type: nauc_mrr_at_1_max value: 38.3753 - type: nauc_mrr_at_1_std value: 4.3651 - type: nauc_mrr_at_1_diff1 value: 53.3935 - type: nauc_mrr_at_3_max value: 38.183299999999996 - type: nauc_mrr_at_3_std value: 5.7071 - type: nauc_mrr_at_3_diff1 value: 47.2578 - type: nauc_mrr_at_5_max value: 38.0161 - type: nauc_mrr_at_5_std value: 6.2222 - type: nauc_mrr_at_5_diff1 value: 46.5907 - type: nauc_mrr_at_10_max value: 37.9882 - type: nauc_mrr_at_10_std value: 6.6221000000000005 - type: nauc_mrr_at_10_diff1 value: 46.3178 - type: nauc_mrr_at_20_max value: 37.912 - type: nauc_mrr_at_20_std value: 6.752700000000001 - type: nauc_mrr_at_20_diff1 value: 46.2141 - type: nauc_mrr_at_100_max value: 37.9247 - type: nauc_mrr_at_100_std value: 6.8857 - type: nauc_mrr_at_100_diff1 value: 46.169399999999996 - type: nauc_mrr_at_1000_max value: 37.9292 - type: nauc_mrr_at_1000_std value: 6.889099999999999 - type: nauc_mrr_at_1000_diff1 value: 46.1833 - type: main_score value: 36.086 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval (default) type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: ndcg_at_1 value: 41.059 - type: ndcg_at_3 value: 45.091 - type: ndcg_at_5 value: 47.754000000000005 - type: ndcg_at_10 value: 50.403 - type: ndcg_at_20 value: 52.629999999999995 - type: ndcg_at_100 value: 
55.669999999999995 - type: ndcg_at_1000 value: 57.645 - type: map_at_1 value: 33.304 - type: map_at_3 value: 40.428999999999995 - type: map_at_5 value: 42.638999999999996 - type: map_at_10 value: 44.239 - type: map_at_20 value: 45.144 - type: map_at_100 value: 45.783 - type: map_at_1000 value: 45.911 - type: recall_at_1 value: 33.304 - type: recall_at_3 value: 46.509 - type: recall_at_5 value: 53.849999999999994 - type: recall_at_10 value: 61.694 - type: recall_at_20 value: 69.708 - type: recall_at_100 value: 83.314 - type: recall_at_1000 value: 95.955 - type: precision_at_1 value: 41.059 - type: precision_at_3 value: 21.316 - type: precision_at_5 value: 15.651000000000002 - type: precision_at_10 value: 9.642000000000001 - type: precision_at_20 value: 5.744 - type: precision_at_100 value: 1.538 - type: precision_at_1000 value: 0.20500000000000002 - type: mrr_at_1 value: 41.058699999999995 - type: mrr_at_3 value: 47.258 - type: mrr_at_5 value: 49.082 - type: mrr_at_10 value: 50.0836 - type: mrr_at_20 value: 50.5221 - type: mrr_at_100 value: 50.8217 - type: mrr_at_1000 value: 50.8713 - type: nauc_ndcg_at_1_max value: 40.6525 - type: nauc_ndcg_at_1_std value: -9.376 - type: nauc_ndcg_at_1_diff1 value: 50.0125 - type: nauc_ndcg_at_3_max value: 40.9809 - type: nauc_ndcg_at_3_std value: -7.1297 - type: nauc_ndcg_at_3_diff1 value: 47.0051 - type: nauc_ndcg_at_5_max value: 40.037800000000004 - type: nauc_ndcg_at_5_std value: -4.3972999999999995 - type: nauc_ndcg_at_5_diff1 value: 45.8909 - type: nauc_ndcg_at_10_max value: 39.939400000000006 - type: nauc_ndcg_at_10_std value: -4.5747 - type: nauc_ndcg_at_10_diff1 value: 45.0088 - type: nauc_ndcg_at_20_max value: 40.144999999999996 - type: nauc_ndcg_at_20_std value: -4.2649 - type: nauc_ndcg_at_20_diff1 value: 45.6565 - type: nauc_ndcg_at_100_max value: 41.2015 - type: nauc_ndcg_at_100_std value: -3.0772 - type: nauc_ndcg_at_100_diff1 value: 45.8564 - type: nauc_ndcg_at_1000_max value: 41.2273 - type: nauc_ndcg_at_1000_std value: -3.8580000000000005 - type: nauc_ndcg_at_1000_diff1 value: 46.0075 - type: nauc_map_at_1_max value: 33.681400000000004 - type: nauc_map_at_1_std value: -10.792499999999999 - type: nauc_map_at_1_diff1 value: 51.6292 - type: nauc_map_at_3_max value: 38.5132 - type: nauc_map_at_3_std value: -9.085899999999999 - type: nauc_map_at_3_diff1 value: 48.516 - type: nauc_map_at_5_max value: 38.7849 - type: nauc_map_at_5_std value: -7.2336 - type: nauc_map_at_5_diff1 value: 47.9868 - type: nauc_map_at_10_max value: 39.3231 - type: nauc_map_at_10_std value: -7.1676 - type: nauc_map_at_10_diff1 value: 47.446 - type: nauc_map_at_20_max value: 39.589 - type: nauc_map_at_20_std value: -6.8943 - type: nauc_map_at_20_diff1 value: 47.4397 - type: nauc_map_at_100_max value: 39.875 - type: nauc_map_at_100_std value: -6.549199999999999 - type: nauc_map_at_100_diff1 value: 47.4459 - type: nauc_map_at_1000_max value: 39.8847 - type: nauc_map_at_1000_std value: -6.5965 - type: nauc_map_at_1000_diff1 value: 47.4298 - type: nauc_recall_at_1_max value: 33.681400000000004 - type: nauc_recall_at_1_std value: -10.792499999999999 - type: nauc_recall_at_1_diff1 value: 51.6292 - type: nauc_recall_at_3_max value: 37.3654 - type: nauc_recall_at_3_std value: -6.1476999999999995 - type: nauc_recall_at_3_diff1 value: 43.147400000000005 - type: nauc_recall_at_5_max value: 35.3328 - type: nauc_recall_at_5_std value: 1.0517 - type: nauc_recall_at_5_diff1 value: 39.7709 - type: nauc_recall_at_10_max value: 34.6109 - type: nauc_recall_at_10_std value: 
1.5653000000000001 - type: nauc_recall_at_10_diff1 value: 35.5858 - type: nauc_recall_at_20_max value: 34.2941 - type: nauc_recall_at_20_std value: 3.9570000000000003 - type: nauc_recall_at_20_diff1 value: 36.910199999999996 - type: nauc_recall_at_100_max value: 41.6344 - type: nauc_recall_at_100_std value: 18.614 - type: nauc_recall_at_100_diff1 value: 35.9742 - type: nauc_recall_at_1000_max value: 53.67960000000001 - type: nauc_recall_at_1000_std value: 46.8911 - type: nauc_recall_at_1000_diff1 value: 35.167500000000004 - type: nauc_precision_at_1_max value: 40.6525 - type: nauc_precision_at_1_std value: -9.376 - type: nauc_precision_at_1_diff1 value: 50.0125 - type: nauc_precision_at_3_max value: 40.7269 - type: nauc_precision_at_3_std value: -1.2473 - type: nauc_precision_at_3_diff1 value: 31.521500000000003 - type: nauc_precision_at_5_max value: 34.9193 - type: nauc_precision_at_5_std value: 6.758699999999999 - type: nauc_precision_at_5_diff1 value: 20.958399999999997 - type: nauc_precision_at_10_max value: 29.1675 - type: nauc_precision_at_10_std value: 8.4146 - type: nauc_precision_at_10_diff1 value: 9.517000000000001 - type: nauc_precision_at_20_max value: 23.0603 - type: nauc_precision_at_20_std value: 9.5615 - type: nauc_precision_at_20_diff1 value: 3.3520000000000003 - type: nauc_precision_at_100_max value: 10.3906 - type: nauc_precision_at_100_std value: 8.8378 - type: nauc_precision_at_100_diff1 value: -8.2594 - type: nauc_precision_at_1000_max value: -4.7287 - type: nauc_precision_at_1000_std value: -2.5721000000000003 - type: nauc_precision_at_1000_diff1 value: -19.5341 - type: nauc_mrr_at_1_max value: 40.6525 - type: nauc_mrr_at_1_std value: -9.376 - type: nauc_mrr_at_1_diff1 value: 50.0125 - type: nauc_mrr_at_3_max value: 42.4409 - type: nauc_mrr_at_3_std value: -7.4642 - type: nauc_mrr_at_3_diff1 value: 47.773199999999996 - type: nauc_mrr_at_5_max value: 41.8687 - type: nauc_mrr_at_5_std value: -6.0165999999999995 - type: nauc_mrr_at_5_diff1 value: 46.929500000000004 - type: nauc_mrr_at_10_max value: 41.6607 - type: nauc_mrr_at_10_std value: -5.8776 - type: nauc_mrr_at_10_diff1 value: 46.5117 - type: nauc_mrr_at_20_max value: 41.6088 - type: nauc_mrr_at_20_std value: -6.0403 - type: nauc_mrr_at_20_diff1 value: 46.7355 - type: nauc_mrr_at_100_max value: 41.6881 - type: nauc_mrr_at_100_std value: -6.0445 - type: nauc_mrr_at_100_diff1 value: 46.7504 - type: nauc_mrr_at_1000_max value: 41.6981 - type: nauc_mrr_at_1000_std value: -6.0584 - type: nauc_mrr_at_1000_diff1 value: 46.7686 - type: main_score value: 50.403 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval (default) type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: ndcg_at_1 value: 32.293 - type: ndcg_at_3 value: 35.357 - type: ndcg_at_5 value: 37.135 - type: ndcg_at_10 value: 39.682 - type: ndcg_at_20 value: 41.477000000000004 - type: ndcg_at_100 value: 44.594 - type: ndcg_at_1000 value: 46.938 - type: map_at_1 value: 25.084 - type: map_at_3 value: 31.134 - type: map_at_5 value: 32.693 - type: map_at_10 value: 34.072 - type: map_at_20 value: 34.719 - type: map_at_100 value: 35.327999999999996 - type: map_at_1000 value: 35.461 - type: recall_at_1 value: 25.084 - type: recall_at_3 value: 36.678 - type: recall_at_5 value: 41.839999999999996 - type: recall_at_10 value: 49.782 - type: recall_at_20 value: 56.442 - type: recall_at_100 value: 71.114 - type: recall_at_1000 value: 86.372 - type: precision_at_1 value: 32.293 - 
type: precision_at_3 value: 17.452 - type: precision_at_5 value: 12.446 - type: precision_at_10 value: 7.758 - type: precision_at_20 value: 4.634 - type: precision_at_100 value: 1.324 - type: precision_at_1000 value: 0.184 - type: mrr_at_1 value: 32.293 - type: mrr_at_3 value: 37.8344 - type: mrr_at_5 value: 39.0223 - type: mrr_at_10 value: 40.1805 - type: mrr_at_20 value: 40.6083 - type: mrr_at_100 value: 40.928799999999995 - type: mrr_at_1000 value: 40.9754 - type: nauc_ndcg_at_1_max value: 45.3161 - type: nauc_ndcg_at_1_std value: 4.444 - type: nauc_ndcg_at_1_diff1 value: 46.0858 - type: nauc_ndcg_at_3_max value: 46.1152 - type: nauc_ndcg_at_3_std value: 3.2603 - type: nauc_ndcg_at_3_diff1 value: 42.6324 - type: nauc_ndcg_at_5_max value: 46.3649 - type: nauc_ndcg_at_5_std value: 2.5442 - type: nauc_ndcg_at_5_diff1 value: 42.9534 - type: nauc_ndcg_at_10_max value: 45.9638 - type: nauc_ndcg_at_10_std value: 3.849 - type: nauc_ndcg_at_10_diff1 value: 42.3058 - type: nauc_ndcg_at_20_max value: 45.6402 - type: nauc_ndcg_at_20_std value: 4.6758 - type: nauc_ndcg_at_20_diff1 value: 41.8551 - type: nauc_ndcg_at_100_max value: 45.7963 - type: nauc_ndcg_at_100_std value: 6.154599999999999 - type: nauc_ndcg_at_100_diff1 value: 41.1414 - type: nauc_ndcg_at_1000_max value: 45.9794 - type: nauc_ndcg_at_1000_std value: 6.9567000000000005 - type: nauc_ndcg_at_1000_diff1 value: 40.8964 - type: nauc_map_at_1_max value: 40.1856 - type: nauc_map_at_1_std value: -4.0307 - type: nauc_map_at_1_diff1 value: 49.675999999999995 - type: nauc_map_at_3_max value: 43.8311 - type: nauc_map_at_3_std value: -1.2912 - type: nauc_map_at_3_diff1 value: 45.9441 - type: nauc_map_at_5_max value: 44.818400000000004 - type: nauc_map_at_5_std value: -0.7452000000000001 - type: nauc_map_at_5_diff1 value: 45.6591 - type: nauc_map_at_10_max value: 44.9988 - type: nauc_map_at_10_std value: 0.41960000000000003 - type: nauc_map_at_10_diff1 value: 45.1582 - type: nauc_map_at_20_max value: 45.0395 - type: nauc_map_at_20_std value: 0.9468000000000001 - type: nauc_map_at_20_diff1 value: 44.890600000000006 - type: nauc_map_at_100_max value: 45.311 - type: nauc_map_at_100_std value: 1.5421 - type: nauc_map_at_100_diff1 value: 44.7203 - type: nauc_map_at_1000_max value: 45.364399999999996 - type: nauc_map_at_1000_std value: 1.6643000000000001 - type: nauc_map_at_1000_diff1 value: 44.6926 - type: nauc_recall_at_1_max value: 40.1856 - type: nauc_recall_at_1_std value: -4.0307 - type: nauc_recall_at_1_diff1 value: 49.675999999999995 - type: nauc_recall_at_3_max value: 43.0698 - type: nauc_recall_at_3_std value: 0.4071 - type: nauc_recall_at_3_diff1 value: 39.6364 - type: nauc_recall_at_5_max value: 44.056200000000004 - type: nauc_recall_at_5_std value: 0.6597000000000001 - type: nauc_recall_at_5_diff1 value: 38.5431 - type: nauc_recall_at_10_max value: 42.5643 - type: nauc_recall_at_10_std value: 5.446899999999999 - type: nauc_recall_at_10_diff1 value: 35.3363 - type: nauc_recall_at_20_max value: 40.9176 - type: nauc_recall_at_20_std value: 8.6434 - type: nauc_recall_at_20_diff1 value: 33.0525 - type: nauc_recall_at_100_max value: 41.2899 - type: nauc_recall_at_100_std value: 17.3979 - type: nauc_recall_at_100_diff1 value: 28.0707 - type: nauc_recall_at_1000_max value: 43.2786 - type: nauc_recall_at_1000_std value: 33.6676 - type: nauc_recall_at_1000_diff1 value: 19.6489 - type: nauc_precision_at_1_max value: 45.3161 - type: nauc_precision_at_1_std value: 4.444 - type: nauc_precision_at_1_diff1 value: 46.0858 - type: nauc_precision_at_3_max 
value: 45.937400000000004 - type: nauc_precision_at_3_std value: 13.606599999999998 - type: nauc_precision_at_3_diff1 value: 28.8887 - type: nauc_precision_at_5_max value: 43.6409 - type: nauc_precision_at_5_std value: 15.3222 - type: nauc_precision_at_5_diff1 value: 23.5428 - type: nauc_precision_at_10_max value: 38.8973 - type: nauc_precision_at_10_std value: 21.049300000000002 - type: nauc_precision_at_10_diff1 value: 15.912200000000002 - type: nauc_precision_at_20_max value: 33.1485 - type: nauc_precision_at_20_std value: 26.1451 - type: nauc_precision_at_20_diff1 value: 7.7276 - type: nauc_precision_at_100_max value: 24.1577 - type: nauc_precision_at_100_std value: 31.4656 - type: nauc_precision_at_100_diff1 value: -4.0066999999999995 - type: nauc_precision_at_1000_max value: 12.3639 - type: nauc_precision_at_1000_std value: 28.9285 - type: nauc_precision_at_1000_diff1 value: -11.7577 - type: nauc_mrr_at_1_max value: 45.3161 - type: nauc_mrr_at_1_std value: 4.444 - type: nauc_mrr_at_1_diff1 value: 46.0858 - type: nauc_mrr_at_3_max value: 45.9129 - type: nauc_mrr_at_3_std value: 5.743 - type: nauc_mrr_at_3_diff1 value: 41.6507 - type: nauc_mrr_at_5_max value: 45.8273 - type: nauc_mrr_at_5_std value: 5.57 - type: nauc_mrr_at_5_diff1 value: 41.531400000000005 - type: nauc_mrr_at_10_max value: 45.8144 - type: nauc_mrr_at_10_std value: 6.263000000000001 - type: nauc_mrr_at_10_diff1 value: 41.2348 - type: nauc_mrr_at_20_max value: 45.7975 - type: nauc_mrr_at_20_std value: 6.392200000000001 - type: nauc_mrr_at_20_diff1 value: 41.259499999999996 - type: nauc_mrr_at_100_max value: 45.7286 - type: nauc_mrr_at_100_std value: 6.456099999999999 - type: nauc_mrr_at_100_diff1 value: 41.185100000000006 - type: nauc_mrr_at_1000_max value: 45.7325 - type: nauc_mrr_at_1000_std value: 6.4614 - type: nauc_mrr_at_1000_diff1 value: 41.188 - type: main_score value: 39.682 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval (default) type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: ndcg_at_1 value: 41.379 - type: ndcg_at_3 value: 48.789 - type: ndcg_at_5 value: 51.535 - type: ndcg_at_10 value: 53.654999999999994 - type: ndcg_at_20 value: 55.559999999999995 - type: ndcg_at_100 value: 57.911 - type: ndcg_at_1000 value: 59.275 - type: map_at_1 value: 36.224000000000004 - type: map_at_3 value: 45.190999999999995 - type: map_at_5 value: 47.012 - type: map_at_10 value: 48.141 - type: map_at_20 value: 48.802 - type: map_at_100 value: 49.214 - type: map_at_1000 value: 49.278 - type: recall_at_1 value: 36.224000000000004 - type: recall_at_3 value: 53.513 - type: recall_at_5 value: 60.221000000000004 - type: recall_at_10 value: 66.346 - type: recall_at_20 value: 73.359 - type: recall_at_100 value: 84.77 - type: recall_at_1000 value: 94.547 - type: precision_at_1 value: 41.379 - type: precision_at_3 value: 21.902 - type: precision_at_5 value: 15.197 - type: precision_at_10 value: 8.639 - type: precision_at_20 value: 4.887 - type: precision_at_100 value: 1.164 - type: precision_at_1000 value: 0.133 - type: mrr_at_1 value: 41.3793 - type: mrr_at_3 value: 49.0282 - type: mrr_at_5 value: 50.7022 - type: mrr_at_10 value: 51.462399999999995 - type: mrr_at_20 value: 51.9372 - type: mrr_at_100 value: 52.1984 - type: mrr_at_1000 value: 52.2374 - type: nauc_ndcg_at_1_max value: 45.521499999999996 - type: nauc_ndcg_at_1_std value: -3.2632000000000003 - type: nauc_ndcg_at_1_diff1 value: 55.017799999999994 - type: nauc_ndcg_at_3_max 
value: 43.343399999999995 - type: nauc_ndcg_at_3_std value: -4.4684 - type: nauc_ndcg_at_3_diff1 value: 49.7562 - type: nauc_ndcg_at_5_max value: 44.034600000000005 - type: nauc_ndcg_at_5_std value: -2.8813 - type: nauc_ndcg_at_5_diff1 value: 48.7767 - type: nauc_ndcg_at_10_max value: 45.0674 - type: nauc_ndcg_at_10_std value: -1.332 - type: nauc_ndcg_at_10_diff1 value: 48.448600000000006 - type: nauc_ndcg_at_20_max value: 45.6717 - type: nauc_ndcg_at_20_std value: 0.0107 - type: nauc_ndcg_at_20_diff1 value: 48.6492 - type: nauc_ndcg_at_100_max value: 45.974 - type: nauc_ndcg_at_100_std value: 1.1665999999999999 - type: nauc_ndcg_at_100_diff1 value: 48.9852 - type: nauc_ndcg_at_1000_max value: 46.0653 - type: nauc_ndcg_at_1000_std value: 0.7539 - type: nauc_ndcg_at_1000_diff1 value: 49.453399999999995 - type: nauc_map_at_1_max value: 39.5162 - type: nauc_map_at_1_std value: -4.4784 - type: nauc_map_at_1_diff1 value: 54.076 - type: nauc_map_at_3_max value: 42.022999999999996 - type: nauc_map_at_3_std value: -5.5131 - type: nauc_map_at_3_diff1 value: 50.727199999999996 - type: nauc_map_at_5_max value: 42.700700000000005 - type: nauc_map_at_5_std value: -4.3487 - type: nauc_map_at_5_diff1 value: 50.058499999999995 - type: nauc_map_at_10_max value: 43.4533 - type: nauc_map_at_10_std value: -3.3632000000000004 - type: nauc_map_at_10_diff1 value: 49.8247 - type: nauc_map_at_20_max value: 43.7821 - type: nauc_map_at_20_std value: -2.8057 - type: nauc_map_at_20_diff1 value: 49.8795 - type: nauc_map_at_100_max value: 43.9125 - type: nauc_map_at_100_std value: -2.5162 - type: nauc_map_at_100_diff1 value: 49.9437 - type: nauc_map_at_1000_max value: 43.9371 - type: nauc_map_at_1000_std value: -2.5118 - type: nauc_map_at_1000_diff1 value: 49.973600000000005 - type: nauc_recall_at_1_max value: 39.5162 - type: nauc_recall_at_1_std value: -4.4784 - type: nauc_recall_at_1_diff1 value: 54.076 - type: nauc_recall_at_3_max value: 40.1719 - type: nauc_recall_at_3_std value: -5.8908000000000005 - type: nauc_recall_at_3_diff1 value: 46.1075 - type: nauc_recall_at_5_max value: 41.3221 - type: nauc_recall_at_5_std value: -1.7418 - type: nauc_recall_at_5_diff1 value: 42.4571 - type: nauc_recall_at_10_max value: 44.1382 - type: nauc_recall_at_10_std value: 3.0869 - type: nauc_recall_at_10_diff1 value: 40.6674 - type: nauc_recall_at_20_max value: 47.0264 - type: nauc_recall_at_20_std value: 10.7409 - type: nauc_recall_at_20_diff1 value: 39.8838 - type: nauc_recall_at_100_max value: 49.660700000000006 - type: nauc_recall_at_100_std value: 26.1413 - type: nauc_recall_at_100_diff1 value: 38.1192 - type: nauc_recall_at_1000_max value: 58.9341 - type: nauc_recall_at_1000_std value: 47.4146 - type: nauc_recall_at_1000_diff1 value: 39.7378 - type: nauc_precision_at_1_max value: 45.521499999999996 - type: nauc_precision_at_1_std value: -3.2632000000000003 - type: nauc_precision_at_1_diff1 value: 55.017799999999994 - type: nauc_precision_at_3_max value: 41.9576 - type: nauc_precision_at_3_std value: 0.3431 - type: nauc_precision_at_3_diff1 value: 33.5013 - type: nauc_precision_at_5_max value: 41.024 - type: nauc_precision_at_5_std value: 6.962400000000001 - type: nauc_precision_at_5_diff1 value: 26.0905 - type: nauc_precision_at_10_max value: 38.4505 - type: nauc_precision_at_10_std value: 13.459 - type: nauc_precision_at_10_diff1 value: 18.2984 - type: nauc_precision_at_20_max value: 35.6898 - type: nauc_precision_at_20_std value: 19.7287 - type: nauc_precision_at_20_diff1 value: 12.3455 - type: nauc_precision_at_100_max 
value: 29.284 - type: nauc_precision_at_100_std value: 26.509100000000004 - type: nauc_precision_at_100_diff1 value: 4.118200000000001 - type: nauc_precision_at_1000_max value: 22.5188 - type: nauc_precision_at_1000_std value: 26.6978 - type: nauc_precision_at_1000_diff1 value: -2.4383 - type: nauc_mrr_at_1_max value: 45.521499999999996 - type: nauc_mrr_at_1_std value: -3.2632000000000003 - type: nauc_mrr_at_1_diff1 value: 55.017799999999994 - type: nauc_mrr_at_3_max value: 45.2583 - type: nauc_mrr_at_3_std value: -4.0796 - type: nauc_mrr_at_3_diff1 value: 51.3842 - type: nauc_mrr_at_5_max value: 45.683099999999996 - type: nauc_mrr_at_5_std value: -3.0403 - type: nauc_mrr_at_5_diff1 value: 50.928 - type: nauc_mrr_at_10_max value: 46.0254 - type: nauc_mrr_at_10_std value: -2.5618 - type: nauc_mrr_at_10_diff1 value: 50.9016 - type: nauc_mrr_at_20_max value: 46.1397 - type: nauc_mrr_at_20_std value: -2.2378 - type: nauc_mrr_at_20_diff1 value: 50.983900000000006 - type: nauc_mrr_at_100_max value: 46.0813 - type: nauc_mrr_at_100_std value: -2.1819 - type: nauc_mrr_at_100_diff1 value: 50.9924 - type: nauc_mrr_at_1000_max value: 46.075700000000005 - type: nauc_mrr_at_1000_std value: -2.2086 - type: nauc_mrr_at_1000_diff1 value: 51.004400000000004 - type: main_score value: 53.654999999999994 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval (default) type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: ndcg_at_1 value: 28.701 - type: ndcg_at_3 value: 35.095 - type: ndcg_at_5 value: 37.533 - type: ndcg_at_10 value: 40.224 - type: ndcg_at_20 value: 41.818 - type: ndcg_at_100 value: 44.651999999999994 - type: ndcg_at_1000 value: 47.05 - type: map_at_1 value: 26.251 - type: map_at_3 value: 32.49 - type: map_at_5 value: 33.931 - type: map_at_10 value: 35.154 - type: map_at_20 value: 35.641 - type: map_at_100 value: 36.032 - type: map_at_1000 value: 36.132 - type: recall_at_1 value: 26.251 - type: recall_at_3 value: 39.76 - type: recall_at_5 value: 45.739999999999995 - type: recall_at_10 value: 53.698 - type: recall_at_20 value: 59.48 - type: recall_at_100 value: 74.298 - type: recall_at_1000 value: 92.06299999999999 - type: precision_at_1 value: 28.701 - type: precision_at_3 value: 14.953 - type: precision_at_5 value: 10.328 - type: precision_at_10 value: 6.158 - type: precision_at_20 value: 3.469 - type: precision_at_100 value: 0.886 - type: precision_at_1000 value: 0.11199999999999999 - type: mrr_at_1 value: 28.700599999999998 - type: mrr_at_3 value: 34.9906 - type: mrr_at_5 value: 36.3917 - type: mrr_at_10 value: 37.4735 - type: mrr_at_20 value: 37.896 - type: mrr_at_100 value: 38.229600000000005 - type: mrr_at_1000 value: 38.3107 - type: nauc_ndcg_at_1_max value: 35.5663 - type: nauc_ndcg_at_1_std value: -11.130700000000001 - type: nauc_ndcg_at_1_diff1 value: 47.2971 - type: nauc_ndcg_at_3_max value: 33.591300000000004 - type: nauc_ndcg_at_3_std value: -8.8712 - type: nauc_ndcg_at_3_diff1 value: 43.9366 - type: nauc_ndcg_at_5_max value: 32.8546 - type: nauc_ndcg_at_5_std value: -7.764799999999999 - type: nauc_ndcg_at_5_diff1 value: 42.896699999999996 - type: nauc_ndcg_at_10_max value: 33.8862 - type: nauc_ndcg_at_10_std value: -5.8975 - type: nauc_ndcg_at_10_diff1 value: 42.0493 - type: nauc_ndcg_at_20_max value: 34.4891 - type: nauc_ndcg_at_20_std value: -4.7832 - type: nauc_ndcg_at_20_diff1 value: 41.857499999999995 - type: nauc_ndcg_at_100_max value: 34.2737 - type: nauc_ndcg_at_100_std value: 
-4.8904000000000005 - type: nauc_ndcg_at_100_diff1 value: 41.3476 - type: nauc_ndcg_at_1000_max value: 34.031800000000004 - type: nauc_ndcg_at_1000_std value: -5.5376 - type: nauc_ndcg_at_1000_diff1 value: 41.8603 - type: nauc_map_at_1_max value: 33.128299999999996 - type: nauc_map_at_1_std value: -12.1157 - type: nauc_map_at_1_diff1 value: 49.8448 - type: nauc_map_at_3_max value: 33.283699999999996 - type: nauc_map_at_3_std value: -9.7518 - type: nauc_map_at_3_diff1 value: 45.4875 - type: nauc_map_at_5_max value: 32.9355 - type: nauc_map_at_5_std value: -9.1755 - type: nauc_map_at_5_diff1 value: 44.8675 - type: nauc_map_at_10_max value: 33.5532 - type: nauc_map_at_10_std value: -8.3763 - type: nauc_map_at_10_diff1 value: 44.670700000000004 - type: nauc_map_at_20_max value: 33.8065 - type: nauc_map_at_20_std value: -8.0253 - type: nauc_map_at_20_diff1 value: 44.5987 - type: nauc_map_at_100_max value: 33.7647 - type: nauc_map_at_100_std value: -8.0399 - type: nauc_map_at_100_diff1 value: 44.5212 - type: nauc_map_at_1000_max value: 33.752700000000004 - type: nauc_map_at_1000_std value: -8.0557 - type: nauc_map_at_1000_diff1 value: 44.5285 - type: nauc_recall_at_1_max value: 33.128299999999996 - type: nauc_recall_at_1_std value: -12.1157 - type: nauc_recall_at_1_diff1 value: 49.8448 - type: nauc_recall_at_3_max value: 31.5403 - type: nauc_recall_at_3_std value: -6.862699999999999 - type: nauc_recall_at_3_diff1 value: 40.4438 - type: nauc_recall_at_5_max value: 29.549300000000002 - type: nauc_recall_at_5_std value: -4.8186 - type: nauc_recall_at_5_diff1 value: 37.7652 - type: nauc_recall_at_10_max value: 32.0106 - type: nauc_recall_at_10_std value: 1.1384999999999998 - type: nauc_recall_at_10_diff1 value: 34.4037 - type: nauc_recall_at_20_max value: 34.1547 - type: nauc_recall_at_20_std value: 6.0514 - type: nauc_recall_at_20_diff1 value: 33.4793 - type: nauc_recall_at_100_max value: 32.610099999999996 - type: nauc_recall_at_100_std value: 9.046899999999999 - type: nauc_recall_at_100_diff1 value: 27.256999999999998 - type: nauc_recall_at_1000_max value: 26.3079 - type: nauc_recall_at_1000_std value: 16.963900000000002 - type: nauc_recall_at_1000_diff1 value: 22.1857 - type: nauc_precision_at_1_max value: 35.5663 - type: nauc_precision_at_1_std value: -11.130700000000001 - type: nauc_precision_at_1_diff1 value: 47.2971 - type: nauc_precision_at_3_max value: 34.8919 - type: nauc_precision_at_3_std value: -4.6598 - type: nauc_precision_at_3_diff1 value: 36.1773 - type: nauc_precision_at_5_max value: 32.9054 - type: nauc_precision_at_5_std value: -2.0126999999999997 - type: nauc_precision_at_5_diff1 value: 32.6994 - type: nauc_precision_at_10_max value: 33.683600000000006 - type: nauc_precision_at_10_std value: 3.2531999999999996 - type: nauc_precision_at_10_diff1 value: 28.099800000000002 - type: nauc_precision_at_20_max value: 33.7297 - type: nauc_precision_at_20_std value: 7.0116 - type: nauc_precision_at_20_diff1 value: 23.663999999999998 - type: nauc_precision_at_100_max value: 26.119300000000003 - type: nauc_precision_at_100_std value: 7.8559 - type: nauc_precision_at_100_diff1 value: 9.9931 - type: nauc_precision_at_1000_max value: 11.0973 - type: nauc_precision_at_1000_std value: 4.6916 - type: nauc_precision_at_1000_diff1 value: -6.2033 - type: nauc_mrr_at_1_max value: 35.5663 - type: nauc_mrr_at_1_std value: -11.130700000000001 - type: nauc_mrr_at_1_diff1 value: 47.2971 - type: nauc_mrr_at_3_max value: 35.0322 - type: nauc_mrr_at_3_std value: -8.6242 - type: nauc_mrr_at_3_diff1 value: 
43.435 - type: nauc_mrr_at_5_max value: 34.796899999999994 - type: nauc_mrr_at_5_std value: -8.1215 - type: nauc_mrr_at_5_diff1 value: 42.9234 - type: nauc_mrr_at_10_max value: 35.0315 - type: nauc_mrr_at_10_std value: -7.4498 - type: nauc_mrr_at_10_diff1 value: 42.348 - type: nauc_mrr_at_20_max value: 35.0761 - type: nauc_mrr_at_20_std value: -7.246700000000001 - type: nauc_mrr_at_20_diff1 value: 42.3282 - type: nauc_mrr_at_100_max value: 35.0173 - type: nauc_mrr_at_100_std value: -7.269699999999999 - type: nauc_mrr_at_100_diff1 value: 42.306 - type: nauc_mrr_at_1000_max value: 35.015 - type: nauc_mrr_at_1000_std value: -7.2973 - type: nauc_mrr_at_1000_diff1 value: 42.3292 - type: main_score value: 40.224 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval (default) type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: ndcg_at_1 value: 20.398 - type: ndcg_at_3 value: 25.701 - type: ndcg_at_5 value: 27.503 - type: ndcg_at_10 value: 30.016 - type: ndcg_at_20 value: 31.941000000000003 - type: ndcg_at_100 value: 35.995 - type: ndcg_at_1000 value: 38.732 - type: map_at_1 value: 15.827 - type: map_at_3 value: 22.185 - type: map_at_5 value: 23.398 - type: map_at_10 value: 24.576 - type: map_at_20 value: 25.158 - type: map_at_100 value: 25.790000000000003 - type: map_at_1000 value: 25.906000000000002 - type: recall_at_1 value: 15.827 - type: recall_at_3 value: 29.404000000000003 - type: recall_at_5 value: 34.408 - type: recall_at_10 value: 41.802 - type: recall_at_20 value: 48.775 - type: recall_at_100 value: 68.643 - type: recall_at_1000 value: 88.022 - type: precision_at_1 value: 20.398 - type: precision_at_3 value: 12.769 - type: precision_at_5 value: 9.030000000000001 - type: precision_at_10 value: 5.684 - type: precision_at_20 value: 3.408 - type: precision_at_100 value: 1.004 - type: precision_at_1000 value: 0.13699999999999998 - type: mrr_at_1 value: 20.398 - type: mrr_at_3 value: 27.1144 - type: mrr_at_5 value: 28.4453 - type: mrr_at_10 value: 29.5935 - type: mrr_at_20 value: 30.0591 - type: mrr_at_100 value: 30.516399999999997 - type: mrr_at_1000 value: 30.5831 - type: nauc_ndcg_at_1_max value: 26.8727 - type: nauc_ndcg_at_1_std value: -2.0329 - type: nauc_ndcg_at_1_diff1 value: 28.792099999999998 - type: nauc_ndcg_at_3_max value: 29.210900000000002 - type: nauc_ndcg_at_3_std value: 1.357 - type: nauc_ndcg_at_3_diff1 value: 25.153399999999998 - type: nauc_ndcg_at_5_max value: 28.031499999999998 - type: nauc_ndcg_at_5_std value: 1.546 - type: nauc_ndcg_at_5_diff1 value: 23.6489 - type: nauc_ndcg_at_10_max value: 27.2909 - type: nauc_ndcg_at_10_std value: 1.8301 - type: nauc_ndcg_at_10_diff1 value: 21.7899 - type: nauc_ndcg_at_20_max value: 27.934900000000003 - type: nauc_ndcg_at_20_std value: 2.3472 - type: nauc_ndcg_at_20_diff1 value: 22.322 - type: nauc_ndcg_at_100_max value: 28.1958 - type: nauc_ndcg_at_100_std value: 3.5208000000000004 - type: nauc_ndcg_at_100_diff1 value: 23.156499999999998 - type: nauc_ndcg_at_1000_max value: 28.766000000000002 - type: nauc_ndcg_at_1000_std value: 3.4803 - type: nauc_ndcg_at_1000_diff1 value: 23.096600000000002 - type: nauc_map_at_1_max value: 26.271099999999997 - type: nauc_map_at_1_std value: -0.8499 - type: nauc_map_at_1_diff1 value: 32.0953 - type: nauc_map_at_3_max value: 28.1188 - type: nauc_map_at_3_std value: 0.42040000000000005 - type: nauc_map_at_3_diff1 value: 26.6573 - type: nauc_map_at_5_max value: 27.5138 - type: nauc_map_at_5_std 
value: 0.43010000000000004 - type: nauc_map_at_5_diff1 value: 25.6081 - type: nauc_map_at_10_max value: 27.313900000000004 - type: nauc_map_at_10_std value: 0.644 - type: nauc_map_at_10_diff1 value: 24.6459 - type: nauc_map_at_20_max value: 27.5519 - type: nauc_map_at_20_std value: 0.7802 - type: nauc_map_at_20_diff1 value: 24.7392 - type: nauc_map_at_100_max value: 27.717999999999996 - type: nauc_map_at_100_std value: 1.078 - type: nauc_map_at_100_diff1 value: 24.884500000000003 - type: nauc_map_at_1000_max value: 27.7366 - type: nauc_map_at_1000_std value: 1.0739 - type: nauc_map_at_1000_diff1 value: 24.9131 - type: nauc_recall_at_1_max value: 26.271099999999997 - type: nauc_recall_at_1_std value: -0.8499 - type: nauc_recall_at_1_diff1 value: 32.0953 - type: nauc_recall_at_3_max value: 28.034399999999998 - type: nauc_recall_at_3_std value: 2.7848 - type: nauc_recall_at_3_diff1 value: 21.845 - type: nauc_recall_at_5_max value: 25.510899999999996 - type: nauc_recall_at_5_std value: 3.2032 - type: nauc_recall_at_5_diff1 value: 18.1497 - type: nauc_recall_at_10_max value: 23.6985 - type: nauc_recall_at_10_std value: 4.2382 - type: nauc_recall_at_10_diff1 value: 13.4018 - type: nauc_recall_at_20_max value: 25.0105 - type: nauc_recall_at_20_std value: 6.2892 - type: nauc_recall_at_20_diff1 value: 14.6347 - type: nauc_recall_at_100_max value: 23.6484 - type: nauc_recall_at_100_std value: 12.826299999999998 - type: nauc_recall_at_100_diff1 value: 16.372999999999998 - type: nauc_recall_at_1000_max value: 34.1999 - type: nauc_recall_at_1000_std value: 26.1497 - type: nauc_recall_at_1000_diff1 value: 7.666199999999999 - type: nauc_precision_at_1_max value: 26.8727 - type: nauc_precision_at_1_std value: -2.0329 - type: nauc_precision_at_1_diff1 value: 28.792099999999998 - type: nauc_precision_at_3_max value: 31.689 - type: nauc_precision_at_3_std value: 4.5703000000000005 - type: nauc_precision_at_3_diff1 value: 20.0233 - type: nauc_precision_at_5_max value: 27.807 - type: nauc_precision_at_5_std value: 4.209899999999999 - type: nauc_precision_at_5_diff1 value: 15.3505 - type: nauc_precision_at_10_max value: 22.672800000000002 - type: nauc_precision_at_10_std value: 3.624 - type: nauc_precision_at_10_diff1 value: 8.4378 - type: nauc_precision_at_20_max value: 23.3401 - type: nauc_precision_at_20_std value: 3.6032 - type: nauc_precision_at_20_diff1 value: 9.2764 - type: nauc_precision_at_100_max value: 16.516000000000002 - type: nauc_precision_at_100_std value: 5.7479000000000005 - type: nauc_precision_at_100_diff1 value: 5.733499999999999 - type: nauc_precision_at_1000_max value: 6.1677 - type: nauc_precision_at_1000_std value: 0.4491 - type: nauc_precision_at_1000_diff1 value: 0.2477 - type: nauc_mrr_at_1_max value: 26.8727 - type: nauc_mrr_at_1_std value: -2.0329 - type: nauc_mrr_at_1_diff1 value: 28.792099999999998 - type: nauc_mrr_at_3_max value: 29.6131 - type: nauc_mrr_at_3_std value: 0.6053000000000001 - type: nauc_mrr_at_3_diff1 value: 25.8043 - type: nauc_mrr_at_5_max value: 29.0205 - type: nauc_mrr_at_5_std value: 0.8692 - type: nauc_mrr_at_5_diff1 value: 24.8413 - type: nauc_mrr_at_10_max value: 28.459400000000002 - type: nauc_mrr_at_10_std value: 0.5887 - type: nauc_mrr_at_10_diff1 value: 24.364 - type: nauc_mrr_at_20_max value: 28.5242 - type: nauc_mrr_at_20_std value: 0.6396 - type: nauc_mrr_at_20_diff1 value: 24.4579 - type: nauc_mrr_at_100_max value: 28.540599999999998 - type: nauc_mrr_at_100_std value: 0.7425 - type: nauc_mrr_at_100_diff1 value: 24.5761 - type: nauc_mrr_at_1000_max 
value: 28.5429 - type: nauc_mrr_at_1000_std value: 0.7348 - type: nauc_mrr_at_1000_diff1 value: 24.562800000000003 - type: main_score value: 30.016 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval (default) type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: ndcg_at_1 value: 34.937000000000005 - type: ndcg_at_3 value: 39.366 - type: ndcg_at_5 value: 41.980000000000004 - type: ndcg_at_10 value: 44.674 - type: ndcg_at_20 value: 46.671 - type: ndcg_at_100 value: 50.041999999999994 - type: ndcg_at_1000 value: 52.120999999999995 - type: map_at_1 value: 27.750000000000004 - type: map_at_3 value: 35.027 - type: map_at_5 value: 36.952 - type: map_at_10 value: 38.382 - type: map_at_20 value: 39.072 - type: map_at_100 value: 39.694 - type: map_at_1000 value: 39.81 - type: recall_at_1 value: 27.750000000000004 - type: recall_at_3 value: 42.321999999999996 - type: recall_at_5 value: 49.209 - type: recall_at_10 value: 57.282 - type: recall_at_20 value: 64.30399999999999 - type: recall_at_100 value: 80.143 - type: recall_at_1000 value: 93.664 - type: precision_at_1 value: 34.937000000000005 - type: precision_at_3 value: 18.993 - type: precision_at_5 value: 13.648 - type: precision_at_10 value: 8.412 - type: precision_at_20 value: 4.885 - type: precision_at_100 value: 1.302 - type: precision_at_1000 value: 0.167 - type: mrr_at_1 value: 34.937400000000004 - type: mrr_at_3 value: 41.7389 - type: mrr_at_5 value: 43.4184 - type: mrr_at_10 value: 44.4776 - type: mrr_at_20 value: 44.8859 - type: mrr_at_100 value: 45.2197 - type: mrr_at_1000 value: 45.2704 - type: nauc_ndcg_at_1_max value: 41.1314 - type: nauc_ndcg_at_1_std value: 0.6393 - type: nauc_ndcg_at_1_diff1 value: 52.494 - type: nauc_ndcg_at_3_max value: 38.8915 - type: nauc_ndcg_at_3_std value: -1.1358 - type: nauc_ndcg_at_3_diff1 value: 48.8256 - type: nauc_ndcg_at_5_max value: 38.6924 - type: nauc_ndcg_at_5_std value: -2.2843999999999998 - type: nauc_ndcg_at_5_diff1 value: 47.9194 - type: nauc_ndcg_at_10_max value: 37.8751 - type: nauc_ndcg_at_10_std value: -1.5187000000000002 - type: nauc_ndcg_at_10_diff1 value: 46.455400000000004 - type: nauc_ndcg_at_20_max value: 38.1022 - type: nauc_ndcg_at_20_std value: -0.7692 - type: nauc_ndcg_at_20_diff1 value: 46.5041 - type: nauc_ndcg_at_100_max value: 40.396100000000004 - type: nauc_ndcg_at_100_std value: 1.8087 - type: nauc_ndcg_at_100_diff1 value: 47.2332 - type: nauc_ndcg_at_1000_max value: 40.2539 - type: nauc_ndcg_at_1000_std value: 2.1609 - type: nauc_ndcg_at_1000_diff1 value: 47.185700000000004 - type: nauc_map_at_1_max value: 34.3255 - type: nauc_map_at_1_std value: -6.783599999999999 - type: nauc_map_at_1_diff1 value: 54.6668 - type: nauc_map_at_3_max value: 36.5777 - type: nauc_map_at_3_std value: -3.8482000000000003 - type: nauc_map_at_3_diff1 value: 50.1703 - type: nauc_map_at_5_max value: 37.229 - type: nauc_map_at_5_std value: -3.9170000000000003 - type: nauc_map_at_5_diff1 value: 49.5882 - type: nauc_map_at_10_max value: 37.318400000000004 - type: nauc_map_at_10_std value: -3.2477 - type: nauc_map_at_10_diff1 value: 48.8387 - type: nauc_map_at_20_max value: 37.5075 - type: nauc_map_at_20_std value: -2.8737 - type: nauc_map_at_20_diff1 value: 48.896699999999996 - type: nauc_map_at_100_max value: 37.965199999999996 - type: nauc_map_at_100_std value: -2.3644 - type: nauc_map_at_100_diff1 value: 48.9583 - type: nauc_map_at_1000_max value: 37.9824 - type: nauc_map_at_1000_std value: -2.2945 - type: 
nauc_map_at_1000_diff1 value: 48.9472 - type: nauc_recall_at_1_max value: 34.3255 - type: nauc_recall_at_1_std value: -6.783599999999999 - type: nauc_recall_at_1_diff1 value: 54.6668 - type: nauc_recall_at_3_max value: 33.823100000000004 - type: nauc_recall_at_3_std value: -3.7593 - type: nauc_recall_at_3_diff1 value: 44.3225 - type: nauc_recall_at_5_max value: 34.271499999999996 - type: nauc_recall_at_5_std value: -4.8704 - type: nauc_recall_at_5_diff1 value: 41.3594 - type: nauc_recall_at_10_max value: 32.2652 - type: nauc_recall_at_10_std value: -1.5755000000000001 - type: nauc_recall_at_10_diff1 value: 35.9057 - type: nauc_recall_at_20_max value: 32.1614 - type: nauc_recall_at_20_std value: 0.8789 - type: nauc_recall_at_20_diff1 value: 34.6074 - type: nauc_recall_at_100_max value: 44.527499999999996 - type: nauc_recall_at_100_std value: 17.735500000000002 - type: nauc_recall_at_100_diff1 value: 36.446 - type: nauc_recall_at_1000_max value: 47.751 - type: nauc_recall_at_1000_std value: 41.8399 - type: nauc_recall_at_1000_diff1 value: 26.7075 - type: nauc_precision_at_1_max value: 41.1314 - type: nauc_precision_at_1_std value: 0.6393 - type: nauc_precision_at_1_diff1 value: 52.494 - type: nauc_precision_at_3_max value: 40.7504 - type: nauc_precision_at_3_std value: 8.6914 - type: nauc_precision_at_3_diff1 value: 34.590900000000005 - type: nauc_precision_at_5_max value: 38.5891 - type: nauc_precision_at_5_std value: 8.7898 - type: nauc_precision_at_5_diff1 value: 27.122200000000003 - type: nauc_precision_at_10_max value: 32.5422 - type: nauc_precision_at_10_std value: 13.9757 - type: nauc_precision_at_10_diff1 value: 15.504000000000001 - type: nauc_precision_at_20_max value: 28.212799999999998 - type: nauc_precision_at_20_std value: 17.0921 - type: nauc_precision_at_20_diff1 value: 10.264800000000001 - type: nauc_precision_at_100_max value: 23.9818 - type: nauc_precision_at_100_std value: 24.7802 - type: nauc_precision_at_100_diff1 value: -0.1275 - type: nauc_precision_at_1000_max value: 11.8968 - type: nauc_precision_at_1000_std value: 24.0201 - type: nauc_precision_at_1000_diff1 value: -12.1507 - type: nauc_mrr_at_1_max value: 41.1314 - type: nauc_mrr_at_1_std value: 0.6393 - type: nauc_mrr_at_1_diff1 value: 52.494 - type: nauc_mrr_at_3_max value: 41.0145 - type: nauc_mrr_at_3_std value: 1.7641 - type: nauc_mrr_at_3_diff1 value: 49.3663 - type: nauc_mrr_at_5_max value: 41.4664 - type: nauc_mrr_at_5_std value: 1.6695000000000002 - type: nauc_mrr_at_5_diff1 value: 49.0033 - type: nauc_mrr_at_10_max value: 41.2351 - type: nauc_mrr_at_10_std value: 2.0388 - type: nauc_mrr_at_10_diff1 value: 48.7703 - type: nauc_mrr_at_20_max value: 41.2064 - type: nauc_mrr_at_20_std value: 2.081 - type: nauc_mrr_at_20_diff1 value: 48.6787 - type: nauc_mrr_at_100_max value: 41.3966 - type: nauc_mrr_at_100_std value: 2.2723 - type: nauc_mrr_at_100_diff1 value: 48.746 - type: nauc_mrr_at_1000_max value: 41.3803 - type: nauc_mrr_at_1000_std value: 2.2632 - type: nauc_mrr_at_1000_diff1 value: 48.7541 - type: main_score value: 44.674 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval (default) type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: ndcg_at_1 value: 29.909000000000002 - type: ndcg_at_3 value: 35.056 - type: ndcg_at_5 value: 37.076 - type: ndcg_at_10 value: 40.093 - type: ndcg_at_20 value: 42.254999999999995 - type: ndcg_at_100 value: 45.692 - type: ndcg_at_1000 value: 48.204 - type: map_at_1 value: 
24.68 - type: map_at_3 value: 31.379 - type: map_at_5 value: 32.92 - type: map_at_10 value: 34.391 - type: map_at_20 value: 35.109 - type: map_at_100 value: 35.686 - type: map_at_1000 value: 35.804 - type: recall_at_1 value: 24.68 - type: recall_at_3 value: 38.190000000000005 - type: recall_at_5 value: 43.519999999999996 - type: recall_at_10 value: 52.364999999999995 - type: recall_at_20 value: 60.02499999999999 - type: recall_at_100 value: 76.229 - type: recall_at_1000 value: 93.31099999999999 - type: precision_at_1 value: 29.909000000000002 - type: precision_at_3 value: 16.667 - type: precision_at_5 value: 11.781 - type: precision_at_10 value: 7.340000000000001 - type: precision_at_20 value: 4.315 - type: precision_at_100 value: 1.18 - type: precision_at_1000 value: 0.158 - type: mrr_at_1 value: 29.9087 - type: mrr_at_3 value: 36.6438 - type: mrr_at_5 value: 37.939499999999995 - type: mrr_at_10 value: 39.1699 - type: mrr_at_20 value: 39.6872 - type: mrr_at_100 value: 40.0648 - type: mrr_at_1000 value: 40.1254 - type: nauc_ndcg_at_1_max value: 37.3397 - type: nauc_ndcg_at_1_std value: 5.9699 - type: nauc_ndcg_at_1_diff1 value: 46.6563 - type: nauc_ndcg_at_3_max value: 39.0153 - type: nauc_ndcg_at_3_std value: 8.5756 - type: nauc_ndcg_at_3_diff1 value: 41.2988 - type: nauc_ndcg_at_5_max value: 39.4932 - type: nauc_ndcg_at_5_std value: 9.4963 - type: nauc_ndcg_at_5_diff1 value: 40.0798 - type: nauc_ndcg_at_10_max value: 40.0787 - type: nauc_ndcg_at_10_std value: 10.312100000000001 - type: nauc_ndcg_at_10_diff1 value: 39.6584 - type: nauc_ndcg_at_20_max value: 40.9003 - type: nauc_ndcg_at_20_std value: 11.991100000000001 - type: nauc_ndcg_at_20_diff1 value: 39.4373 - type: nauc_ndcg_at_100_max value: 41.4069 - type: nauc_ndcg_at_100_std value: 13.6103 - type: nauc_ndcg_at_100_diff1 value: 40.0088 - type: nauc_ndcg_at_1000_max value: 41.505900000000004 - type: nauc_ndcg_at_1000_std value: 12.742400000000002 - type: nauc_ndcg_at_1000_diff1 value: 40.1457 - type: nauc_map_at_1_max value: 34.739 - type: nauc_map_at_1_std value: 0.9294 - type: nauc_map_at_1_diff1 value: 48.1138 - type: nauc_map_at_3_max value: 37.0441 - type: nauc_map_at_3_std value: 5.5666 - type: nauc_map_at_3_diff1 value: 42.7429 - type: nauc_map_at_5_max value: 37.891799999999996 - type: nauc_map_at_5_std value: 6.7185999999999995 - type: nauc_map_at_5_diff1 value: 41.9849 - type: nauc_map_at_10_max value: 38.556000000000004 - type: nauc_map_at_10_std value: 7.4627 - type: nauc_map_at_10_diff1 value: 41.8061 - type: nauc_map_at_20_max value: 38.8822 - type: nauc_map_at_20_std value: 8.0747 - type: nauc_map_at_20_diff1 value: 41.7518 - type: nauc_map_at_100_max value: 39.0912 - type: nauc_map_at_100_std value: 8.4627 - type: nauc_map_at_100_diff1 value: 41.8958 - type: nauc_map_at_1000_max value: 39.112700000000004 - type: nauc_map_at_1000_std value: 8.4459 - type: nauc_map_at_1000_diff1 value: 41.903400000000005 - type: nauc_recall_at_1_max value: 34.739 - type: nauc_recall_at_1_std value: 0.9294 - type: nauc_recall_at_1_diff1 value: 48.1138 - type: nauc_recall_at_3_max value: 37.3971 - type: nauc_recall_at_3_std value: 9.2075 - type: nauc_recall_at_3_diff1 value: 36.4624 - type: nauc_recall_at_5_max value: 38.1516 - type: nauc_recall_at_5_std value: 11.5318 - type: nauc_recall_at_5_diff1 value: 33.3421 - type: nauc_recall_at_10_max value: 38.8221 - type: nauc_recall_at_10_std value: 14.0268 - type: nauc_recall_at_10_diff1 value: 31.4088 - type: nauc_recall_at_20_max value: 40.9493 - type: nauc_recall_at_20_std value: 20.2136 
- type: nauc_recall_at_20_diff1 value: 29.9447 - type: nauc_recall_at_100_max value: 43.149300000000004 - type: nauc_recall_at_100_std value: 33.7709 - type: nauc_recall_at_100_diff1 value: 29.3082 - type: nauc_recall_at_1000_max value: 55.435500000000005 - type: nauc_recall_at_1000_std value: 51.8958 - type: nauc_recall_at_1000_diff1 value: 19.3816 - type: nauc_precision_at_1_max value: 37.3397 - type: nauc_precision_at_1_std value: 5.9699 - type: nauc_precision_at_1_diff1 value: 46.6563 - type: nauc_precision_at_3_max value: 40.3693 - type: nauc_precision_at_3_std value: 17.0552 - type: nauc_precision_at_3_diff1 value: 29.498400000000004 - type: nauc_precision_at_5_max value: 39.7607 - type: nauc_precision_at_5_std value: 20.274 - type: nauc_precision_at_5_diff1 value: 23.061300000000003 - type: nauc_precision_at_10_max value: 38.0299 - type: nauc_precision_at_10_std value: 22.256899999999998 - type: nauc_precision_at_10_diff1 value: 17.0507 - type: nauc_precision_at_20_max value: 36.0867 - type: nauc_precision_at_20_std value: 25.936700000000002 - type: nauc_precision_at_20_diff1 value: 12.1754 - type: nauc_precision_at_100_max value: 24.1493 - type: nauc_precision_at_100_std value: 23.8361 - type: nauc_precision_at_100_diff1 value: 5.2714 - type: nauc_precision_at_1000_max value: 7.033499999999999 - type: nauc_precision_at_1000_std value: 9.0198 - type: nauc_precision_at_1000_diff1 value: -4.8427999999999995 - type: nauc_mrr_at_1_max value: 37.3397 - type: nauc_mrr_at_1_std value: 5.9699 - type: nauc_mrr_at_1_diff1 value: 46.6563 - type: nauc_mrr_at_3_max value: 40.2205 - type: nauc_mrr_at_3_std value: 9.8833 - type: nauc_mrr_at_3_diff1 value: 42.3963 - type: nauc_mrr_at_5_max value: 40.1911 - type: nauc_mrr_at_5_std value: 10.3282 - type: nauc_mrr_at_5_diff1 value: 41.796499999999995 - type: nauc_mrr_at_10_max value: 40.3748 - type: nauc_mrr_at_10_std value: 10.567699999999999 - type: nauc_mrr_at_10_diff1 value: 41.643299999999996 - type: nauc_mrr_at_20_max value: 40.4527 - type: nauc_mrr_at_20_std value: 10.8016 - type: nauc_mrr_at_20_diff1 value: 41.594300000000004 - type: nauc_mrr_at_100_max value: 40.395199999999996 - type: nauc_mrr_at_100_std value: 10.8396 - type: nauc_mrr_at_100_diff1 value: 41.706700000000005 - type: nauc_mrr_at_1000_max value: 40.3932 - type: nauc_mrr_at_1000_std value: 10.8097 - type: nauc_mrr_at_1000_diff1 value: 41.7124 - type: main_score value: 40.093 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval (default) type: CQADupstackRetrieval_is_a_combined_dataset config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: ndcg_at_1 value: 30.19958333333333 - type: ndcg_at_3 value: 35.01541666666667 - type: ndcg_at_5 value: 37.22058333333334 - type: ndcg_at_10 value: 39.84525000000001 - type: ndcg_at_20 value: 41.81666666666667 - type: ndcg_at_100 value: 44.973 - type: ndcg_at_1000 value: 47.338583333333325 - type: map_at_1 value: 25.296916666666668 - type: map_at_3 value: 31.593166666666665 - type: map_at_5 value: 33.145916666666665 - type: map_at_10 value: 34.45275 - type: map_at_20 value: 35.10883333333334 - type: map_at_100 value: 35.647499999999994 - type: map_at_1000 value: 35.768166666666666 - type: recall_at_1 value: 25.296916666666668 - type: recall_at_3 value: 38.05166666666666 - type: recall_at_5 value: 43.82625 - type: recall_at_10 value: 51.58916666666668 - type: recall_at_20 value: 58.77308333333334 - type: recall_at_100 value: 74.15658333333333 - type: recall_at_1000 value: 90.51333333333335 - 
type: precision_at_1 value: 30.19958333333333 - type: precision_at_3 value: 16.167999999999996 - type: precision_at_5 value: 11.49225 - type: precision_at_10 value: 7.057666666666666 - type: precision_at_20 value: 4.174083333333333 - type: precision_at_100 value: 1.1363333333333332 - type: precision_at_1000 value: 0.15383333333333332 - type: mrr_at_1 value: 30.199658333333335 - type: mrr_at_3 value: 36.21564166666667 - type: mrr_at_5 value: 37.627291666666665 - type: mrr_at_10 value: 38.70535 - type: mrr_at_20 value: 39.193799999999996 - type: mrr_at_100 value: 39.55041666666666 - type: mrr_at_1000 value: 39.61140833333333 - type: nauc_ndcg_at_1_max value: 39.3715 - type: nauc_ndcg_at_1_std value: -1.2167000000000008 - type: nauc_ndcg_at_1_diff1 value: 47.05770833333333 - type: nauc_ndcg_at_3_max value: 38.67278333333333 - type: nauc_ndcg_at_3_std value: -0.10360000000000005 - type: nauc_ndcg_at_3_diff1 value: 42.23506666666667 - type: nauc_ndcg_at_5_max value: 38.421591666666664 - type: nauc_ndcg_at_5_std value: 0.9004833333333335 - type: nauc_ndcg_at_5_diff1 value: 41.46895 - type: nauc_ndcg_at_10_max value: 38.31713333333333 - type: nauc_ndcg_at_10_std value: 1.6739333333333335 - type: nauc_ndcg_at_10_diff1 value: 40.52259166666667 - type: nauc_ndcg_at_20_max value: 38.61266666666667 - type: nauc_ndcg_at_20_std value: 2.7783666666666673 - type: nauc_ndcg_at_20_diff1 value: 40.28085833333333 - type: nauc_ndcg_at_100_max value: 39.27558333333333 - type: nauc_ndcg_at_100_std value: 3.9398000000000004 - type: nauc_ndcg_at_100_diff1 value: 40.39787499999999 - type: nauc_ndcg_at_1000_max value: 39.44075 - type: nauc_ndcg_at_1000_std value: 3.9607833333333327 - type: nauc_ndcg_at_1000_diff1 value: 40.683225 - type: nauc_map_at_1_max value: 35.66645 - type: nauc_map_at_1_std value: -4.276391666666667 - type: nauc_map_at_1_diff1 value: 48.810141666666674 - type: nauc_map_at_3_max value: 37.424108333333336 - type: nauc_map_at_3_std value: -2.064866666666667 - type: nauc_map_at_3_diff1 value: 44.115075 - type: nauc_map_at_5_max value: 37.693016666666665 - type: nauc_map_at_5_std value: -1.1872749999999994 - type: nauc_map_at_5_diff1 value: 43.554458333333336 - type: nauc_map_at_10_max value: 37.9333 - type: nauc_map_at_10_std value: -0.6246583333333332 - type: nauc_map_at_10_diff1 value: 43.05175 - type: nauc_map_at_20_max value: 38.11316666666667 - type: nauc_map_at_20_std value: -0.17139166666666622 - type: nauc_map_at_20_diff1 value: 42.929925000000004 - type: nauc_map_at_100_max value: 38.296825 - type: nauc_map_at_100_std value: 0.1448500000000002 - type: nauc_map_at_100_diff1 value: 42.91681666666667 - type: nauc_map_at_1000_max value: 38.308891666666675 - type: nauc_map_at_1000_std value: 0.17599166666666682 - type: nauc_map_at_1000_diff1 value: 42.91478333333333 - type: nauc_recall_at_1_max value: 35.66645 - type: nauc_recall_at_1_std value: -4.276391666666667 - type: nauc_recall_at_1_diff1 value: 48.810141666666674 - type: nauc_recall_at_3_max value: 36.144949999999994 - type: nauc_recall_at_3_std value: -0.07622500000000007 - type: nauc_recall_at_3_diff1 value: 38.39805833333333 - type: nauc_recall_at_5_max value: 35.599016666666664 - type: nauc_recall_at_5_std value: 2.6147583333333335 - type: nauc_recall_at_5_diff1 value: 35.84809166666666 - type: nauc_recall_at_10_max value: 34.73115833333333 - type: nauc_recall_at_10_std value: 5.2187166666666664 - type: nauc_recall_at_10_diff1 value: 32.22850833333333 - type: nauc_recall_at_20_max value: 35.11221666666667 - type: 
nauc_recall_at_20_std value: 9.564958333333331 - type: nauc_recall_at_20_diff1 value: 30.415991666666663 - type: nauc_recall_at_100_max value: 37.735958333333336 - type: nauc_recall_at_100_std value: 19.1386 - type: nauc_recall_at_100_diff1 value: 28.129675 - type: nauc_recall_at_1000_max value: 43.71879166666667 - type: nauc_recall_at_1000_std value: 39.80074166666667 - type: nauc_recall_at_1000_diff1 value: 23.800666666666668 - type: nauc_precision_at_1_max value: 39.3715 - type: nauc_precision_at_1_std value: -1.2167000000000008 - type: nauc_precision_at_1_diff1 value: 47.05770833333333 - type: nauc_precision_at_3_max value: 39.00785833333333 - type: nauc_precision_at_3_std value: 5.753050000000001 - type: nauc_precision_at_3_diff1 value: 31.4196 - type: nauc_precision_at_5_max value: 36.98677500000001 - type: nauc_precision_at_5_std value: 9.464608333333333 - type: nauc_precision_at_5_diff1 value: 25.906116666666662 - type: nauc_precision_at_10_max value: 33.26575833333333 - type: nauc_precision_at_10_std value: 12.540025 - type: nauc_precision_at_10_diff1 value: 18.274116666666668 - type: nauc_precision_at_20_max value: 30.13705833333334 - type: nauc_precision_at_20_std value: 16.549291666666665 - type: nauc_precision_at_20_diff1 value: 12.541983333333334 - type: nauc_precision_at_100_max value: 22.078525000000003 - type: nauc_precision_at_100_std value: 19.263416666666664 - type: nauc_precision_at_100_diff1 value: 2.293625 - type: nauc_precision_at_1000_max value: 8.336641666666667 - type: nauc_precision_at_1000_std value: 14.828683333333334 - type: nauc_precision_at_1000_diff1 value: -8.852525 - type: nauc_mrr_at_1_max value: 39.3715 - type: nauc_mrr_at_1_std value: -1.2167000000000008 - type: nauc_mrr_at_1_diff1 value: 47.05770833333333 - type: nauc_mrr_at_3_max value: 39.90615 - type: nauc_mrr_at_3_std value: 0.7366500000000004 - type: nauc_mrr_at_3_diff1 value: 42.96046666666666 - type: nauc_mrr_at_5_max value: 39.78708333333334 - type: nauc_mrr_at_5_std value: 1.3970916666666666 - type: nauc_mrr_at_5_diff1 value: 42.44258333333333 - type: nauc_mrr_at_10_max value: 39.65595 - type: nauc_mrr_at_10_std value: 1.6633916666666666 - type: nauc_mrr_at_10_diff1 value: 42.084358333333334 - type: nauc_mrr_at_20_max value: 39.67735 - type: nauc_mrr_at_20_std value: 1.8360749999999995 - type: nauc_mrr_at_20_diff1 value: 42.04530833333333 - type: nauc_mrr_at_100_max value: 39.71681666666667 - type: nauc_mrr_at_100_std value: 1.8971666666666671 - type: nauc_mrr_at_100_diff1 value: 42.075141666666674 - type: nauc_mrr_at_1000_max value: 39.72038333333334 - type: nauc_mrr_at_1000_std value: 1.8916749999999996 - type: nauc_mrr_at_1000_diff1 value: 42.091208333333334 - type: main_score value: 39.84525000000001 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval (default) type: CQADupstackRetrieval_is_a_combined_dataset config: default split: test revision: CQADupstackRetrieval_is_a_combined_dataset metrics: - type: main_score value: 39.84525000000001 - type: ndcg_at_10 value: 39.84525000000001 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval (default) type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: ndcg_at_1 value: 27.454 - type: ndcg_at_3 value: 31.464 - type: ndcg_at_5 value: 33.533 - type: ndcg_at_10 value: 35.477 - type: ndcg_at_20 value: 37.092999999999996 - type: ndcg_at_100 value: 39.808 - type: ndcg_at_1000 value: 42.309000000000005 - type: map_at_1 value: 24.489 - type: map_at_3 
value: 29.204 - type: map_at_5 value: 30.496000000000002 - type: map_at_10 value: 31.415 - type: map_at_20 value: 31.897 - type: map_at_100 value: 32.259 - type: map_at_1000 value: 32.361000000000004 - type: recall_at_1 value: 24.489 - type: recall_at_3 value: 34.333999999999996 - type: recall_at_5 value: 39.550999999999995 - type: recall_at_10 value: 45.275999999999996 - type: recall_at_20 value: 51.241 - type: recall_at_100 value: 65.398 - type: recall_at_1000 value: 83.685 - type: precision_at_1 value: 27.454 - type: precision_at_3 value: 13.344000000000001 - type: precision_at_5 value: 9.417 - type: precision_at_10 value: 5.567 - type: precision_at_20 value: 3.221 - type: precision_at_100 value: 0.845 - type: precision_at_1000 value: 0.11499999999999999 - type: mrr_at_1 value: 27.454 - type: mrr_at_3 value: 32.1063 - type: mrr_at_5 value: 33.2797 - type: mrr_at_10 value: 34.0563 - type: mrr_at_20 value: 34.4952 - type: mrr_at_100 value: 34.8327 - type: mrr_at_1000 value: 34.9002 - type: nauc_ndcg_at_1_max value: 45.7913 - type: nauc_ndcg_at_1_std value: 10.6304 - type: nauc_ndcg_at_1_diff1 value: 51.58160000000001 - type: nauc_ndcg_at_3_max value: 42.992599999999996 - type: nauc_ndcg_at_3_std value: 10.1454 - type: nauc_ndcg_at_3_diff1 value: 45.330799999999996 - type: nauc_ndcg_at_5_max value: 43.081399999999995 - type: nauc_ndcg_at_5_std value: 11.7829 - type: nauc_ndcg_at_5_diff1 value: 45.8734 - type: nauc_ndcg_at_10_max value: 45.2554 - type: nauc_ndcg_at_10_std value: 14.2953 - type: nauc_ndcg_at_10_diff1 value: 45.908 - type: nauc_ndcg_at_20_max value: 45.7565 - type: nauc_ndcg_at_20_std value: 15.1327 - type: nauc_ndcg_at_20_diff1 value: 45.512 - type: nauc_ndcg_at_100_max value: 45.602599999999995 - type: nauc_ndcg_at_100_std value: 15.6507 - type: nauc_ndcg_at_100_diff1 value: 44.3626 - type: nauc_ndcg_at_1000_max value: 45.6835 - type: nauc_ndcg_at_1000_std value: 16.3352 - type: nauc_ndcg_at_1000_diff1 value: 44.9838 - type: nauc_map_at_1_max value: 41.989900000000006 - type: nauc_map_at_1_std value: 5.3356 - type: nauc_map_at_1_diff1 value: 52.711200000000005 - type: nauc_map_at_3_max value: 42.363 - type: nauc_map_at_3_std value: 8.1615 - type: nauc_map_at_3_diff1 value: 47.1827 - type: nauc_map_at_5_max value: 42.6039 - type: nauc_map_at_5_std value: 9.500300000000001 - type: nauc_map_at_5_diff1 value: 47.4177 - type: nauc_map_at_10_max value: 43.703399999999995 - type: nauc_map_at_10_std value: 10.729 - type: nauc_map_at_10_diff1 value: 47.4334 - type: nauc_map_at_20_max value: 43.9336 - type: nauc_map_at_20_std value: 11.0612 - type: nauc_map_at_20_diff1 value: 47.321600000000004 - type: nauc_map_at_100_max value: 43.978899999999996 - type: nauc_map_at_100_std value: 11.148299999999999 - type: nauc_map_at_100_diff1 value: 47.1738 - type: nauc_map_at_1000_max value: 43.985400000000006 - type: nauc_map_at_1000_std value: 11.1754 - type: nauc_map_at_1000_diff1 value: 47.197 - type: nauc_recall_at_1_max value: 41.989900000000006 - type: nauc_recall_at_1_std value: 5.3356 - type: nauc_recall_at_1_diff1 value: 52.711200000000005 - type: nauc_recall_at_3_max value: 40.8671 - type: nauc_recall_at_3_std value: 9.4511 - type: nauc_recall_at_3_diff1 value: 41.2041 - type: nauc_recall_at_5_max value: 40.9279 - type: nauc_recall_at_5_std value: 13.688600000000001 - type: nauc_recall_at_5_diff1 value: 41.9126 - type: nauc_recall_at_10_max value: 46.1436 - type: nauc_recall_at_10_std value: 20.8837 - type: nauc_recall_at_10_diff1 value: 41.0814 - type: nauc_recall_at_20_max value: 
47.245599999999996 - type: nauc_recall_at_20_std value: 23.405 - type: nauc_recall_at_20_diff1 value: 38.864599999999996 - type: nauc_recall_at_100_max value: 45.457 - type: nauc_recall_at_100_std value: 28.075 - type: nauc_recall_at_100_diff1 value: 30.213600000000003 - type: nauc_recall_at_1000_max value: 48.8291 - type: nauc_recall_at_1000_std value: 47.8416 - type: nauc_recall_at_1000_diff1 value: 30.387199999999996 - type: nauc_precision_at_1_max value: 45.7913 - type: nauc_precision_at_1_std value: 10.6304 - type: nauc_precision_at_1_diff1 value: 51.58160000000001 - type: nauc_precision_at_3_max value: 44.710899999999995 - type: nauc_precision_at_3_std value: 17.7458 - type: nauc_precision_at_3_diff1 value: 36.7588 - type: nauc_precision_at_5_max value: 44.0582 - type: nauc_precision_at_5_std value: 22.7864 - type: nauc_precision_at_5_diff1 value: 35.3597 - type: nauc_precision_at_10_max value: 45.5849 - type: nauc_precision_at_10_std value: 28.758899999999997 - type: nauc_precision_at_10_diff1 value: 30.3452 - type: nauc_precision_at_20_max value: 43.6996 - type: nauc_precision_at_20_std value: 30.314799999999998 - type: nauc_precision_at_20_diff1 value: 25.916299999999996 - type: nauc_precision_at_100_max value: 33.6976 - type: nauc_precision_at_100_std value: 28.7876 - type: nauc_precision_at_100_diff1 value: 11.670300000000001 - type: nauc_precision_at_1000_max value: 14.089599999999999 - type: nauc_precision_at_1000_std value: 23.8288 - type: nauc_precision_at_1000_diff1 value: -1.8387 - type: nauc_mrr_at_1_max value: 45.7913 - type: nauc_mrr_at_1_std value: 10.6304 - type: nauc_mrr_at_1_diff1 value: 51.58160000000001 - type: nauc_mrr_at_3_max value: 45.5677 - type: nauc_mrr_at_3_std value: 12.692800000000002 - type: nauc_mrr_at_3_diff1 value: 46.578599999999994 - type: nauc_mrr_at_5_max value: 45.4634 - type: nauc_mrr_at_5_std value: 13.386999999999999 - type: nauc_mrr_at_5_diff1 value: 46.7306 - type: nauc_mrr_at_10_max value: 46.1532 - type: nauc_mrr_at_10_std value: 14.3297 - type: nauc_mrr_at_10_diff1 value: 46.6835 - type: nauc_mrr_at_20_max value: 46.1552 - type: nauc_mrr_at_20_std value: 14.492099999999999 - type: nauc_mrr_at_20_diff1 value: 46.611000000000004 - type: nauc_mrr_at_100_max value: 46.1171 - type: nauc_mrr_at_100_std value: 14.4984 - type: nauc_mrr_at_100_diff1 value: 46.4837 - type: nauc_mrr_at_1000_max value: 46.1231 - type: nauc_mrr_at_1000_std value: 14.516000000000002 - type: nauc_mrr_at_1000_diff1 value: 46.5135 - type: main_score value: 35.477 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval (default) type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: ndcg_at_1 value: 21.266 - type: ndcg_at_3 value: 25.337 - type: ndcg_at_5 value: 27.18 - type: ndcg_at_10 value: 29.452 - type: ndcg_at_20 value: 31.226 - type: ndcg_at_100 value: 34.409 - type: ndcg_at_1000 value: 37.577 - type: map_at_1 value: 17.363 - type: map_at_3 value: 22.448999999999998 - type: map_at_5 value: 23.686 - type: map_at_10 value: 24.769 - type: map_at_20 value: 25.295 - type: map_at_100 value: 25.790999999999997 - type: map_at_1000 value: 25.929000000000002 - type: recall_at_1 value: 17.363 - type: recall_at_3 value: 28.022000000000002 - type: recall_at_5 value: 32.817 - type: recall_at_10 value: 39.639 - type: recall_at_20 value: 46.245999999999995 - type: recall_at_100 value: 61.934 - type: recall_at_1000 value: 84.507 - type: precision_at_1 value: 21.266 - type: precision_at_3 value: 
12.056000000000001 - type: precision_at_5 value: 8.727 - type: precision_at_10 value: 5.382 - type: precision_at_20 value: 3.2300000000000004 - type: precision_at_100 value: 0.922 - type: precision_at_1000 value: 0.13799999999999998 - type: mrr_at_1 value: 21.266299999999998 - type: mrr_at_3 value: 26.5887 - type: mrr_at_5 value: 27.7931 - type: mrr_at_10 value: 28.7136 - type: mrr_at_20 value: 29.1995 - type: mrr_at_100 value: 29.5953 - type: mrr_at_1000 value: 29.677999999999997 - type: nauc_ndcg_at_1_max value: 32.1973 - type: nauc_ndcg_at_1_std value: -3.8459 - type: nauc_ndcg_at_1_diff1 value: 40.2485 - type: nauc_ndcg_at_3_max value: 31.338300000000004 - type: nauc_ndcg_at_3_std value: -3.2641000000000004 - type: nauc_ndcg_at_3_diff1 value: 34.212199999999996 - type: nauc_ndcg_at_5_max value: 30.9515 - type: nauc_ndcg_at_5_std value: -2.5583 - type: nauc_ndcg_at_5_diff1 value: 33.3896 - type: nauc_ndcg_at_10_max value: 31.1472 - type: nauc_ndcg_at_10_std value: -1.4321000000000002 - type: nauc_ndcg_at_10_diff1 value: 33.057700000000004 - type: nauc_ndcg_at_20_max value: 31.513099999999998 - type: nauc_ndcg_at_20_std value: 0.4013 - type: nauc_ndcg_at_20_diff1 value: 32.2353 - type: nauc_ndcg_at_100_max value: 31.8931 - type: nauc_ndcg_at_100_std value: 2.0259 - type: nauc_ndcg_at_100_diff1 value: 31.966499999999996 - type: nauc_ndcg_at_1000_max value: 32.1421 - type: nauc_ndcg_at_1000_std value: 1.9602000000000002 - type: nauc_ndcg_at_1000_diff1 value: 32.6747 - type: nauc_map_at_1_max value: 28.973 - type: nauc_map_at_1_std value: -4.6768 - type: nauc_map_at_1_diff1 value: 40.726600000000005 - type: nauc_map_at_3_max value: 29.9942 - type: nauc_map_at_3_std value: -3.7635 - type: nauc_map_at_3_diff1 value: 35.5655 - type: nauc_map_at_5_max value: 30.157099999999996 - type: nauc_map_at_5_std value: -3.3414 - type: nauc_map_at_5_diff1 value: 35.085699999999996 - type: nauc_map_at_10_max value: 30.4178 - type: nauc_map_at_10_std value: -2.7081999999999997 - type: nauc_map_at_10_diff1 value: 34.834700000000005 - type: nauc_map_at_20_max value: 30.5785 - type: nauc_map_at_20_std value: -2.1469 - type: nauc_map_at_20_diff1 value: 34.6132 - type: nauc_map_at_100_max value: 30.755100000000002 - type: nauc_map_at_100_std value: -1.846 - type: nauc_map_at_100_diff1 value: 34.5596 - type: nauc_map_at_1000_max value: 30.818800000000003 - type: nauc_map_at_1000_std value: -1.8256000000000001 - type: nauc_map_at_1000_diff1 value: 34.602199999999996 - type: nauc_recall_at_1_max value: 28.973 - type: nauc_recall_at_1_std value: -4.6768 - type: nauc_recall_at_1_diff1 value: 40.726600000000005 - type: nauc_recall_at_3_max value: 28.962300000000003 - type: nauc_recall_at_3_std value: -2.8797 - type: nauc_recall_at_3_diff1 value: 29.9765 - type: nauc_recall_at_5_max value: 28.193 - type: nauc_recall_at_5_std value: -1.6741 - type: nauc_recall_at_5_diff1 value: 27.825100000000003 - type: nauc_recall_at_10_max value: 28.266099999999998 - type: nauc_recall_at_10_std value: 0.9544 - type: nauc_recall_at_10_diff1 value: 26.365499999999997 - type: nauc_recall_at_20_max value: 28.839 - type: nauc_recall_at_20_std value: 6.809 - type: nauc_recall_at_20_diff1 value: 22.761400000000002 - type: nauc_recall_at_100_max value: 29.2235 - type: nauc_recall_at_100_std value: 15.3679 - type: nauc_recall_at_100_diff1 value: 19.3302 - type: nauc_recall_at_1000_max value: 27.954800000000002 - type: nauc_recall_at_1000_std value: 25.5618 - type: nauc_recall_at_1000_diff1 value: 17.749100000000002 - type: 
nauc_precision_at_1_max value: 32.1973 - type: nauc_precision_at_1_std value: -3.8459 - type: nauc_precision_at_1_diff1 value: 40.2485 - type: nauc_precision_at_3_max value: 33.3915 - type: nauc_precision_at_3_std value: -1.7868 - type: nauc_precision_at_3_diff1 value: 29.0619 - type: nauc_precision_at_5_max value: 33.0357 - type: nauc_precision_at_5_std value: 0.7308 - type: nauc_precision_at_5_diff1 value: 25.966299999999997 - type: nauc_precision_at_10_max value: 33.1657 - type: nauc_precision_at_10_std value: 4.3635 - type: nauc_precision_at_10_diff1 value: 23.5546 - type: nauc_precision_at_20_max value: 32.9354 - type: nauc_precision_at_20_std value: 10.2754 - type: nauc_precision_at_20_diff1 value: 18.9755 - type: nauc_precision_at_100_max value: 30.0047 - type: nauc_precision_at_100_std value: 14.9007 - type: nauc_precision_at_100_diff1 value: 10.6748 - type: nauc_precision_at_1000_max value: 24.2685 - type: nauc_precision_at_1000_std value: 10.8307 - type: nauc_precision_at_1000_diff1 value: 1.3375 - type: nauc_mrr_at_1_max value: 32.1973 - type: nauc_mrr_at_1_std value: -3.8459 - type: nauc_mrr_at_1_diff1 value: 40.2485 - type: nauc_mrr_at_3_max value: 32.670100000000005 - type: nauc_mrr_at_3_std value: -2.7189 - type: nauc_mrr_at_3_diff1 value: 35.8073 - type: nauc_mrr_at_5_max value: 32.4756 - type: nauc_mrr_at_5_std value: -2.2318000000000002 - type: nauc_mrr_at_5_diff1 value: 35.2567 - type: nauc_mrr_at_10_max value: 32.594699999999996 - type: nauc_mrr_at_10_std value: -1.8573 - type: nauc_mrr_at_10_diff1 value: 35.268100000000004 - type: nauc_mrr_at_20_max value: 32.7337 - type: nauc_mrr_at_20_std value: -1.3544 - type: nauc_mrr_at_20_diff1 value: 35.0493 - type: nauc_mrr_at_100_max value: 32.775999999999996 - type: nauc_mrr_at_100_std value: -1.2326 - type: nauc_mrr_at_100_diff1 value: 35.0304 - type: nauc_mrr_at_1000_max value: 32.7772 - type: nauc_mrr_at_1000_std value: -1.2438 - type: nauc_mrr_at_1000_diff1 value: 35.0535 - type: main_score value: 29.452 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval (default) type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: ndcg_at_1 value: 32.556000000000004 - type: ndcg_at_3 value: 36.928 - type: ndcg_at_5 value: 39.116 - type: ndcg_at_10 value: 41.801 - type: ndcg_at_20 value: 44.080999999999996 - type: ndcg_at_100 value: 47.138999999999996 - type: ndcg_at_1000 value: 49.372 - type: map_at_1 value: 27.062 - type: map_at_3 value: 33.616 - type: map_at_5 value: 35.181000000000004 - type: map_at_10 value: 36.431000000000004 - type: map_at_20 value: 37.15 - type: map_at_100 value: 37.662 - type: map_at_1000 value: 37.763999999999996 - type: recall_at_1 value: 27.062 - type: recall_at_3 value: 40.199 - type: recall_at_5 value: 46.025 - type: recall_at_10 value: 53.973000000000006 - type: recall_at_20 value: 61.989000000000004 - type: recall_at_100 value: 76.537 - type: recall_at_1000 value: 92.087 - type: precision_at_1 value: 32.556000000000004 - type: precision_at_3 value: 16.915 - type: precision_at_5 value: 11.791 - type: precision_at_10 value: 7.034 - type: precision_at_20 value: 4.1739999999999995 - type: precision_at_100 value: 1.089 - type: precision_at_1000 value: 0.13799999999999998 - type: mrr_at_1 value: 32.556000000000004 - type: mrr_at_3 value: 38.339600000000004 - type: mrr_at_5 value: 39.696799999999996 - type: mrr_at_10 value: 40.7987 - type: mrr_at_20 value: 41.3962 - type: mrr_at_100 value: 41.7337 - type: mrr_at_1000 value: 
41.794399999999996 - type: nauc_ndcg_at_1_max value: 43.5112 - type: nauc_ndcg_at_1_std value: -9.9968 - type: nauc_ndcg_at_1_diff1 value: 54.4148 - type: nauc_ndcg_at_3_max value: 44.4173 - type: nauc_ndcg_at_3_std value: -4.9704999999999995 - type: nauc_ndcg_at_3_diff1 value: 49.746 - type: nauc_ndcg_at_5_max value: 43.944100000000006 - type: nauc_ndcg_at_5_std value: -3.8952 - type: nauc_ndcg_at_5_diff1 value: 48.2127 - type: nauc_ndcg_at_10_max value: 43.0905 - type: nauc_ndcg_at_10_std value: -3.6698 - type: nauc_ndcg_at_10_diff1 value: 46.8763 - type: nauc_ndcg_at_20_max value: 42.6245 - type: nauc_ndcg_at_20_std value: -4.1508 - type: nauc_ndcg_at_20_diff1 value: 46.0823 - type: nauc_ndcg_at_100_max value: 42.9829 - type: nauc_ndcg_at_100_std value: -3.2881 - type: nauc_ndcg_at_100_diff1 value: 46.9669 - type: nauc_ndcg_at_1000_max value: 43.3769 - type: nauc_ndcg_at_1000_std value: -2.6679 - type: nauc_ndcg_at_1000_diff1 value: 47.3983 - type: nauc_map_at_1_max value: 41.5528 - type: nauc_map_at_1_std value: -11.307599999999999 - type: nauc_map_at_1_diff1 value: 54.931700000000006 - type: nauc_map_at_3_max value: 43.2776 - type: nauc_map_at_3_std value: -7.421800000000001 - type: nauc_map_at_3_diff1 value: 51.1883 - type: nauc_map_at_5_max value: 43.4821 - type: nauc_map_at_5_std value: -6.2339 - type: nauc_map_at_5_diff1 value: 50.1494 - type: nauc_map_at_10_max value: 43.3333 - type: nauc_map_at_10_std value: -6.065 - type: nauc_map_at_10_diff1 value: 49.661100000000005 - type: nauc_map_at_20_max value: 43.231 - type: nauc_map_at_20_std value: -6.2244 - type: nauc_map_at_20_diff1 value: 49.407000000000004 - type: nauc_map_at_100_max value: 43.3803 - type: nauc_map_at_100_std value: -5.9752 - type: nauc_map_at_100_diff1 value: 49.5411 - type: nauc_map_at_1000_max value: 43.4007 - type: nauc_map_at_1000_std value: -5.9336 - type: nauc_map_at_1000_diff1 value: 49.5578 - type: nauc_recall_at_1_max value: 41.5528 - type: nauc_recall_at_1_std value: -11.307599999999999 - type: nauc_recall_at_1_diff1 value: 54.931700000000006 - type: nauc_recall_at_3_max value: 42.6893 - type: nauc_recall_at_3_std value: -2.3828 - type: nauc_recall_at_3_diff1 value: 46.050999999999995 - type: nauc_recall_at_5_max value: 41.6989 - type: nauc_recall_at_5_std value: 1.0116 - type: nauc_recall_at_5_diff1 value: 41.5014 - type: nauc_recall_at_10_max value: 37.9823 - type: nauc_recall_at_10_std value: 1.9809 - type: nauc_recall_at_10_diff1 value: 36.3968 - type: nauc_recall_at_20_max value: 35.5843 - type: nauc_recall_at_20_std value: 0.1044 - type: nauc_recall_at_20_diff1 value: 32.377 - type: nauc_recall_at_100_max value: 35.316900000000004 - type: nauc_recall_at_100_std value: 5.6158 - type: nauc_recall_at_100_diff1 value: 34.8474 - type: nauc_recall_at_1000_max value: 40.3589 - type: nauc_recall_at_1000_std value: 36.2315 - type: nauc_recall_at_1000_diff1 value: 32.7652 - type: nauc_precision_at_1_max value: 43.5112 - type: nauc_precision_at_1_std value: -9.9968 - type: nauc_precision_at_1_diff1 value: 54.4148 - type: nauc_precision_at_3_max value: 43.5357 - type: nauc_precision_at_3_std value: 1.8129 - type: nauc_precision_at_3_diff1 value: 39.4033 - type: nauc_precision_at_5_max value: 41.2383 - type: nauc_precision_at_5_std value: 5.952500000000001 - type: nauc_precision_at_5_diff1 value: 32.6387 - type: nauc_precision_at_10_max value: 35.8673 - type: nauc_precision_at_10_std value: 6.9601 - type: nauc_precision_at_10_diff1 value: 25.1842 - type: nauc_precision_at_20_max value: 28.9362 - type: 
nauc_precision_at_20_std value: 7.607800000000001 - type: nauc_precision_at_20_diff1 value: 16.7232 - type: nauc_precision_at_100_max value: 18.434800000000003 - type: nauc_precision_at_100_std value: 12.987000000000002 - type: nauc_precision_at_100_diff1 value: 6.9893 - type: nauc_precision_at_1000_max value: 1.0569 - type: nauc_precision_at_1000_std value: 12.5503 - type: nauc_precision_at_1000_diff1 value: -7.3416 - type: nauc_mrr_at_1_max value: 43.5112 - type: nauc_mrr_at_1_std value: -9.9968 - type: nauc_mrr_at_1_diff1 value: 54.4148 - type: nauc_mrr_at_3_max value: 44.642900000000004 - type: nauc_mrr_at_3_std value: -5.3517 - type: nauc_mrr_at_3_diff1 value: 50.2935 - type: nauc_mrr_at_5_max value: 44.4732 - type: nauc_mrr_at_5_std value: -4.608099999999999 - type: nauc_mrr_at_5_diff1 value: 49.346000000000004 - type: nauc_mrr_at_10_max value: 43.9489 - type: nauc_mrr_at_10_std value: -4.5868 - type: nauc_mrr_at_10_diff1 value: 48.7018 - type: nauc_mrr_at_20_max value: 43.7826 - type: nauc_mrr_at_20_std value: -4.8502 - type: nauc_mrr_at_20_diff1 value: 48.5755 - type: nauc_mrr_at_100_max value: 43.7991 - type: nauc_mrr_at_100_std value: -4.8094 - type: nauc_mrr_at_100_diff1 value: 48.7361 - type: nauc_mrr_at_1000_max value: 43.8348 - type: nauc_mrr_at_1000_std value: -4.7897 - type: nauc_mrr_at_1000_diff1 value: 48.7638 - type: main_score value: 41.801 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval (default) type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: ndcg_at_1 value: 30.631999999999998 - type: ndcg_at_3 value: 34.528999999999996 - type: ndcg_at_5 value: 36.547000000000004 - type: ndcg_at_10 value: 40.105000000000004 - type: ndcg_at_20 value: 42.34 - type: ndcg_at_100 value: 45.712 - type: ndcg_at_1000 value: 48.314 - type: map_at_1 value: 25.19 - type: map_at_3 value: 30.656 - type: map_at_5 value: 32.161 - type: map_at_10 value: 33.928000000000004 - type: map_at_20 value: 34.782999999999994 - type: map_at_100 value: 35.493 - type: map_at_1000 value: 35.713 - type: recall_at_1 value: 25.19 - type: recall_at_3 value: 36.007 - type: recall_at_5 value: 41.772 - type: recall_at_10 value: 52.117999999999995 - type: recall_at_20 value: 60.458 - type: recall_at_100 value: 77.34400000000001 - type: recall_at_1000 value: 93.77 - type: precision_at_1 value: 30.631999999999998 - type: precision_at_3 value: 15.942 - type: precision_at_5 value: 11.462 - type: precision_at_10 value: 7.826 - type: precision_at_20 value: 4.9799999999999995 - type: precision_at_100 value: 1.528 - type: precision_at_1000 value: 0.242 - type: mrr_at_1 value: 30.632399999999997 - type: mrr_at_3 value: 35.8037 - type: mrr_at_5 value: 37.2661 - type: mrr_at_10 value: 38.8381 - type: mrr_at_20 value: 39.4229 - type: mrr_at_100 value: 39.7673 - type: mrr_at_1000 value: 39.8227 - type: nauc_ndcg_at_1_max value: 45.7418 - type: nauc_ndcg_at_1_std value: 7.7497 - type: nauc_ndcg_at_1_diff1 value: 47.656 - type: nauc_ndcg_at_3_max value: 45.6597 - type: nauc_ndcg_at_3_std value: 9.6418 - type: nauc_ndcg_at_3_diff1 value: 43.1631 - type: nauc_ndcg_at_5_max value: 44.893100000000004 - type: nauc_ndcg_at_5_std value: 12.2393 - type: nauc_ndcg_at_5_diff1 value: 42.7159 - type: nauc_ndcg_at_10_max value: 43.6388 - type: nauc_ndcg_at_10_std value: 12.0574 - type: nauc_ndcg_at_10_diff1 value: 41.4018 - type: nauc_ndcg_at_20_max value: 43.8549 - type: nauc_ndcg_at_20_std value: 14.065900000000001 - type: 
nauc_ndcg_at_20_diff1 value: 41.056 - type: nauc_ndcg_at_100_max value: 44.770700000000005 - type: nauc_ndcg_at_100_std value: 14.8343 - type: nauc_ndcg_at_100_diff1 value: 42.2405 - type: nauc_ndcg_at_1000_max value: 45.524100000000004 - type: nauc_ndcg_at_1000_std value: 14.578199999999999 - type: nauc_ndcg_at_1000_diff1 value: 42.3126 - type: nauc_map_at_1_max value: 44.1517 - type: nauc_map_at_1_std value: 3.4579 - type: nauc_map_at_1_diff1 value: 53.915 - type: nauc_map_at_3_max value: 45.8324 - type: nauc_map_at_3_std value: 6.8385 - type: nauc_map_at_3_diff1 value: 47.8444 - type: nauc_map_at_5_max value: 45.4063 - type: nauc_map_at_5_std value: 8.3539 - type: nauc_map_at_5_diff1 value: 47.0671 - type: nauc_map_at_10_max value: 45.0727 - type: nauc_map_at_10_std value: 8.6699 - type: nauc_map_at_10_diff1 value: 46.050200000000004 - type: nauc_map_at_20_max value: 45.2504 - type: nauc_map_at_20_std value: 9.7359 - type: nauc_map_at_20_diff1 value: 45.711200000000005 - type: nauc_map_at_100_max value: 45.2055 - type: nauc_map_at_100_std value: 10.2755 - type: nauc_map_at_100_diff1 value: 45.5556 - type: nauc_map_at_1000_max value: 45.1304 - type: nauc_map_at_1000_std value: 10.3956 - type: nauc_map_at_1000_diff1 value: 45.4084 - type: nauc_recall_at_1_max value: 44.1517 - type: nauc_recall_at_1_std value: 3.4579 - type: nauc_recall_at_1_diff1 value: 53.915 - type: nauc_recall_at_3_max value: 44.349199999999996 - type: nauc_recall_at_3_std value: 9.464599999999999 - type: nauc_recall_at_3_diff1 value: 41.302499999999995 - type: nauc_recall_at_5_max value: 42.2726 - type: nauc_recall_at_5_std value: 14.7778 - type: nauc_recall_at_5_diff1 value: 38.1663 - type: nauc_recall_at_10_max value: 37.0689 - type: nauc_recall_at_10_std value: 14.760699999999998 - type: nauc_recall_at_10_diff1 value: 32.1674 - type: nauc_recall_at_20_max value: 36.1879 - type: nauc_recall_at_20_std value: 22.6902 - type: nauc_recall_at_20_diff1 value: 28.933999999999997 - type: nauc_recall_at_100_max value: 38.5222 - type: nauc_recall_at_100_std value: 31.595299999999998 - type: nauc_recall_at_100_diff1 value: 30.495499999999996 - type: nauc_recall_at_1000_max value: 59.5012 - type: nauc_recall_at_1000_std value: 61.421499999999995 - type: nauc_recall_at_1000_diff1 value: 30.153000000000002 - type: nauc_precision_at_1_max value: 45.7418 - type: nauc_precision_at_1_std value: 7.7497 - type: nauc_precision_at_1_diff1 value: 47.656 - type: nauc_precision_at_3_max value: 41.5197 - type: nauc_precision_at_3_std value: 14.416200000000002 - type: nauc_precision_at_3_diff1 value: 27.4448 - type: nauc_precision_at_5_max value: 37.372699999999995 - type: nauc_precision_at_5_std value: 20.4825 - type: nauc_precision_at_5_diff1 value: 20.4335 - type: nauc_precision_at_10_max value: 26.792899999999996 - type: nauc_precision_at_10_std value: 20.895 - type: nauc_precision_at_10_diff1 value: 6.9729 - type: nauc_precision_at_20_max value: 19.3562 - type: nauc_precision_at_20_std value: 26.9338 - type: nauc_precision_at_20_diff1 value: -2.5024 - type: nauc_precision_at_100_max value: 2.6254 - type: nauc_precision_at_100_std value: 24.4194 - type: nauc_precision_at_100_diff1 value: -14.6956 - type: nauc_precision_at_1000_max value: -7.0939000000000005 - type: nauc_precision_at_1000_std value: 17.2116 - type: nauc_precision_at_1000_diff1 value: -23.3519 - type: nauc_mrr_at_1_max value: 45.7418 - type: nauc_mrr_at_1_std value: 7.7497 - type: nauc_mrr_at_1_diff1 value: 47.656 - type: nauc_mrr_at_3_max value: 44.974799999999995 - type: 
nauc_mrr_at_3_std value: 10.0484 - type: nauc_mrr_at_3_diff1 value: 42.5053 - type: nauc_mrr_at_5_max value: 45.0004 - type: nauc_mrr_at_5_std value: 11.505700000000001 - type: nauc_mrr_at_5_diff1 value: 42.0568 - type: nauc_mrr_at_10_max value: 44.5236 - type: nauc_mrr_at_10_std value: 11.6009 - type: nauc_mrr_at_10_diff1 value: 41.5394 - type: nauc_mrr_at_20_max value: 44.568400000000004 - type: nauc_mrr_at_20_std value: 11.9612 - type: nauc_mrr_at_20_diff1 value: 41.4954 - type: nauc_mrr_at_100_max value: 44.6377 - type: nauc_mrr_at_100_std value: 12.0293 - type: nauc_mrr_at_100_diff1 value: 41.6504 - type: nauc_mrr_at_1000_max value: 44.650099999999995 - type: nauc_mrr_at_1000_std value: 12.0106 - type: nauc_mrr_at_1000_diff1 value: 41.6595 - type: main_score value: 40.105000000000004 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval (default) type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: ndcg_at_1 value: 21.811 - type: ndcg_at_3 value: 27.472 - type: ndcg_at_5 value: 29.755 - type: ndcg_at_10 value: 32.561 - type: ndcg_at_20 value: 34.708 - type: ndcg_at_100 value: 38.052 - type: ndcg_at_1000 value: 40.526 - type: map_at_1 value: 20.339 - type: map_at_3 value: 25.358000000000004 - type: map_at_5 value: 26.682 - type: map_at_10 value: 27.935 - type: map_at_20 value: 28.536 - type: map_at_100 value: 29.038000000000004 - type: map_at_1000 value: 29.149 - type: recall_at_1 value: 20.339 - type: recall_at_3 value: 31.682 - type: recall_at_5 value: 36.962 - type: recall_at_10 value: 45.095 - type: recall_at_20 value: 53.25 - type: recall_at_100 value: 70.155 - type: recall_at_1000 value: 88.177 - type: precision_at_1 value: 21.811 - type: precision_at_3 value: 11.706999999999999 - type: precision_at_5 value: 8.429 - type: precision_at_10 value: 5.25 - type: precision_at_20 value: 3.1419999999999995 - type: precision_at_100 value: 0.8540000000000001 - type: precision_at_1000 value: 0.117 - type: mrr_at_1 value: 21.8115 - type: mrr_at_3 value: 27.1411 - type: mrr_at_5 value: 28.490399999999998 - type: mrr_at_10 value: 29.616500000000002 - type: mrr_at_20 value: 30.215999999999998 - type: mrr_at_100 value: 30.6966 - type: mrr_at_1000 value: 30.767899999999997 - type: nauc_ndcg_at_1_max value: 32.8162 - type: nauc_ndcg_at_1_std value: -4.388199999999999 - type: nauc_ndcg_at_1_diff1 value: 44.436 - type: nauc_ndcg_at_3_max value: 28.517 - type: nauc_ndcg_at_3_std value: -4.3836 - type: nauc_ndcg_at_3_diff1 value: 35.7606 - type: nauc_ndcg_at_5_max value: 28.68 - type: nauc_ndcg_at_5_std value: -3.0216 - type: nauc_ndcg_at_5_diff1 value: 35.27 - type: nauc_ndcg_at_10_max value: 26.572200000000002 - type: nauc_ndcg_at_10_std value: -3.8319 - type: nauc_ndcg_at_10_diff1 value: 33.311099999999996 - type: nauc_ndcg_at_20_max value: 26.7196 - type: nauc_ndcg_at_20_std value: -1.3162 - type: nauc_ndcg_at_20_diff1 value: 32.202999999999996 - type: nauc_ndcg_at_100_max value: 28.8134 - type: nauc_ndcg_at_100_std value: -0.2386 - type: nauc_ndcg_at_100_diff1 value: 31.5089 - type: nauc_ndcg_at_1000_max value: 28.732799999999997 - type: nauc_ndcg_at_1000_std value: 0.6251 - type: nauc_ndcg_at_1000_diff1 value: 32.1837 - type: nauc_map_at_1_max value: 29.4829 - type: nauc_map_at_1_std value: -6.0044 - type: nauc_map_at_1_diff1 value: 43.3353 - type: nauc_map_at_3_max value: 28.230499999999996 - type: nauc_map_at_3_std value: -5.0899 - type: nauc_map_at_3_diff1 value: 37.3547 - type: nauc_map_at_5_max 
value: 28.7927 - type: nauc_map_at_5_std value: -4.254899999999999 - type: nauc_map_at_5_diff1 value: 37.1805 - type: nauc_map_at_10_max value: 28.1557 - type: nauc_map_at_10_std value: -4.4931 - type: nauc_map_at_10_diff1 value: 36.2513 - type: nauc_map_at_20_max value: 28.205799999999996 - type: nauc_map_at_20_std value: -3.6852000000000005 - type: nauc_map_at_20_diff1 value: 35.9099 - type: nauc_map_at_100_max value: 28.604499999999998 - type: nauc_map_at_100_std value: -3.4775 - type: nauc_map_at_100_diff1 value: 35.802 - type: nauc_map_at_1000_max value: 28.6008 - type: nauc_map_at_1000_std value: -3.4255 - type: nauc_map_at_1000_diff1 value: 35.8238 - type: nauc_recall_at_1_max value: 29.4829 - type: nauc_recall_at_1_std value: -6.0044 - type: nauc_recall_at_1_diff1 value: 43.3353 - type: nauc_recall_at_3_max value: 25.4695 - type: nauc_recall_at_3_std value: -4.3068 - type: nauc_recall_at_3_diff1 value: 30.2776 - type: nauc_recall_at_5_max value: 25.901400000000002 - type: nauc_recall_at_5_std value: -1.4424 - type: nauc_recall_at_5_diff1 value: 29.3842 - type: nauc_recall_at_10_max value: 19.203200000000002 - type: nauc_recall_at_10_std value: -3.8822 - type: nauc_recall_at_10_diff1 value: 24.0215 - type: nauc_recall_at_20_max value: 18.9758 - type: nauc_recall_at_20_std value: 4.9965 - type: nauc_recall_at_20_diff1 value: 19.5423 - type: nauc_recall_at_100_max value: 27.7916 - type: nauc_recall_at_100_std value: 13.4764 - type: nauc_recall_at_100_diff1 value: 11.1211 - type: nauc_recall_at_1000_max value: 28.3949 - type: nauc_recall_at_1000_std value: 41.7299 - type: nauc_recall_at_1000_diff1 value: 4.0583 - type: nauc_precision_at_1_max value: 32.8162 - type: nauc_precision_at_1_std value: -4.388199999999999 - type: nauc_precision_at_1_diff1 value: 44.436 - type: nauc_precision_at_3_max value: 28.614 - type: nauc_precision_at_3_std value: -1.5110000000000001 - type: nauc_precision_at_3_diff1 value: 30.165 - type: nauc_precision_at_5_max value: 29.49 - type: nauc_precision_at_5_std value: 3.3188000000000004 - type: nauc_precision_at_5_diff1 value: 27.6501 - type: nauc_precision_at_10_max value: 24.334500000000002 - type: nauc_precision_at_10_std value: 3.4701000000000004 - type: nauc_precision_at_10_diff1 value: 20.4126 - type: nauc_precision_at_20_max value: 23.4494 - type: nauc_precision_at_20_std value: 14.380799999999999 - type: nauc_precision_at_20_diff1 value: 12.5855 - type: nauc_precision_at_100_max value: 25.5811 - type: nauc_precision_at_100_std value: 21.0337 - type: nauc_precision_at_100_diff1 value: 0.1621 - type: nauc_precision_at_1000_max value: 1.3693 - type: nauc_precision_at_1000_std value: 22.288 - type: nauc_precision_at_1000_diff1 value: -18.3564 - type: nauc_mrr_at_1_max value: 32.8162 - type: nauc_mrr_at_1_std value: -4.388199999999999 - type: nauc_mrr_at_1_diff1 value: 44.436 - type: nauc_mrr_at_3_max value: 31.5259 - type: nauc_mrr_at_3_std value: -3.6585 - type: nauc_mrr_at_3_diff1 value: 38.5309 - type: nauc_mrr_at_5_max value: 31.1784 - type: nauc_mrr_at_5_std value: -2.5462 - type: nauc_mrr_at_5_diff1 value: 37.9675 - type: nauc_mrr_at_10_max value: 30.0497 - type: nauc_mrr_at_10_std value: -3.0947999999999998 - type: nauc_mrr_at_10_diff1 value: 37.0458 - type: nauc_mrr_at_20_max value: 30.082900000000002 - type: nauc_mrr_at_20_std value: -2.6054 - type: nauc_mrr_at_20_diff1 value: 36.774499999999996 - type: nauc_mrr_at_100_max value: 30.424200000000003 - type: nauc_mrr_at_100_std value: -2.5341 - type: nauc_mrr_at_100_diff1 value: 36.7384 - type: 
nauc_mrr_at_1000_max value: 30.4217 - type: nauc_mrr_at_1000_std value: -2.4978 - type: nauc_mrr_at_1000_diff1 value: 36.7847 - type: main_score value: 32.561 - task: type: Retrieval dataset: name: MTEB ClimateFEVER (default) type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: ndcg_at_1 value: 23.388 - type: ndcg_at_3 value: 20.198 - type: ndcg_at_5 value: 21.959 - type: ndcg_at_10 value: 24.97 - type: ndcg_at_20 value: 27.26 - type: ndcg_at_100 value: 31.244 - type: ndcg_at_1000 value: 34.694 - type: map_at_1 value: 10.738 - type: map_at_3 value: 14.707 - type: map_at_5 value: 16.123 - type: map_at_10 value: 17.45 - type: map_at_20 value: 18.251 - type: map_at_100 value: 18.979 - type: map_at_1000 value: 19.154 - type: recall_at_1 value: 10.738 - type: recall_at_3 value: 18.590999999999998 - type: recall_at_5 value: 23.427 - type: recall_at_10 value: 30.144 - type: recall_at_20 value: 36.586999999999996 - type: recall_at_100 value: 51.757 - type: recall_at_1000 value: 71.353 - type: precision_at_1 value: 23.388 - type: precision_at_3 value: 14.527999999999999 - type: precision_at_5 value: 11.375 - type: precision_at_10 value: 7.674 - type: precision_at_20 value: 4.824 - type: precision_at_100 value: 1.4460000000000002 - type: precision_at_1000 value: 0.208 - type: mrr_at_1 value: 23.3876 - type: mrr_at_3 value: 30.640600000000003 - type: mrr_at_5 value: 32.7416 - type: mrr_at_10 value: 34.2082 - type: mrr_at_20 value: 34.696 - type: mrr_at_100 value: 35.0613 - type: mrr_at_1000 value: 35.1158 - type: nauc_ndcg_at_1_max value: 34.0809 - type: nauc_ndcg_at_1_std value: 11.4587 - type: nauc_ndcg_at_1_diff1 value: 24.7702 - type: nauc_ndcg_at_3_max value: 36.7061 - type: nauc_ndcg_at_3_std value: 15.8194 - type: nauc_ndcg_at_3_diff1 value: 22.0991 - type: nauc_ndcg_at_5_max value: 40.0278 - type: nauc_ndcg_at_5_std value: 19.442 - type: nauc_ndcg_at_5_diff1 value: 22.0353 - type: nauc_ndcg_at_10_max value: 41.8522 - type: nauc_ndcg_at_10_std value: 22.2665 - type: nauc_ndcg_at_10_diff1 value: 21.9219 - type: nauc_ndcg_at_20_max value: 42.111599999999996 - type: nauc_ndcg_at_20_std value: 24.7003 - type: nauc_ndcg_at_20_diff1 value: 21.1493 - type: nauc_ndcg_at_100_max value: 41.4285 - type: nauc_ndcg_at_100_std value: 26.8766 - type: nauc_ndcg_at_100_diff1 value: 20.658 - type: nauc_ndcg_at_1000_max value: 41.7107 - type: nauc_ndcg_at_1000_std value: 27.8879 - type: nauc_ndcg_at_1000_diff1 value: 20.249 - type: nauc_map_at_1_max value: 39.0907 - type: nauc_map_at_1_std value: 10.9155 - type: nauc_map_at_1_diff1 value: 27.478799999999996 - type: nauc_map_at_3_max value: 39.1964 - type: nauc_map_at_3_std value: 14.1844 - type: nauc_map_at_3_diff1 value: 24.5869 - type: nauc_map_at_5_max value: 40.8907 - type: nauc_map_at_5_std value: 16.6955 - type: nauc_map_at_5_diff1 value: 24.1453 - type: nauc_map_at_10_max value: 41.8968 - type: nauc_map_at_10_std value: 18.4835 - type: nauc_map_at_10_diff1 value: 24.0071 - type: nauc_map_at_20_max value: 42.1779 - type: nauc_map_at_20_std value: 19.831599999999998 - type: nauc_map_at_20_diff1 value: 23.6712 - type: nauc_map_at_100_max value: 42.0617 - type: nauc_map_at_100_std value: 20.524700000000003 - type: nauc_map_at_100_diff1 value: 23.5193 - type: nauc_map_at_1000_max value: 42.080400000000004 - type: nauc_map_at_1000_std value: 20.6099 - type: nauc_map_at_1000_diff1 value: 23.48 - type: nauc_recall_at_1_max value: 39.0907 - type: nauc_recall_at_1_std value: 10.9155 - type: 
nauc_recall_at_1_diff1 value: 27.478799999999996 - type: nauc_recall_at_3_max value: 36.479099999999995 - type: nauc_recall_at_3_std value: 16.370199999999997 - type: nauc_recall_at_3_diff1 value: 21.0061 - type: nauc_recall_at_5_max value: 39.3227 - type: nauc_recall_at_5_std value: 21.753800000000002 - type: nauc_recall_at_5_diff1 value: 18.6069 - type: nauc_recall_at_10_max value: 40.7894 - type: nauc_recall_at_10_std value: 25.6917 - type: nauc_recall_at_10_diff1 value: 17.7339 - type: nauc_recall_at_20_max value: 39.6829 - type: nauc_recall_at_20_std value: 30.0384 - type: nauc_recall_at_20_diff1 value: 15.3931 - type: nauc_recall_at_100_max value: 34.9178 - type: nauc_recall_at_100_std value: 34.6884 - type: nauc_recall_at_100_diff1 value: 13.1482 - type: nauc_recall_at_1000_max value: 34.3804 - type: nauc_recall_at_1000_std value: 41.778 - type: nauc_recall_at_1000_diff1 value: 9.3052 - type: nauc_precision_at_1_max value: 34.0809 - type: nauc_precision_at_1_std value: 11.4587 - type: nauc_precision_at_1_diff1 value: 24.7702 - type: nauc_precision_at_3_max value: 31.784000000000002 - type: nauc_precision_at_3_std value: 18.567700000000002 - type: nauc_precision_at_3_diff1 value: 16.1653 - type: nauc_precision_at_5_max value: 34.9086 - type: nauc_precision_at_5_std value: 25.0212 - type: nauc_precision_at_5_diff1 value: 14.2787 - type: nauc_precision_at_10_max value: 35.1734 - type: nauc_precision_at_10_std value: 30.2243 - type: nauc_precision_at_10_diff1 value: 11.4396 - type: nauc_precision_at_20_max value: 31.347599999999996 - type: nauc_precision_at_20_std value: 33.2444 - type: nauc_precision_at_20_diff1 value: 8.0151 - type: nauc_precision_at_100_max value: 21.0705 - type: nauc_precision_at_100_std value: 33.561800000000005 - type: nauc_precision_at_100_diff1 value: 3.1647000000000003 - type: nauc_precision_at_1000_max value: 10.1267 - type: nauc_precision_at_1000_std value: 28.746199999999998 - type: nauc_precision_at_1000_diff1 value: -4.6774000000000004 - type: nauc_mrr_at_1_max value: 34.0809 - type: nauc_mrr_at_1_std value: 11.4587 - type: nauc_mrr_at_1_diff1 value: 24.7702 - type: nauc_mrr_at_3_max value: 33.799 - type: nauc_mrr_at_3_std value: 16.0923 - type: nauc_mrr_at_3_diff1 value: 20.8456 - type: nauc_mrr_at_5_max value: 35.1249 - type: nauc_mrr_at_5_std value: 17.8145 - type: nauc_mrr_at_5_diff1 value: 20.467299999999998 - type: nauc_mrr_at_10_max value: 35.856500000000004 - type: nauc_mrr_at_10_std value: 18.4864 - type: nauc_mrr_at_10_diff1 value: 20.6532 - type: nauc_mrr_at_20_max value: 35.787200000000006 - type: nauc_mrr_at_20_std value: 18.607599999999998 - type: nauc_mrr_at_20_diff1 value: 20.6192 - type: nauc_mrr_at_100_max value: 35.7134 - type: nauc_mrr_at_100_std value: 18.5964 - type: nauc_mrr_at_100_diff1 value: 20.5979 - type: nauc_mrr_at_1000_max value: 35.713499999999996 - type: nauc_mrr_at_1000_std value: 18.5792 - type: nauc_mrr_at_1000_diff1 value: 20.610300000000002 - type: main_score value: 24.97 - task: type: Retrieval dataset: name: MTEB CodeFeedbackMT (default) type: CoIR-Retrieval/codefeedback-mt config: default split: test revision: b0f12fa0c0dd67f59c95a5c33d02aeeb4c398c5f metrics: - type: ndcg_at_1 value: 15.403 - type: ndcg_at_3 value: 20.087 - type: ndcg_at_5 value: 21.72 - type: ndcg_at_10 value: 23.458000000000002 - type: ndcg_at_20 value: 24.990000000000002 - type: ndcg_at_100 value: 27.933000000000003 - type: ndcg_at_1000 value: 30.642999999999997 - type: map_at_1 value: 15.403 - type: map_at_3 value: 18.925 - type: map_at_5 value: 
19.832 - type: map_at_10 value: 20.549999999999997 - type: map_at_20 value: 20.97 - type: map_at_100 value: 21.358 - type: map_at_1000 value: 21.447 - type: recall_at_1 value: 15.403 - type: recall_at_3 value: 23.454 - type: recall_at_5 value: 27.416 - type: recall_at_10 value: 32.786 - type: recall_at_20 value: 38.849000000000004 - type: recall_at_100 value: 54.99699999999999 - type: recall_at_1000 value: 77.096 - type: precision_at_1 value: 15.403 - type: precision_at_3 value: 7.818 - type: precision_at_5 value: 5.483 - type: precision_at_10 value: 3.279 - type: precision_at_20 value: 1.942 - type: precision_at_100 value: 0.5499999999999999 - type: precision_at_1000 value: 0.077 - type: mrr_at_1 value: 15.4026 - type: mrr_at_3 value: 18.925 - type: mrr_at_5 value: 19.8322 - type: mrr_at_10 value: 20.5497 - type: mrr_at_20 value: 20.9696 - type: mrr_at_100 value: 21.3582 - type: mrr_at_1000 value: 21.4471 - type: nauc_ndcg_at_1_max value: 14.524799999999999 - type: nauc_ndcg_at_1_std value: -14.704500000000001 - type: nauc_ndcg_at_1_diff1 value: 45.3337 - type: nauc_ndcg_at_3_max value: 12.3014 - type: nauc_ndcg_at_3_std value: -14.977199999999998 - type: nauc_ndcg_at_3_diff1 value: 37.6118 - type: nauc_ndcg_at_5_max value: 12.015099999999999 - type: nauc_ndcg_at_5_std value: -14.844399999999998 - type: nauc_ndcg_at_5_diff1 value: 36.439 - type: nauc_ndcg_at_10_max value: 11.886800000000001 - type: nauc_ndcg_at_10_std value: -14.274600000000001 - type: nauc_ndcg_at_10_diff1 value: 35.0552 - type: nauc_ndcg_at_20_max value: 11.843 - type: nauc_ndcg_at_20_std value: -13.729099999999999 - type: nauc_ndcg_at_20_diff1 value: 34.172999999999995 - type: nauc_ndcg_at_100_max value: 12.570700000000002 - type: nauc_ndcg_at_100_std value: -11.956999999999999 - type: nauc_ndcg_at_100_diff1 value: 33.5916 - type: nauc_ndcg_at_1000_max value: 13.3025 - type: nauc_ndcg_at_1000_std value: -10.6411 - type: nauc_ndcg_at_1000_diff1 value: 33.535900000000005 - type: nauc_map_at_1_max value: 14.524799999999999 - type: nauc_map_at_1_std value: -14.704500000000001 - type: nauc_map_at_1_diff1 value: 45.3337 - type: nauc_map_at_3_max value: 12.7833 - type: nauc_map_at_3_std value: -14.9312 - type: nauc_map_at_3_diff1 value: 39.2273 - type: nauc_map_at_5_max value: 12.606200000000001 - type: nauc_map_at_5_std value: -14.846200000000001 - type: nauc_map_at_5_diff1 value: 38.5015 - type: nauc_map_at_10_max value: 12.5202 - type: nauc_map_at_10_std value: -14.5979 - type: nauc_map_at_10_diff1 value: 37.8521 - type: nauc_map_at_20_max value: 12.5101 - type: nauc_map_at_20_std value: -14.444899999999999 - type: nauc_map_at_20_diff1 value: 37.5942 - type: nauc_map_at_100_max value: 12.601399999999998 - type: nauc_map_at_100_std value: -14.2092 - type: nauc_map_at_100_diff1 value: 37.4992 - type: nauc_map_at_1000_max value: 12.6334 - type: nauc_map_at_1000_std value: -14.1545 - type: nauc_map_at_1000_diff1 value: 37.4959 - type: nauc_recall_at_1_max value: 14.524799999999999 - type: nauc_recall_at_1_std value: -14.704500000000001 - type: nauc_recall_at_1_diff1 value: 45.3337 - type: nauc_recall_at_3_max value: 11.0823 - type: nauc_recall_at_3_std value: -15.088899999999999 - type: nauc_recall_at_3_diff1 value: 33.5456 - type: nauc_recall_at_5_max value: 10.5617 - type: nauc_recall_at_5_std value: -14.8289 - type: nauc_recall_at_5_diff1 value: 31.3732 - type: nauc_recall_at_10_max value: 10.4061 - type: nauc_recall_at_10_std value: -13.3346 - type: nauc_recall_at_10_diff1 value: 28.131099999999996 - type: 
nauc_recall_at_20_max value: 10.2817 - type: nauc_recall_at_20_std value: -11.5314 - type: nauc_recall_at_20_diff1 value: 25.3998 - type: nauc_recall_at_100_max value: 13.818 - type: nauc_recall_at_100_std value: -2.6188 - type: nauc_recall_at_100_diff1 value: 22.0747 - type: nauc_recall_at_1000_max value: 21.893099999999997 - type: nauc_recall_at_1000_std value: 16.1546 - type: nauc_recall_at_1000_diff1 value: 15.1476 - type: nauc_precision_at_1_max value: 14.524799999999999 - type: nauc_precision_at_1_std value: -14.704500000000001 - type: nauc_precision_at_1_diff1 value: 45.3337 - type: nauc_precision_at_3_max value: 11.0823 - type: nauc_precision_at_3_std value: -15.088899999999999 - type: nauc_precision_at_3_diff1 value: 33.5456 - type: nauc_precision_at_5_max value: 10.5617 - type: nauc_precision_at_5_std value: -14.8289 - type: nauc_precision_at_5_diff1 value: 31.3732 - type: nauc_precision_at_10_max value: 10.4061 - type: nauc_precision_at_10_std value: -13.3346 - type: nauc_precision_at_10_diff1 value: 28.131099999999996 - type: nauc_precision_at_20_max value: 10.2817 - type: nauc_precision_at_20_std value: -11.5314 - type: nauc_precision_at_20_diff1 value: 25.3998 - type: nauc_precision_at_100_max value: 13.818 - type: nauc_precision_at_100_std value: -2.6188 - type: nauc_precision_at_100_diff1 value: 22.0747 - type: nauc_precision_at_1000_max value: 21.893099999999997 - type: nauc_precision_at_1000_std value: 16.1546 - type: nauc_precision_at_1000_diff1 value: 15.1476 - type: nauc_mrr_at_1_max value: 14.524799999999999 - type: nauc_mrr_at_1_std value: -14.704500000000001 - type: nauc_mrr_at_1_diff1 value: 45.3337 - type: nauc_mrr_at_3_max value: 12.7833 - type: nauc_mrr_at_3_std value: -14.9312 - type: nauc_mrr_at_3_diff1 value: 39.2273 - type: nauc_mrr_at_5_max value: 12.606200000000001 - type: nauc_mrr_at_5_std value: -14.846200000000001 - type: nauc_mrr_at_5_diff1 value: 38.5015 - type: nauc_mrr_at_10_max value: 12.5202 - type: nauc_mrr_at_10_std value: -14.5979 - type: nauc_mrr_at_10_diff1 value: 37.8521 - type: nauc_mrr_at_20_max value: 12.5101 - type: nauc_mrr_at_20_std value: -14.444899999999999 - type: nauc_mrr_at_20_diff1 value: 37.5942 - type: nauc_mrr_at_100_max value: 12.601399999999998 - type: nauc_mrr_at_100_std value: -14.2092 - type: nauc_mrr_at_100_diff1 value: 37.4992 - type: nauc_mrr_at_1000_max value: 12.6334 - type: nauc_mrr_at_1000_std value: -14.1545 - type: nauc_mrr_at_1000_diff1 value: 37.4959 - type: main_score value: 23.458000000000002 - task: type: Retrieval dataset: name: MTEB CodeFeedbackST (default) type: CoIR-Retrieval/codefeedback-st config: default split: test revision: d213819e87aab9010628da8b73ab4eb337c89340 metrics: - type: ndcg_at_1 value: 42.781000000000006 - type: ndcg_at_3 value: 53.547999999999995 - type: ndcg_at_5 value: 56.184999999999995 - type: ndcg_at_10 value: 58.455 - type: ndcg_at_20 value: 59.897 - type: ndcg_at_100 value: 61.806000000000004 - type: ndcg_at_1000 value: 62.769 - type: map_at_1 value: 42.781000000000006 - type: map_at_3 value: 50.92100000000001 - type: map_at_5 value: 52.38699999999999 - type: map_at_10 value: 53.335 - type: map_at_20 value: 53.733 - type: map_at_100 value: 53.998999999999995 - type: map_at_1000 value: 54.035 - type: recall_at_1 value: 42.781000000000006 - type: recall_at_3 value: 61.141999999999996 - type: recall_at_5 value: 67.533 - type: recall_at_10 value: 74.491 - type: recall_at_20 value: 80.17999999999999 - type: recall_at_100 value: 90.443 - type: recall_at_1000 value: 98.051 - type: 
precision_at_1 value: 42.781000000000006 - type: precision_at_3 value: 20.381 - type: precision_at_5 value: 13.507 - type: precision_at_10 value: 7.449 - type: precision_at_20 value: 4.009 - type: precision_at_100 value: 0.9039999999999999 - type: precision_at_1000 value: 0.098 - type: mrr_at_1 value: 42.8193 - type: mrr_at_3 value: 50.9333 - type: mrr_at_5 value: 52.4157 - type: mrr_at_10 value: 53.3551 - type: mrr_at_20 value: 53.7532 - type: mrr_at_100 value: 54.0192 - type: mrr_at_1000 value: 54.0547 - type: nauc_ndcg_at_1_max value: 8.1476 - type: nauc_ndcg_at_1_std value: -18.415599999999998 - type: nauc_ndcg_at_1_diff1 value: 61.467499999999994 - type: nauc_ndcg_at_3_max value: 14.5702 - type: nauc_ndcg_at_3_std value: -18.4765 - type: nauc_ndcg_at_3_diff1 value: 54.7928 - type: nauc_ndcg_at_5_max value: 15.2642 - type: nauc_ndcg_at_5_std value: -18.2014 - type: nauc_ndcg_at_5_diff1 value: 53.9847 - type: nauc_ndcg_at_10_max value: 15.0742 - type: nauc_ndcg_at_10_std value: -17.8811 - type: nauc_ndcg_at_10_diff1 value: 53.9565 - type: nauc_ndcg_at_20_max value: 14.7067 - type: nauc_ndcg_at_20_std value: -17.618000000000002 - type: nauc_ndcg_at_20_diff1 value: 54.041399999999996 - type: nauc_ndcg_at_100_max value: 14.4373 - type: nauc_ndcg_at_100_std value: -17.1309 - type: nauc_ndcg_at_100_diff1 value: 54.5959 - type: nauc_ndcg_at_1000_max value: 14.1768 - type: nauc_ndcg_at_1000_std value: -17.2829 - type: nauc_ndcg_at_1000_diff1 value: 55.053799999999995 - type: nauc_map_at_1_max value: 8.1476 - type: nauc_map_at_1_std value: -18.415599999999998 - type: nauc_map_at_1_diff1 value: 61.467499999999994 - type: nauc_map_at_3_max value: 12.961400000000001 - type: nauc_map_at_3_std value: -18.4454 - type: nauc_map_at_3_diff1 value: 56.42 - type: nauc_map_at_5_max value: 13.295599999999999 - type: nauc_map_at_5_std value: -18.293599999999998 - type: nauc_map_at_5_diff1 value: 56.033 - type: nauc_map_at_10_max value: 13.189600000000002 - type: nauc_map_at_10_std value: -18.169 - type: nauc_map_at_10_diff1 value: 56.0467 - type: nauc_map_at_20_max value: 13.0847 - type: nauc_map_at_20_std value: -18.1099 - type: nauc_map_at_20_diff1 value: 56.0909 - type: nauc_map_at_100_max value: 13.0384 - type: nauc_map_at_100_std value: -18.0582 - type: nauc_map_at_100_diff1 value: 56.1735 - type: nauc_map_at_1000_max value: 13.03 - type: nauc_map_at_1000_std value: -18.0598 - type: nauc_map_at_1000_diff1 value: 56.1901 - type: nauc_recall_at_1_max value: 8.1476 - type: nauc_recall_at_1_std value: -18.415599999999998 - type: nauc_recall_at_1_diff1 value: 61.467499999999994 - type: nauc_recall_at_3_max value: 19.6416 - type: nauc_recall_at_3_std value: -18.582099999999997 - type: nauc_recall_at_3_diff1 value: 49.6845 - type: nauc_recall_at_5_max value: 22.2239 - type: nauc_recall_at_5_std value: -17.847099999999998 - type: nauc_recall_at_5_diff1 value: 46.739999999999995 - type: nauc_recall_at_10_max value: 22.8276 - type: nauc_recall_at_10_std value: -16.486600000000003 - type: nauc_recall_at_10_diff1 value: 45.2586 - type: nauc_recall_at_20_max value: 22.2364 - type: nauc_recall_at_20_std value: -14.5036 - type: nauc_recall_at_20_diff1 value: 43.7903 - type: nauc_recall_at_100_max value: 25.254700000000003 - type: nauc_recall_at_100_std value: -3.9357 - type: nauc_recall_at_100_diff1 value: 42.6367 - type: nauc_recall_at_1000_max value: 38.3787 - type: nauc_recall_at_1000_std value: 27.075100000000003 - type: nauc_recall_at_1000_diff1 value: 44.277 - type: nauc_precision_at_1_max value: 8.1476 - type: 
nauc_precision_at_1_std value: -18.415599999999998 - type: nauc_precision_at_1_diff1 value: 61.467499999999994 - type: nauc_precision_at_3_max value: 19.6416 - type: nauc_precision_at_3_std value: -18.582099999999997 - type: nauc_precision_at_3_diff1 value: 49.6845 - type: nauc_precision_at_5_max value: 22.2239 - type: nauc_precision_at_5_std value: -17.847099999999998 - type: nauc_precision_at_5_diff1 value: 46.739999999999995 - type: nauc_precision_at_10_max value: 22.8276 - type: nauc_precision_at_10_std value: -16.486600000000003 - type: nauc_precision_at_10_diff1 value: 45.2586 - type: nauc_precision_at_20_max value: 22.2364 - type: nauc_precision_at_20_std value: -14.5036 - type: nauc_precision_at_20_diff1 value: 43.7903 - type: nauc_precision_at_100_max value: 25.254700000000003 - type: nauc_precision_at_100_std value: -3.9357 - type: nauc_precision_at_100_diff1 value: 42.6367 - type: nauc_precision_at_1000_max value: 38.3787 - type: nauc_precision_at_1000_std value: 27.075100000000003 - type: nauc_precision_at_1000_diff1 value: 44.277 - type: nauc_mrr_at_1_max value: 7.7336 - type: nauc_mrr_at_1_std value: -18.2617 - type: nauc_mrr_at_1_diff1 value: 61.3653 - type: nauc_mrr_at_3_max value: 12.6751 - type: nauc_mrr_at_3_std value: -18.3779 - type: nauc_mrr_at_3_diff1 value: 56.383 - type: nauc_mrr_at_5_max value: 13.031200000000002 - type: nauc_mrr_at_5_std value: -18.252499999999998 - type: nauc_mrr_at_5_diff1 value: 55.9734 - type: nauc_mrr_at_10_max value: 12.934399999999998 - type: nauc_mrr_at_10_std value: -18.0918 - type: nauc_mrr_at_10_diff1 value: 55.9883 - type: nauc_mrr_at_20_max value: 12.8271 - type: nauc_mrr_at_20_std value: -18.0345 - type: nauc_mrr_at_20_diff1 value: 56.033100000000005 - type: nauc_mrr_at_100_max value: 12.7798 - type: nauc_mrr_at_100_std value: -17.983 - type: nauc_mrr_at_100_diff1 value: 56.115700000000004 - type: nauc_mrr_at_1000_max value: 12.771099999999999 - type: nauc_mrr_at_1000_std value: -17.9844 - type: nauc_mrr_at_1000_diff1 value: 56.1323 - type: main_score value: 58.455 - task: type: Retrieval dataset: name: MTEB CodeSearchNetCCRetrieval (python) type: CoIR-Retrieval/CodeSearchNet-ccr config: python split: test revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8 metrics: - type: ndcg_at_1 value: 29.32 - type: ndcg_at_3 value: 37.202 - type: ndcg_at_5 value: 39.399 - type: ndcg_at_10 value: 41.583 - type: ndcg_at_20 value: 43.156 - type: ndcg_at_100 value: 45.506 - type: ndcg_at_1000 value: 47.28 - type: map_at_1 value: 29.32 - type: map_at_3 value: 35.266999999999996 - type: map_at_5 value: 36.489 - type: map_at_10 value: 37.399 - type: map_at_20 value: 37.829 - type: map_at_100 value: 38.149 - type: map_at_1000 value: 38.208 - type: recall_at_1 value: 29.32 - type: recall_at_3 value: 42.801 - type: recall_at_5 value: 48.123 - type: recall_at_10 value: 54.82599999999999 - type: recall_at_20 value: 61.06700000000001 - type: recall_at_100 value: 73.817 - type: recall_at_1000 value: 88.189 - type: precision_at_1 value: 29.32 - type: precision_at_3 value: 14.267 - type: precision_at_5 value: 9.625 - type: precision_at_10 value: 5.483 - type: precision_at_20 value: 3.053 - type: precision_at_100 value: 0.738 - type: precision_at_1000 value: 0.08800000000000001 - type: mrr_at_1 value: 29.3203 - type: mrr_at_3 value: 35.2661 - type: mrr_at_5 value: 36.4878 - type: mrr_at_10 value: 37.398399999999995 - type: mrr_at_20 value: 37.8278 - type: mrr_at_100 value: 38.1474 - type: mrr_at_1000 value: 38.2072 - type: nauc_ndcg_at_1_max value: 27.608 - 
type: nauc_ndcg_at_1_std value: -0.1578 - type: nauc_ndcg_at_1_diff1 value: 50.7213 - type: nauc_ndcg_at_3_max value: 28.488799999999998 - type: nauc_ndcg_at_3_std value: 0.8798 - type: nauc_ndcg_at_3_diff1 value: 46.4513 - type: nauc_ndcg_at_5_max value: 28.2088 - type: nauc_ndcg_at_5_std value: 1.536 - type: nauc_ndcg_at_5_diff1 value: 45.5291 - type: nauc_ndcg_at_10_max value: 28.076600000000003 - type: nauc_ndcg_at_10_std value: 2.4101999999999997 - type: nauc_ndcg_at_10_diff1 value: 45.0789 - type: nauc_ndcg_at_20_max value: 28.1814 - type: nauc_ndcg_at_20_std value: 3.1981999999999995 - type: nauc_ndcg_at_20_diff1 value: 44.8012 - type: nauc_ndcg_at_100_max value: 27.9818 - type: nauc_ndcg_at_100_std value: 3.8790999999999998 - type: nauc_ndcg_at_100_diff1 value: 44.7506 - type: nauc_ndcg_at_1000_max value: 28.1483 - type: nauc_ndcg_at_1000_std value: 3.8562 - type: nauc_ndcg_at_1000_diff1 value: 45.1726 - type: nauc_map_at_1_max value: 27.608 - type: nauc_map_at_1_std value: -0.1578 - type: nauc_map_at_1_diff1 value: 50.7213 - type: nauc_map_at_3_max value: 28.3097 - type: nauc_map_at_3_std value: 0.6224000000000001 - type: nauc_map_at_3_diff1 value: 47.4366 - type: nauc_map_at_5_max value: 28.157500000000002 - type: nauc_map_at_5_std value: 0.9838 - type: nauc_map_at_5_diff1 value: 46.9294 - type: nauc_map_at_10_max value: 28.097 - type: nauc_map_at_10_std value: 1.3426 - type: nauc_map_at_10_diff1 value: 46.7574 - type: nauc_map_at_20_max value: 28.124100000000002 - type: nauc_map_at_20_std value: 1.5459 - type: nauc_map_at_20_diff1 value: 46.6828 - type: nauc_map_at_100_max value: 28.0887 - type: nauc_map_at_100_std value: 1.6311 - type: nauc_map_at_100_diff1 value: 46.684599999999996 - type: nauc_map_at_1000_max value: 28.0938 - type: nauc_map_at_1000_std value: 1.6345999999999998 - type: nauc_map_at_1000_diff1 value: 46.6979 - type: nauc_recall_at_1_max value: 27.608 - type: nauc_recall_at_1_std value: -0.1578 - type: nauc_recall_at_1_diff1 value: 50.7213 - type: nauc_recall_at_3_max value: 28.982000000000003 - type: nauc_recall_at_3_std value: 1.6101 - type: nauc_recall_at_3_diff1 value: 43.6847 - type: nauc_recall_at_5_max value: 28.297800000000002 - type: nauc_recall_at_5_std value: 3.2162 - type: nauc_recall_at_5_diff1 value: 41.402899999999995 - type: nauc_recall_at_10_max value: 27.915499999999998 - type: nauc_recall_at_10_std value: 6.0788 - type: nauc_recall_at_10_diff1 value: 39.7106 - type: nauc_recall_at_20_max value: 28.3661 - type: nauc_recall_at_20_std value: 9.8068 - type: nauc_recall_at_20_diff1 value: 38.153 - type: nauc_recall_at_100_max value: 27.114300000000004 - type: nauc_recall_at_100_std value: 17.0125 - type: nauc_recall_at_100_diff1 value: 35.6053 - type: nauc_recall_at_1000_max value: 29.8655 - type: nauc_recall_at_1000_std value: 28.480800000000002 - type: nauc_recall_at_1000_diff1 value: 35.9375 - type: nauc_precision_at_1_max value: 27.608 - type: nauc_precision_at_1_std value: -0.1578 - type: nauc_precision_at_1_diff1 value: 50.7213 - type: nauc_precision_at_3_max value: 28.982000000000003 - type: nauc_precision_at_3_std value: 1.6101 - type: nauc_precision_at_3_diff1 value: 43.6847 - type: nauc_precision_at_5_max value: 28.297800000000002 - type: nauc_precision_at_5_std value: 3.2162 - type: nauc_precision_at_5_diff1 value: 41.402899999999995 - type: nauc_precision_at_10_max value: 27.915499999999998 - type: nauc_precision_at_10_std value: 6.0788 - type: nauc_precision_at_10_diff1 value: 39.7106 - type: nauc_precision_at_20_max value: 28.3661 - 
type: nauc_precision_at_20_std value: 9.8068 - type: nauc_precision_at_20_diff1 value: 38.153 - type: nauc_precision_at_100_max value: 27.114300000000004 - type: nauc_precision_at_100_std value: 17.0125 - type: nauc_precision_at_100_diff1 value: 35.6053 - type: nauc_precision_at_1000_max value: 29.8655 - type: nauc_precision_at_1000_std value: 28.480800000000002 - type: nauc_precision_at_1000_diff1 value: 35.9375 - type: nauc_mrr_at_1_max value: 27.608 - type: nauc_mrr_at_1_std value: -0.1578 - type: nauc_mrr_at_1_diff1 value: 50.7213 - type: nauc_mrr_at_3_max value: 28.310200000000002 - type: nauc_mrr_at_3_std value: 0.6187 - type: nauc_mrr_at_3_diff1 value: 47.4396 - type: nauc_mrr_at_5_max value: 28.1581 - type: nauc_mrr_at_5_std value: 0.9801000000000001 - type: nauc_mrr_at_5_diff1 value: 46.9325 - type: nauc_mrr_at_10_max value: 28.097499999999997 - type: nauc_mrr_at_10_std value: 1.3393 - type: nauc_mrr_at_10_diff1 value: 46.760600000000004 - type: nauc_mrr_at_20_max value: 28.124700000000004 - type: nauc_mrr_at_20_std value: 1.5427 - type: nauc_mrr_at_20_diff1 value: 46.686 - type: nauc_mrr_at_100_max value: 28.0893 - type: nauc_mrr_at_100_std value: 1.6274 - type: nauc_mrr_at_100_diff1 value: 46.6879 - type: nauc_mrr_at_1000_max value: 28.0943 - type: nauc_mrr_at_1000_std value: 1.6312 - type: nauc_mrr_at_1000_diff1 value: 46.7012 - type: main_score value: 41.583 - task: type: Retrieval dataset: name: MTEB CodeSearchNetCCRetrieval (javascript) type: CoIR-Retrieval/CodeSearchNet-ccr config: javascript split: test revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8 metrics: - type: ndcg_at_1 value: 30.294999999999998 - type: ndcg_at_3 value: 38.102000000000004 - type: ndcg_at_5 value: 40.164 - type: ndcg_at_10 value: 42.041000000000004 - type: ndcg_at_20 value: 43.464000000000006 - type: ndcg_at_100 value: 45.791 - type: ndcg_at_1000 value: 47.689 - type: map_at_1 value: 30.294999999999998 - type: map_at_3 value: 36.19 - type: map_at_5 value: 37.332 - type: map_at_10 value: 38.112 - type: map_at_20 value: 38.507999999999996 - type: map_at_100 value: 38.812999999999995 - type: map_at_1000 value: 38.875 - type: recall_at_1 value: 30.294999999999998 - type: recall_at_3 value: 43.634 - type: recall_at_5 value: 48.648 - type: recall_at_10 value: 54.421 - type: recall_at_20 value: 60.012 - type: recall_at_100 value: 72.80499999999999 - type: recall_at_1000 value: 88.271 - type: precision_at_1 value: 30.294999999999998 - type: precision_at_3 value: 14.545 - type: precision_at_5 value: 9.73 - type: precision_at_10 value: 5.442 - type: precision_at_20 value: 3.001 - type: precision_at_100 value: 0.728 - type: precision_at_1000 value: 0.08800000000000001 - type: mrr_at_1 value: 30.294700000000002 - type: mrr_at_3 value: 36.1845 - type: mrr_at_5 value: 37.3271 - type: mrr_at_10 value: 38.1071 - type: mrr_at_20 value: 38.502700000000004 - type: mrr_at_100 value: 38.8081 - type: mrr_at_1000 value: 38.8701 - type: nauc_ndcg_at_1_max value: 26.3264 - type: nauc_ndcg_at_1_std value: -4.8982 - type: nauc_ndcg_at_1_diff1 value: 50.14189999999999 - type: nauc_ndcg_at_3_max value: 27.4968 - type: nauc_ndcg_at_3_std value: -4.1065 - type: nauc_ndcg_at_3_diff1 value: 46.0956 - type: nauc_ndcg_at_5_max value: 28.409299999999998 - type: nauc_ndcg_at_5_std value: -3.7853 - type: nauc_ndcg_at_5_diff1 value: 45.6748 - type: nauc_ndcg_at_10_max value: 27.942 - type: nauc_ndcg_at_10_std value: -3.3216 - type: nauc_ndcg_at_10_diff1 value: 44.6236 - type: nauc_ndcg_at_20_max value: 27.47 - type: nauc_ndcg_at_20_std 
value: -3.1514 - type: nauc_ndcg_at_20_diff1 value: 44.74 - type: nauc_ndcg_at_100_max value: 27.4711 - type: nauc_ndcg_at_100_std value: -3.0054999999999996 - type: nauc_ndcg_at_100_diff1 value: 44.5073 - type: nauc_ndcg_at_1000_max value: 27.7016 - type: nauc_ndcg_at_1000_std value: -3.0528 - type: nauc_ndcg_at_1000_diff1 value: 44.8851 - type: nauc_map_at_1_max value: 26.3264 - type: nauc_map_at_1_std value: -4.8982 - type: nauc_map_at_1_diff1 value: 50.14189999999999 - type: nauc_map_at_3_max value: 27.279500000000002 - type: nauc_map_at_3_std value: -4.2798 - type: nauc_map_at_3_diff1 value: 47.0454 - type: nauc_map_at_5_max value: 27.776600000000002 - type: nauc_map_at_5_std value: -4.1068 - type: nauc_map_at_5_diff1 value: 46.8171 - type: nauc_map_at_10_max value: 27.589399999999998 - type: nauc_map_at_10_std value: -3.8844 - type: nauc_map_at_10_diff1 value: 46.4013 - type: nauc_map_at_20_max value: 27.455099999999998 - type: nauc_map_at_20_std value: -3.8475 - type: nauc_map_at_20_diff1 value: 46.4395 - type: nauc_map_at_100_max value: 27.470299999999998 - type: nauc_map_at_100_std value: -3.8240000000000003 - type: nauc_map_at_100_diff1 value: 46.4176 - type: nauc_map_at_1000_max value: 27.473 - type: nauc_map_at_1000_std value: -3.8289999999999997 - type: nauc_map_at_1000_diff1 value: 46.427 - type: nauc_recall_at_1_max value: 26.3264 - type: nauc_recall_at_1_std value: -4.8982 - type: nauc_recall_at_1_diff1 value: 50.14189999999999 - type: nauc_recall_at_3_max value: 28.089599999999997 - type: nauc_recall_at_3_std value: -3.6197 - type: nauc_recall_at_3_diff1 value: 43.4007 - type: nauc_recall_at_5_max value: 30.3494 - type: nauc_recall_at_5_std value: -2.8134 - type: nauc_recall_at_5_diff1 value: 42.3267 - type: nauc_recall_at_10_max value: 28.9106 - type: nauc_recall_at_10_std value: -1.4690999999999999 - type: nauc_recall_at_10_diff1 value: 38.7477 - type: nauc_recall_at_20_max value: 27.0023 - type: nauc_recall_at_20_std value: -0.5613 - type: nauc_recall_at_20_diff1 value: 38.874700000000004 - type: nauc_recall_at_100_max value: 26.4945 - type: nauc_recall_at_100_std value: 1.1353 - type: nauc_recall_at_100_diff1 value: 35.297200000000004 - type: nauc_recall_at_1000_max value: 31.464100000000002 - type: nauc_recall_at_1000_std value: 5.16 - type: nauc_recall_at_1000_diff1 value: 34.5536 - type: nauc_precision_at_1_max value: 26.3264 - type: nauc_precision_at_1_std value: -4.8982 - type: nauc_precision_at_1_diff1 value: 50.14189999999999 - type: nauc_precision_at_3_max value: 28.089599999999997 - type: nauc_precision_at_3_std value: -3.6197 - type: nauc_precision_at_3_diff1 value: 43.4007 - type: nauc_precision_at_5_max value: 30.3494 - type: nauc_precision_at_5_std value: -2.8134 - type: nauc_precision_at_5_diff1 value: 42.3267 - type: nauc_precision_at_10_max value: 28.9106 - type: nauc_precision_at_10_std value: -1.4690999999999999 - type: nauc_precision_at_10_diff1 value: 38.7477 - type: nauc_precision_at_20_max value: 27.0023 - type: nauc_precision_at_20_std value: -0.5613 - type: nauc_precision_at_20_diff1 value: 38.874700000000004 - type: nauc_precision_at_100_max value: 26.4945 - type: nauc_precision_at_100_std value: 1.1353 - type: nauc_precision_at_100_diff1 value: 35.297200000000004 - type: nauc_precision_at_1000_max value: 31.464100000000002 - type: nauc_precision_at_1000_std value: 5.16 - type: nauc_precision_at_1000_diff1 value: 34.5536 - type: nauc_mrr_at_1_max value: 26.464199999999998 - type: nauc_mrr_at_1_std value: -4.6967 - type: nauc_mrr_at_1_diff1 
value: 50.14189999999999 - type: nauc_mrr_at_3_max value: 27.3495 - type: nauc_mrr_at_3_std value: -4.1872 - type: nauc_mrr_at_3_diff1 value: 47.0534 - type: nauc_mrr_at_5_max value: 27.8469 - type: nauc_mrr_at_5_std value: -4.0137 - type: nauc_mrr_at_5_diff1 value: 46.8252 - type: nauc_mrr_at_10_max value: 27.660200000000003 - type: nauc_mrr_at_10_std value: -3.7907 - type: nauc_mrr_at_10_diff1 value: 46.4094 - type: nauc_mrr_at_20_max value: 27.526699999999998 - type: nauc_mrr_at_20_std value: -3.7532 - type: nauc_mrr_at_20_diff1 value: 46.448 - type: nauc_mrr_at_100_max value: 27.5422 - type: nauc_mrr_at_100_std value: -3.7292 - type: nauc_mrr_at_100_diff1 value: 46.4261 - type: nauc_mrr_at_1000_max value: 27.544999999999998 - type: nauc_mrr_at_1000_std value: -3.734 - type: nauc_mrr_at_1000_diff1 value: 46.435500000000005 - type: main_score value: 42.041000000000004 - task: type: Retrieval dataset: name: MTEB CodeSearchNetCCRetrieval (go) type: CoIR-Retrieval/CodeSearchNet-ccr config: go split: test revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8 metrics: - type: ndcg_at_1 value: 23.615 - type: ndcg_at_3 value: 29.892999999999997 - type: ndcg_at_5 value: 31.953 - type: ndcg_at_10 value: 33.861000000000004 - type: ndcg_at_20 value: 35.402 - type: ndcg_at_100 value: 37.891000000000005 - type: ndcg_at_1000 value: 40.036 - type: map_at_1 value: 23.615 - type: map_at_3 value: 28.366999999999997 - type: map_at_5 value: 29.511 - type: map_at_10 value: 30.304 - type: map_at_20 value: 30.732 - type: map_at_100 value: 31.062 - type: map_at_1000 value: 31.133 - type: recall_at_1 value: 23.615 - type: recall_at_3 value: 34.302 - type: recall_at_5 value: 39.301 - type: recall_at_10 value: 45.174 - type: recall_at_20 value: 51.231 - type: recall_at_100 value: 64.849 - type: recall_at_1000 value: 82.307 - type: precision_at_1 value: 23.615 - type: precision_at_3 value: 11.434 - type: precision_at_5 value: 7.86 - type: precision_at_10 value: 4.517 - type: precision_at_20 value: 2.562 - type: precision_at_100 value: 0.6479999999999999 - type: precision_at_1000 value: 0.082 - type: mrr_at_1 value: 23.5902 - type: mrr_at_3 value: 28.353 - type: mrr_at_5 value: 29.4987 - type: mrr_at_10 value: 30.292099999999998 - type: mrr_at_20 value: 30.72 - type: mrr_at_100 value: 31.049599999999998 - type: mrr_at_1000 value: 31.120399999999997 - type: nauc_ndcg_at_1_max value: 29.1258 - type: nauc_ndcg_at_1_std value: 1.0401 - type: nauc_ndcg_at_1_diff1 value: 47.328199999999995 - type: nauc_ndcg_at_3_max value: 27.8848 - type: nauc_ndcg_at_3_std value: 0.2671 - type: nauc_ndcg_at_3_diff1 value: 41.4436 - type: nauc_ndcg_at_5_max value: 27.475300000000004 - type: nauc_ndcg_at_5_std value: -0.1773 - type: nauc_ndcg_at_5_diff1 value: 40.184999999999995 - type: nauc_ndcg_at_10_max value: 27.1682 - type: nauc_ndcg_at_10_std value: -0.0666 - type: nauc_ndcg_at_10_diff1 value: 39.698 - type: nauc_ndcg_at_20_max value: 26.822699999999998 - type: nauc_ndcg_at_20_std value: 0.3046 - type: nauc_ndcg_at_20_diff1 value: 39.0465 - type: nauc_ndcg_at_100_max value: 26.55 - type: nauc_ndcg_at_100_std value: 0.9386 - type: nauc_ndcg_at_100_diff1 value: 38.4816 - type: nauc_ndcg_at_1000_max value: 26.8464 - type: nauc_ndcg_at_1000_std value: 1.601 - type: nauc_ndcg_at_1000_diff1 value: 38.75 - type: nauc_map_at_1_max value: 29.1258 - type: nauc_map_at_1_std value: 1.0401 - type: nauc_map_at_1_diff1 value: 47.328199999999995 - type: nauc_map_at_3_max value: 28.1313 - type: nauc_map_at_3_std value: 0.4596 - type: nauc_map_at_3_diff1 
value: 42.743199999999995 - type: nauc_map_at_5_max value: 27.91 - type: nauc_map_at_5_std value: 0.1926 - type: nauc_map_at_5_diff1 value: 42.0283 - type: nauc_map_at_10_max value: 27.7964 - type: nauc_map_at_10_std value: 0.2326 - type: nauc_map_at_10_diff1 value: 41.8324 - type: nauc_map_at_20_max value: 27.6958 - type: nauc_map_at_20_std value: 0.3369 - type: nauc_map_at_20_diff1 value: 41.6458 - type: nauc_map_at_100_max value: 27.6475 - type: nauc_map_at_100_std value: 0.4118 - type: nauc_map_at_100_diff1 value: 41.5667 - type: nauc_map_at_1000_max value: 27.654899999999998 - type: nauc_map_at_1000_std value: 0.43439999999999995 - type: nauc_map_at_1000_diff1 value: 41.578199999999995 - type: nauc_recall_at_1_max value: 29.1258 - type: nauc_recall_at_1_std value: 1.0401 - type: nauc_recall_at_1_diff1 value: 47.328199999999995 - type: nauc_recall_at_3_max value: 27.232200000000002 - type: nauc_recall_at_3_std value: -0.25980000000000003 - type: nauc_recall_at_3_diff1 value: 37.946200000000005 - type: nauc_recall_at_5_max value: 26.266000000000002 - type: nauc_recall_at_5_std value: -1.2084 - type: nauc_recall_at_5_diff1 value: 35.1318 - type: nauc_recall_at_10_max value: 25.2762 - type: nauc_recall_at_10_std value: -0.8635 - type: nauc_recall_at_10_diff1 value: 33.6001 - type: nauc_recall_at_20_max value: 23.9389 - type: nauc_recall_at_20_std value: 0.5331 - type: nauc_recall_at_20_diff1 value: 30.9907 - type: nauc_recall_at_100_max value: 21.9631 - type: nauc_recall_at_100_std value: 4.6604 - type: nauc_recall_at_100_diff1 value: 26.1225 - type: nauc_recall_at_1000_max value: 23.450699999999998 - type: nauc_recall_at_1000_std value: 17.0092 - type: nauc_recall_at_1000_diff1 value: 21.3813 - type: nauc_precision_at_1_max value: 29.1258 - type: nauc_precision_at_1_std value: 1.0401 - type: nauc_precision_at_1_diff1 value: 47.328199999999995 - type: nauc_precision_at_3_max value: 27.232200000000002 - type: nauc_precision_at_3_std value: -0.25980000000000003 - type: nauc_precision_at_3_diff1 value: 37.946200000000005 - type: nauc_precision_at_5_max value: 26.266000000000002 - type: nauc_precision_at_5_std value: -1.2084 - type: nauc_precision_at_5_diff1 value: 35.1318 - type: nauc_precision_at_10_max value: 25.2762 - type: nauc_precision_at_10_std value: -0.8635 - type: nauc_precision_at_10_diff1 value: 33.6001 - type: nauc_precision_at_20_max value: 23.9389 - type: nauc_precision_at_20_std value: 0.5331 - type: nauc_precision_at_20_diff1 value: 30.9907 - type: nauc_precision_at_100_max value: 21.9631 - type: nauc_precision_at_100_std value: 4.6604 - type: nauc_precision_at_100_diff1 value: 26.1225 - type: nauc_precision_at_1000_max value: 23.450699999999998 - type: nauc_precision_at_1000_std value: 17.0092 - type: nauc_precision_at_1000_diff1 value: 21.3813 - type: nauc_mrr_at_1_max value: 29.1731 - type: nauc_mrr_at_1_std value: 1.0801 - type: nauc_mrr_at_1_diff1 value: 47.429 - type: nauc_mrr_at_3_max value: 28.1768 - type: nauc_mrr_at_3_std value: 0.4919 - type: nauc_mrr_at_3_diff1 value: 42.830200000000005 - type: nauc_mrr_at_5_max value: 27.9396 - type: nauc_mrr_at_5_std value: 0.2168 - type: nauc_mrr_at_5_diff1 value: 42.0956 - type: nauc_mrr_at_10_max value: 27.8301 - type: nauc_mrr_at_10_std value: 0.2567 - type: nauc_mrr_at_10_diff1 value: 41.8926 - type: nauc_mrr_at_20_max value: 27.7297 - type: nauc_mrr_at_20_std value: 0.3648 - type: nauc_mrr_at_20_diff1 value: 41.7068 - type: nauc_mrr_at_100_max value: 27.6788 - type: nauc_mrr_at_100_std value: 0.43550000000000005 - type: 
nauc_mrr_at_100_diff1 value: 41.626000000000005 - type: nauc_mrr_at_1000_max value: 27.6876 - type: nauc_mrr_at_1000_std value: 0.4594 - type: nauc_mrr_at_1000_diff1 value: 41.6377 - type: main_score value: 33.861000000000004 - task: type: Retrieval dataset: name: MTEB CodeSearchNetCCRetrieval (ruby) type: CoIR-Retrieval/CodeSearchNet-ccr config: ruby split: test revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8 metrics: - type: ndcg_at_1 value: 32.99 - type: ndcg_at_3 value: 40.416999999999994 - type: ndcg_at_5 value: 42.492000000000004 - type: ndcg_at_10 value: 44.528 - type: ndcg_at_20 value: 46.135999999999996 - type: ndcg_at_100 value: 48.33 - type: ndcg_at_1000 value: 50.047 - type: map_at_1 value: 32.99 - type: map_at_3 value: 38.647 - type: map_at_5 value: 39.789 - type: map_at_10 value: 40.62 - type: map_at_20 value: 41.062 - type: map_at_100 value: 41.366 - type: map_at_1000 value: 41.422 - type: recall_at_1 value: 32.99 - type: recall_at_3 value: 45.519 - type: recall_at_5 value: 50.595 - type: recall_at_10 value: 56.93899999999999 - type: recall_at_20 value: 63.283 - type: recall_at_100 value: 75.099 - type: recall_at_1000 value: 89.13600000000001 - type: precision_at_1 value: 32.99 - type: precision_at_3 value: 15.173 - type: precision_at_5 value: 10.119 - type: precision_at_10 value: 5.694 - type: precision_at_20 value: 3.164 - type: precision_at_100 value: 0.751 - type: precision_at_1000 value: 0.089 - type: mrr_at_1 value: 33.068999999999996 - type: mrr_at_3 value: 38.6862 - type: mrr_at_5 value: 39.8282 - type: mrr_at_10 value: 40.6593 - type: mrr_at_20 value: 41.1016 - type: mrr_at_100 value: 41.4058 - type: mrr_at_1000 value: 41.4614 - type: nauc_ndcg_at_1_max value: 34.985699999999994 - type: nauc_ndcg_at_1_std value: -7.5317 - type: nauc_ndcg_at_1_diff1 value: 55.82899999999999 - type: nauc_ndcg_at_3_max value: 34.3163 - type: nauc_ndcg_at_3_std value: -7.0863 - type: nauc_ndcg_at_3_diff1 value: 50.0509 - type: nauc_ndcg_at_5_max value: 33.7316 - type: nauc_ndcg_at_5_std value: -7.3946 - type: nauc_ndcg_at_5_diff1 value: 48.7525 - type: nauc_ndcg_at_10_max value: 34.6192 - type: nauc_ndcg_at_10_std value: -6.7839 - type: nauc_ndcg_at_10_diff1 value: 48.6166 - type: nauc_ndcg_at_20_max value: 34.334399999999995 - type: nauc_ndcg_at_20_std value: -7.0675 - type: nauc_ndcg_at_20_diff1 value: 48.0635 - type: nauc_ndcg_at_100_max value: 34.6406 - type: nauc_ndcg_at_100_std value: -6.8653 - type: nauc_ndcg_at_100_diff1 value: 48.617 - type: nauc_ndcg_at_1000_max value: 34.2365 - type: nauc_ndcg_at_1000_std value: -7.0976 - type: nauc_ndcg_at_1000_diff1 value: 48.464200000000005 - type: nauc_map_at_1_max value: 34.985699999999994 - type: nauc_map_at_1_std value: -7.5317 - type: nauc_map_at_1_diff1 value: 55.82899999999999 - type: nauc_map_at_3_max value: 34.577000000000005 - type: nauc_map_at_3_std value: -7.1427000000000005 - type: nauc_map_at_3_diff1 value: 51.4256 - type: nauc_map_at_5_max value: 34.2296 - type: nauc_map_at_5_std value: -7.322299999999999 - type: nauc_map_at_5_diff1 value: 50.709700000000005 - type: nauc_map_at_10_max value: 34.633900000000004 - type: nauc_map_at_10_std value: -7.056900000000001 - type: nauc_map_at_10_diff1 value: 50.714099999999995 - type: nauc_map_at_20_max value: 34.5386 - type: nauc_map_at_20_std value: -7.142900000000001 - type: nauc_map_at_20_diff1 value: 50.568900000000006 - type: nauc_map_at_100_max value: 34.5697 - type: nauc_map_at_100_std value: -7.1189 - type: nauc_map_at_100_diff1 value: 50.6351 - type: nauc_map_at_1000_max 
value: 34.558499999999995 - type: nauc_map_at_1000_std value: -7.1173 - type: nauc_map_at_1000_diff1 value: 50.6277 - type: nauc_recall_at_1_max value: 34.985699999999994 - type: nauc_recall_at_1_std value: -7.5317 - type: nauc_recall_at_1_diff1 value: 55.82899999999999 - type: nauc_recall_at_3_max value: 33.5265 - type: nauc_recall_at_3_std value: -6.9448 - type: nauc_recall_at_3_diff1 value: 46.1063 - type: nauc_recall_at_5_max value: 32.1817 - type: nauc_recall_at_5_std value: -7.6609 - type: nauc_recall_at_5_diff1 value: 42.8551 - type: nauc_recall_at_10_max value: 34.7502 - type: nauc_recall_at_10_std value: -5.7719 - type: nauc_recall_at_10_diff1 value: 41.7549 - type: nauc_recall_at_20_max value: 33.6546 - type: nauc_recall_at_20_std value: -6.862500000000001 - type: nauc_recall_at_20_diff1 value: 38.6947 - type: nauc_recall_at_100_max value: 36.095699999999994 - type: nauc_recall_at_100_std value: -5.2094000000000005 - type: nauc_recall_at_100_diff1 value: 40.336800000000004 - type: nauc_recall_at_1000_max value: 27.8549 - type: nauc_recall_at_1000_std value: -10.570699999999999 - type: nauc_recall_at_1000_diff1 value: 28.6812 - type: nauc_precision_at_1_max value: 34.985699999999994 - type: nauc_precision_at_1_std value: -7.5317 - type: nauc_precision_at_1_diff1 value: 55.82899999999999 - type: nauc_precision_at_3_max value: 33.5265 - type: nauc_precision_at_3_std value: -6.9448 - type: nauc_precision_at_3_diff1 value: 46.1063 - type: nauc_precision_at_5_max value: 32.1817 - type: nauc_precision_at_5_std value: -7.6609 - type: nauc_precision_at_5_diff1 value: 42.8551 - type: nauc_precision_at_10_max value: 34.7502 - type: nauc_precision_at_10_std value: -5.7719 - type: nauc_precision_at_10_diff1 value: 41.7549 - type: nauc_precision_at_20_max value: 33.6546 - type: nauc_precision_at_20_std value: -6.862500000000001 - type: nauc_precision_at_20_diff1 value: 38.6947 - type: nauc_precision_at_100_max value: 36.095699999999994 - type: nauc_precision_at_100_std value: -5.2094000000000005 - type: nauc_precision_at_100_diff1 value: 40.336800000000004 - type: nauc_precision_at_1000_max value: 27.8549 - type: nauc_precision_at_1000_std value: -10.570699999999999 - type: nauc_precision_at_1000_diff1 value: 28.6812 - type: nauc_mrr_at_1_max value: 35.099599999999995 - type: nauc_mrr_at_1_std value: -7.268199999999999 - type: nauc_mrr_at_1_diff1 value: 55.5813 - type: nauc_mrr_at_3_max value: 34.6335 - type: nauc_mrr_at_3_std value: -7.012300000000001 - type: nauc_mrr_at_3_diff1 value: 51.3038 - type: nauc_mrr_at_5_max value: 34.2864 - type: nauc_mrr_at_5_std value: -7.1912 - type: nauc_mrr_at_5_diff1 value: 50.5873 - type: nauc_mrr_at_10_max value: 34.6912 - type: nauc_mrr_at_10_std value: -6.9247000000000005 - type: nauc_mrr_at_10_diff1 value: 50.5908 - type: nauc_mrr_at_20_max value: 34.596199999999996 - type: nauc_mrr_at_20_std value: -7.01 - type: nauc_mrr_at_20_diff1 value: 50.4448 - type: nauc_mrr_at_100_max value: 34.6274 - type: nauc_mrr_at_100_std value: -6.984999999999999 - type: nauc_mrr_at_100_diff1 value: 50.510200000000005 - type: nauc_mrr_at_1000_max value: 34.6163 - type: nauc_mrr_at_1000_std value: -6.9832 - type: nauc_mrr_at_1000_diff1 value: 50.5026 - type: main_score value: 44.528 - task: type: Retrieval dataset: name: MTEB CodeSearchNetCCRetrieval (java) type: CoIR-Retrieval/CodeSearchNet-ccr config: java split: test revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8 metrics: - type: ndcg_at_1 value: 26.407999999999998 - type: ndcg_at_3 value: 33.356 - type: ndcg_at_5 
value: 35.143 - type: ndcg_at_10 value: 37.008 - type: ndcg_at_20 value: 38.394 - type: ndcg_at_100 value: 40.726 - type: ndcg_at_1000 value: 42.648 - type: map_at_1 value: 26.407999999999998 - type: map_at_3 value: 31.663000000000004 - type: map_at_5 value: 32.651 - type: map_at_10 value: 33.424 - type: map_at_20 value: 33.808 - type: map_at_100 value: 34.121 - type: map_at_1000 value: 34.184 - type: recall_at_1 value: 26.407999999999998 - type: recall_at_3 value: 38.247 - type: recall_at_5 value: 42.602000000000004 - type: recall_at_10 value: 48.352000000000004 - type: recall_at_20 value: 53.811 - type: recall_at_100 value: 66.508 - type: recall_at_1000 value: 82.173 - type: precision_at_1 value: 26.407999999999998 - type: precision_at_3 value: 12.748999999999999 - type: precision_at_5 value: 8.52 - type: precision_at_10 value: 4.835 - type: precision_at_20 value: 2.691 - type: precision_at_100 value: 0.6649999999999999 - type: precision_at_1000 value: 0.082 - type: mrr_at_1 value: 26.4263 - type: mrr_at_3 value: 31.673499999999997 - type: mrr_at_5 value: 32.6607 - type: mrr_at_10 value: 33.4314 - type: mrr_at_20 value: 33.8153 - type: mrr_at_100 value: 34.1293 - type: mrr_at_1000 value: 34.192499999999995 - type: nauc_ndcg_at_1_max value: 29.026600000000002 - type: nauc_ndcg_at_1_std value: -5.3401 - type: nauc_ndcg_at_1_diff1 value: 51.7505 - type: nauc_ndcg_at_3_max value: 30.0657 - type: nauc_ndcg_at_3_std value: -4.2413 - type: nauc_ndcg_at_3_diff1 value: 46.476600000000005 - type: nauc_ndcg_at_5_max value: 29.7155 - type: nauc_ndcg_at_5_std value: -3.8619 - type: nauc_ndcg_at_5_diff1 value: 45.5131 - type: nauc_ndcg_at_10_max value: 29.4459 - type: nauc_ndcg_at_10_std value: -3.3680000000000003 - type: nauc_ndcg_at_10_diff1 value: 44.6258 - type: nauc_ndcg_at_20_max value: 29.135499999999997 - type: nauc_ndcg_at_20_std value: -3.0517 - type: nauc_ndcg_at_20_diff1 value: 44.1 - type: nauc_ndcg_at_100_max value: 29.131400000000003 - type: nauc_ndcg_at_100_std value: -2.03 - type: nauc_ndcg_at_100_diff1 value: 43.7972 - type: nauc_ndcg_at_1000_max value: 29.285099999999996 - type: nauc_ndcg_at_1000_std value: -1.9141 - type: nauc_ndcg_at_1000_diff1 value: 44.1738 - type: nauc_map_at_1_max value: 29.026600000000002 - type: nauc_map_at_1_std value: -5.3401 - type: nauc_map_at_1_diff1 value: 51.7505 - type: nauc_map_at_3_max value: 29.8237 - type: nauc_map_at_3_std value: -4.5517 - type: nauc_map_at_3_diff1 value: 47.6757 - type: nauc_map_at_5_max value: 29.624200000000002 - type: nauc_map_at_5_std value: -4.338100000000001 - type: nauc_map_at_5_diff1 value: 47.1309 - type: nauc_map_at_10_max value: 29.5078 - type: nauc_map_at_10_std value: -4.1374 - type: nauc_map_at_10_diff1 value: 46.7589 - type: nauc_map_at_20_max value: 29.421000000000003 - type: nauc_map_at_20_std value: -4.0543000000000005 - type: nauc_map_at_20_diff1 value: 46.6131 - type: nauc_map_at_100_max value: 29.411199999999997 - type: nauc_map_at_100_std value: -3.9336 - type: nauc_map_at_100_diff1 value: 46.578199999999995 - type: nauc_map_at_1000_max value: 29.4134 - type: nauc_map_at_1000_std value: -3.9301000000000004 - type: nauc_map_at_1000_diff1 value: 46.5892 - type: nauc_recall_at_1_max value: 29.026600000000002 - type: nauc_recall_at_1_std value: -5.3401 - type: nauc_recall_at_1_diff1 value: 51.7505 - type: nauc_recall_at_3_max value: 30.7299 - type: nauc_recall_at_3_std value: -3.3682999999999996 - type: nauc_recall_at_3_diff1 value: 43.1978 - type: nauc_recall_at_5_max value: 29.9215 - type: 
nauc_recall_at_5_std value: -2.4814 - type: nauc_recall_at_5_diff1 value: 40.9532 - type: nauc_recall_at_10_max value: 29.1323 - type: nauc_recall_at_10_std value: -0.9436999999999999 - type: nauc_recall_at_10_diff1 value: 38.221199999999996 - type: nauc_recall_at_20_max value: 27.889999999999997 - type: nauc_recall_at_20_std value: 0.4464 - type: nauc_recall_at_20_diff1 value: 35.8795 - type: nauc_recall_at_100_max value: 27.8094 - type: nauc_recall_at_100_std value: 7.914499999999999 - type: nauc_recall_at_100_diff1 value: 32.3117 - type: nauc_recall_at_1000_max value: 29.6608 - type: nauc_recall_at_1000_std value: 15.9532 - type: nauc_recall_at_1000_diff1 value: 31.069799999999997 - type: nauc_precision_at_1_max value: 29.026600000000002 - type: nauc_precision_at_1_std value: -5.3401 - type: nauc_precision_at_1_diff1 value: 51.7505 - type: nauc_precision_at_3_max value: 30.7299 - type: nauc_precision_at_3_std value: -3.3682999999999996 - type: nauc_precision_at_3_diff1 value: 43.1978 - type: nauc_precision_at_5_max value: 29.9215 - type: nauc_precision_at_5_std value: -2.4814 - type: nauc_precision_at_5_diff1 value: 40.9532 - type: nauc_precision_at_10_max value: 29.1323 - type: nauc_precision_at_10_std value: -0.9436999999999999 - type: nauc_precision_at_10_diff1 value: 38.221199999999996 - type: nauc_precision_at_20_max value: 27.889999999999997 - type: nauc_precision_at_20_std value: 0.4464 - type: nauc_precision_at_20_diff1 value: 35.8795 - type: nauc_precision_at_100_max value: 27.8094 - type: nauc_precision_at_100_std value: 7.914499999999999 - type: nauc_precision_at_100_diff1 value: 32.3117 - type: nauc_precision_at_1000_max value: 29.6608 - type: nauc_precision_at_1000_std value: 15.9532 - type: nauc_precision_at_1000_diff1 value: 31.069799999999997 - type: nauc_mrr_at_1_max value: 29.0947 - type: nauc_mrr_at_1_std value: -5.2643 - type: nauc_mrr_at_1_diff1 value: 51.678000000000004 - type: nauc_mrr_at_3_max value: 29.8523 - type: nauc_mrr_at_3_std value: -4.5234000000000005 - type: nauc_mrr_at_3_diff1 value: 47.653099999999995 - type: nauc_mrr_at_5_max value: 29.648799999999998 - type: nauc_mrr_at_5_std value: -4.3013 - type: nauc_mrr_at_5_diff1 value: 47.105799999999995 - type: nauc_mrr_at_10_max value: 29.5336 - type: nauc_mrr_at_10_std value: -4.1075 - type: nauc_mrr_at_10_diff1 value: 46.733799999999995 - type: nauc_mrr_at_20_max value: 29.451500000000003 - type: nauc_mrr_at_20_std value: -4.0183 - type: nauc_mrr_at_20_diff1 value: 46.5858 - type: nauc_mrr_at_100_max value: 29.440699999999996 - type: nauc_mrr_at_100_std value: -3.8987000000000003 - type: nauc_mrr_at_100_diff1 value: 46.5526 - type: nauc_mrr_at_1000_max value: 29.442899999999998 - type: nauc_mrr_at_1000_std value: -3.8952 - type: nauc_mrr_at_1000_diff1 value: 46.563500000000005 - type: main_score value: 37.008 - task: type: Retrieval dataset: name: MTEB CodeSearchNetCCRetrieval (php) type: CoIR-Retrieval/CodeSearchNet-ccr config: php split: test revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8 metrics: - type: ndcg_at_1 value: 21.022 - type: ndcg_at_3 value: 27.082 - type: ndcg_at_5 value: 28.956 - type: ndcg_at_10 value: 30.791 - type: ndcg_at_20 value: 32.301 - type: ndcg_at_100 value: 34.794000000000004 - type: ndcg_at_1000 value: 37.082 - type: map_at_1 value: 21.022 - type: map_at_3 value: 25.593 - type: map_at_5 value: 26.634999999999998 - type: map_at_10 value: 27.395000000000003 - type: map_at_20 value: 27.811000000000003 - type: map_at_100 value: 28.143 - type: map_at_1000 value: 28.218 - type: 
recall_at_1 value: 21.022 - type: recall_at_3 value: 31.39 - type: recall_at_5 value: 35.935 - type: recall_at_10 value: 41.593999999999994 - type: recall_at_20 value: 47.552 - type: recall_at_100 value: 61.18900000000001 - type: recall_at_1000 value: 79.827 - type: precision_at_1 value: 21.022 - type: precision_at_3 value: 10.463000000000001 - type: precision_at_5 value: 7.187 - type: precision_at_10 value: 4.159 - type: precision_at_20 value: 2.378 - type: precision_at_100 value: 0.612 - type: precision_at_1000 value: 0.08 - type: mrr_at_1 value: 21.0218 - type: mrr_at_3 value: 25.588699999999996 - type: mrr_at_5 value: 26.631899999999998 - type: mrr_at_10 value: 27.3915 - type: mrr_at_20 value: 27.807900000000004 - type: mrr_at_100 value: 28.138800000000003 - type: mrr_at_1000 value: 28.2141 - type: nauc_ndcg_at_1_max value: 22.1861 - type: nauc_ndcg_at_1_std value: -3.218 - type: nauc_ndcg_at_1_diff1 value: 46.4989 - type: nauc_ndcg_at_3_max value: 21.7282 - type: nauc_ndcg_at_3_std value: -2.1185 - type: nauc_ndcg_at_3_diff1 value: 40.8096 - type: nauc_ndcg_at_5_max value: 21.339199999999998 - type: nauc_ndcg_at_5_std value: -1.6541000000000001 - type: nauc_ndcg_at_5_diff1 value: 39.6483 - type: nauc_ndcg_at_10_max value: 20.9441 - type: nauc_ndcg_at_10_std value: -0.8141 - type: nauc_ndcg_at_10_diff1 value: 38.5517 - type: nauc_ndcg_at_20_max value: 20.7702 - type: nauc_ndcg_at_20_std value: -0.293 - type: nauc_ndcg_at_20_diff1 value: 38.2386 - type: nauc_ndcg_at_100_max value: 20.569100000000002 - type: nauc_ndcg_at_100_std value: 0.8404 - type: nauc_ndcg_at_100_diff1 value: 37.6899 - type: nauc_ndcg_at_1000_max value: 20.72 - type: nauc_ndcg_at_1000_std value: 0.9279000000000001 - type: nauc_ndcg_at_1000_diff1 value: 37.9486 - type: nauc_map_at_1_max value: 22.1861 - type: nauc_map_at_1_std value: -3.218 - type: nauc_map_at_1_diff1 value: 46.4989 - type: nauc_map_at_3_max value: 21.86 - type: nauc_map_at_3_std value: -2.4015999999999997 - type: nauc_map_at_3_diff1 value: 42.0695 - type: nauc_map_at_5_max value: 21.6404 - type: nauc_map_at_5_std value: -2.1305 - type: nauc_map_at_5_diff1 value: 41.3954 - type: nauc_map_at_10_max value: 21.4897 - type: nauc_map_at_10_std value: -1.76 - type: nauc_map_at_10_diff1 value: 40.9264 - type: nauc_map_at_20_max value: 21.4368 - type: nauc_map_at_20_std value: -1.6178000000000001 - type: nauc_map_at_20_diff1 value: 40.847 - type: nauc_map_at_100_max value: 21.3978 - type: nauc_map_at_100_std value: -1.4705 - type: nauc_map_at_100_diff1 value: 40.775 - type: nauc_map_at_1000_max value: 21.4068 - type: nauc_map_at_1000_std value: -1.4657 - type: nauc_map_at_1000_diff1 value: 40.7824 - type: nauc_recall_at_1_max value: 22.1861 - type: nauc_recall_at_1_std value: -3.218 - type: nauc_recall_at_1_diff1 value: 46.4989 - type: nauc_recall_at_3_max value: 21.3684 - type: nauc_recall_at_3_std value: -1.3554 - type: nauc_recall_at_3_diff1 value: 37.4804 - type: nauc_recall_at_5_max value: 20.4902 - type: nauc_recall_at_5_std value: -0.3449 - type: nauc_recall_at_5_diff1 value: 34.9587 - type: nauc_recall_at_10_max value: 19.2959 - type: nauc_recall_at_10_std value: 1.9666 - type: nauc_recall_at_10_diff1 value: 31.903 - type: nauc_recall_at_20_max value: 18.6516 - type: nauc_recall_at_20_std value: 3.9671 - type: nauc_recall_at_20_diff1 value: 30.576999999999998 - type: nauc_recall_at_100_max value: 17.383699999999997 - type: nauc_recall_at_100_std value: 11.050699999999999 - type: nauc_recall_at_100_diff1 value: 26.4222 - type: nauc_recall_at_1000_max 
value: 17.1265 - type: nauc_recall_at_1000_std value: 18.235699999999998 - type: nauc_recall_at_1000_diff1 value: 23.186300000000003 - type: nauc_precision_at_1_max value: 22.1861 - type: nauc_precision_at_1_std value: -3.218 - type: nauc_precision_at_1_diff1 value: 46.4989 - type: nauc_precision_at_3_max value: 21.3684 - type: nauc_precision_at_3_std value: -1.3554 - type: nauc_precision_at_3_diff1 value: 37.4804 - type: nauc_precision_at_5_max value: 20.4902 - type: nauc_precision_at_5_std value: -0.3449 - type: nauc_precision_at_5_diff1 value: 34.9587 - type: nauc_precision_at_10_max value: 19.2959 - type: nauc_precision_at_10_std value: 1.9666 - type: nauc_precision_at_10_diff1 value: 31.903 - type: nauc_precision_at_20_max value: 18.6516 - type: nauc_precision_at_20_std value: 3.9671 - type: nauc_precision_at_20_diff1 value: 30.576999999999998 - type: nauc_precision_at_100_max value: 17.383699999999997 - type: nauc_precision_at_100_std value: 11.050699999999999 - type: nauc_precision_at_100_diff1 value: 26.4222 - type: nauc_precision_at_1000_max value: 17.1265 - type: nauc_precision_at_1000_std value: 18.235699999999998 - type: nauc_precision_at_1000_diff1 value: 23.186300000000003 - type: nauc_mrr_at_1_max value: 22.159000000000002 - type: nauc_mrr_at_1_std value: -3.2346 - type: nauc_mrr_at_1_diff1 value: 46.4989 - type: nauc_mrr_at_3_max value: 21.8304 - type: nauc_mrr_at_3_std value: -2.4013 - type: nauc_mrr_at_3_diff1 value: 42.0356 - type: nauc_mrr_at_5_max value: 21.617900000000002 - type: nauc_mrr_at_5_std value: -2.1397 - type: nauc_mrr_at_5_diff1 value: 41.3793 - type: nauc_mrr_at_10_max value: 21.467200000000002 - type: nauc_mrr_at_10_std value: -1.7682 - type: nauc_mrr_at_10_diff1 value: 40.912 - type: nauc_mrr_at_20_max value: 21.415200000000002 - type: nauc_mrr_at_20_std value: -1.6295 - type: nauc_mrr_at_20_diff1 value: 40.8319 - type: nauc_mrr_at_100_max value: 21.376800000000003 - type: nauc_mrr_at_100_std value: -1.4815 - type: nauc_mrr_at_100_diff1 value: 40.760400000000004 - type: nauc_mrr_at_1000_max value: 21.3858 - type: nauc_mrr_at_1000_std value: -1.4767000000000001 - type: nauc_mrr_at_1000_diff1 value: 40.7677 - type: main_score value: 30.791 - task: type: Retrieval dataset: name: MTEB CodeSearchNetRetrieval (python) type: code-search-net/code_search_net config: python split: test revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759 metrics: - type: ndcg_at_1 value: 65.2 - type: ndcg_at_3 value: 76.41 - type: ndcg_at_5 value: 77.981 - type: ndcg_at_10 value: 79.044 - type: ndcg_at_20 value: 79.855 - type: ndcg_at_100 value: 80.622 - type: ndcg_at_1000 value: 80.806 - type: map_at_1 value: 65.2 - type: map_at_3 value: 73.65 - type: map_at_5 value: 74.52499999999999 - type: map_at_10 value: 74.98 - type: map_at_20 value: 75.203 - type: map_at_100 value: 75.319 - type: map_at_1000 value: 75.327 - type: recall_at_1 value: 65.2 - type: recall_at_3 value: 84.39999999999999 - type: recall_at_5 value: 88.2 - type: recall_at_10 value: 91.4 - type: recall_at_20 value: 94.6 - type: recall_at_100 value: 98.6 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 65.2 - type: precision_at_3 value: 28.133000000000003 - type: precision_at_5 value: 17.64 - type: precision_at_10 value: 9.139999999999999 - type: precision_at_20 value: 4.73 - type: precision_at_100 value: 0.9860000000000001 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 65.2 - type: mrr_at_3 value: 73.65 - type: mrr_at_5 value: 74.52499999999999 - type: mrr_at_10 value: 74.9802 - type: 
mrr_at_20 value: 75.20320000000001 - type: mrr_at_100 value: 75.319 - type: mrr_at_1000 value: 75.3269 - type: nauc_ndcg_at_1_max value: 36.4698 - type: nauc_ndcg_at_1_std value: -10.8058 - type: nauc_ndcg_at_1_diff1 value: 70.5679 - type: nauc_ndcg_at_3_max value: 40.582499999999996 - type: nauc_ndcg_at_3_std value: -9.3767 - type: nauc_ndcg_at_3_diff1 value: 64.8235 - type: nauc_ndcg_at_5_max value: 41.191100000000006 - type: nauc_ndcg_at_5_std value: -8.6758 - type: nauc_ndcg_at_5_diff1 value: 64.70179999999999 - type: nauc_ndcg_at_10_max value: 41.5913 - type: nauc_ndcg_at_10_std value: -8.8502 - type: nauc_ndcg_at_10_diff1 value: 65.7197 - type: nauc_ndcg_at_20_max value: 41.4419 - type: nauc_ndcg_at_20_std value: -9.0406 - type: nauc_ndcg_at_20_diff1 value: 66.1819 - type: nauc_ndcg_at_100_max value: 40.6791 - type: nauc_ndcg_at_100_std value: -8.343499999999999 - type: nauc_ndcg_at_100_diff1 value: 66.468 - type: nauc_ndcg_at_1000_max value: 40.3153 - type: nauc_ndcg_at_1000_std value: -8.7689 - type: nauc_ndcg_at_1000_diff1 value: 66.49249999999999 - type: nauc_map_at_1_max value: 36.4698 - type: nauc_map_at_1_std value: -10.8058 - type: nauc_map_at_1_diff1 value: 70.5679 - type: nauc_map_at_3_max value: 39.3299 - type: nauc_map_at_3_std value: -9.4675 - type: nauc_map_at_3_diff1 value: 66.3583 - type: nauc_map_at_5_max value: 39.5636 - type: nauc_map_at_5_std value: -9.1881 - type: nauc_map_at_5_diff1 value: 66.37910000000001 - type: nauc_map_at_10_max value: 39.6806 - type: nauc_map_at_10_std value: -9.3088 - type: nauc_map_at_10_diff1 value: 66.8131 - type: nauc_map_at_20_max value: 39.635999999999996 - type: nauc_map_at_20_std value: -9.3305 - type: nauc_map_at_20_diff1 value: 66.93430000000001 - type: nauc_map_at_100_max value: 39.536500000000004 - type: nauc_map_at_100_std value: -9.1873 - type: nauc_map_at_100_diff1 value: 66.96419999999999 - type: nauc_map_at_1000_max value: 39.5233 - type: nauc_map_at_1000_std value: -9.200999999999999 - type: nauc_map_at_1000_diff1 value: 66.9634 - type: nauc_recall_at_1_max value: 36.4698 - type: nauc_recall_at_1_std value: -10.8058 - type: nauc_recall_at_1_diff1 value: 70.5679 - type: nauc_recall_at_3_max value: 46.0932 - type: nauc_recall_at_3_std value: -9.193900000000001 - type: nauc_recall_at_3_diff1 value: 58.2067 - type: nauc_recall_at_5_max value: 50.2927 - type: nauc_recall_at_5_std value: -5.8297 - type: nauc_recall_at_5_diff1 value: 55.6113 - type: nauc_recall_at_10_max value: 55.961099999999995 - type: nauc_recall_at_10_std value: -5.3568999999999996 - type: nauc_recall_at_10_diff1 value: 58.6075 - type: nauc_recall_at_20_max value: 62.2869 - type: nauc_recall_at_20_std value: -6.4927 - type: nauc_recall_at_20_diff1 value: 60.207699999999996 - type: nauc_recall_at_100_max value: 73.4427 - type: nauc_recall_at_100_std value: 31.606 - type: nauc_recall_at_100_diff1 value: 63.0919 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 36.4698 - type: nauc_precision_at_1_std value: -10.8058 - type: nauc_precision_at_1_diff1 value: 70.5679 - type: nauc_precision_at_3_max value: 46.0932 - type: nauc_precision_at_3_std value: -9.193900000000001 - type: nauc_precision_at_3_diff1 value: 58.2067 - type: nauc_precision_at_5_max value: 50.2927 - type: nauc_precision_at_5_std value: -5.8297 - type: nauc_precision_at_5_diff1 value: 55.6113 - type: nauc_precision_at_10_max value: 55.961099999999995 - type: 
nauc_precision_at_10_std value: -5.3568999999999996 - type: nauc_precision_at_10_diff1 value: 58.6075 - type: nauc_precision_at_20_max value: 62.2869 - type: nauc_precision_at_20_std value: -6.4927 - type: nauc_precision_at_20_diff1 value: 60.207699999999996 - type: nauc_precision_at_100_max value: 73.4427 - type: nauc_precision_at_100_std value: 31.606 - type: nauc_precision_at_100_diff1 value: 63.0919 - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_mrr_at_1_max value: 36.4698 - type: nauc_mrr_at_1_std value: -10.8058 - type: nauc_mrr_at_1_diff1 value: 70.5679 - type: nauc_mrr_at_3_max value: 39.3299 - type: nauc_mrr_at_3_std value: -9.4675 - type: nauc_mrr_at_3_diff1 value: 66.3583 - type: nauc_mrr_at_5_max value: 39.5636 - type: nauc_mrr_at_5_std value: -9.1881 - type: nauc_mrr_at_5_diff1 value: 66.37910000000001 - type: nauc_mrr_at_10_max value: 39.6806 - type: nauc_mrr_at_10_std value: -9.3088 - type: nauc_mrr_at_10_diff1 value: 66.8131 - type: nauc_mrr_at_20_max value: 39.635999999999996 - type: nauc_mrr_at_20_std value: -9.3305 - type: nauc_mrr_at_20_diff1 value: 66.93430000000001 - type: nauc_mrr_at_100_max value: 39.536500000000004 - type: nauc_mrr_at_100_std value: -9.1873 - type: nauc_mrr_at_100_diff1 value: 66.96419999999999 - type: nauc_mrr_at_1000_max value: 39.5233 - type: nauc_mrr_at_1000_std value: -9.200999999999999 - type: nauc_mrr_at_1000_diff1 value: 66.9634 - type: main_score value: 79.044 - task: type: Retrieval dataset: name: MTEB CodeSearchNetRetrieval (javascript) type: code-search-net/code_search_net config: javascript split: test revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759 metrics: - type: ndcg_at_1 value: 55.2 - type: ndcg_at_3 value: 63.709 - type: ndcg_at_5 value: 65.267 - type: ndcg_at_10 value: 67.239 - type: ndcg_at_20 value: 68.372 - type: ndcg_at_100 value: 69.854 - type: ndcg_at_1000 value: 70.831 - type: map_at_1 value: 55.2 - type: map_at_3 value: 61.667 - type: map_at_5 value: 62.527 - type: map_at_10 value: 63.339999999999996 - type: map_at_20 value: 63.648 - type: map_at_100 value: 63.854 - type: map_at_1000 value: 63.885999999999996 - type: recall_at_1 value: 55.2 - type: recall_at_3 value: 69.6 - type: recall_at_5 value: 73.4 - type: recall_at_10 value: 79.5 - type: recall_at_20 value: 84.0 - type: recall_at_100 value: 92.0 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 55.2 - type: precision_at_3 value: 23.200000000000003 - type: precision_at_5 value: 14.680000000000001 - type: precision_at_10 value: 7.95 - type: precision_at_20 value: 4.2 - type: precision_at_100 value: 0.9199999999999999 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 55.2 - type: mrr_at_3 value: 61.6667 - type: mrr_at_5 value: 62.526700000000005 - type: mrr_at_10 value: 63.339999999999996 - type: mrr_at_20 value: 63.6484 - type: mrr_at_100 value: 63.854200000000006 - type: mrr_at_1000 value: 63.88549999999999 - type: nauc_ndcg_at_1_max value: 48.821 - type: nauc_ndcg_at_1_std value: 19.6886 - type: nauc_ndcg_at_1_diff1 value: 65.515 - type: nauc_ndcg_at_3_max value: 56.316 - type: nauc_ndcg_at_3_std value: 26.6555 - type: nauc_ndcg_at_3_diff1 value: 61.755300000000005 - type: nauc_ndcg_at_5_max value: 57.566300000000005 - type: nauc_ndcg_at_5_std value: 29.5288 - type: nauc_ndcg_at_5_diff1 value: 61.655300000000004 - type: nauc_ndcg_at_10_max value: 58.89339999999999 - type: nauc_ndcg_at_10_std value: 32.1136 - type: 
nauc_ndcg_at_10_diff1 value: 61.7916 - type: nauc_ndcg_at_20_max value: 58.675999999999995 - type: nauc_ndcg_at_20_std value: 32.2575 - type: nauc_ndcg_at_20_diff1 value: 62.5682 - type: nauc_ndcg_at_100_max value: 57.6832 - type: nauc_ndcg_at_100_std value: 31.2476 - type: nauc_ndcg_at_100_diff1 value: 62.356100000000005 - type: nauc_ndcg_at_1000_max value: 56.9118 - type: nauc_ndcg_at_1000_std value: 29.624499999999998 - type: nauc_ndcg_at_1000_diff1 value: 62.4914 - type: nauc_map_at_1_max value: 48.821 - type: nauc_map_at_1_std value: 19.6886 - type: nauc_map_at_1_diff1 value: 65.515 - type: nauc_map_at_3_max value: 54.47260000000001 - type: nauc_map_at_3_std value: 24.864800000000002 - type: nauc_map_at_3_diff1 value: 62.6644 - type: nauc_map_at_5_max value: 55.1021 - type: nauc_map_at_5_std value: 26.2921 - type: nauc_map_at_5_diff1 value: 62.624100000000006 - type: nauc_map_at_10_max value: 55.552 - type: nauc_map_at_10_std value: 27.199 - type: nauc_map_at_10_diff1 value: 62.7054 - type: nauc_map_at_20_max value: 55.4708 - type: nauc_map_at_20_std value: 27.2067 - type: nauc_map_at_20_diff1 value: 62.8945 - type: nauc_map_at_100_max value: 55.3465 - type: nauc_map_at_100_std value: 27.0926 - type: nauc_map_at_100_diff1 value: 62.8575 - type: nauc_map_at_1000_max value: 55.3249 - type: nauc_map_at_1000_std value: 27.0527 - type: nauc_map_at_1000_diff1 value: 62.8617 - type: nauc_recall_at_1_max value: 48.821 - type: nauc_recall_at_1_std value: 19.6886 - type: nauc_recall_at_1_diff1 value: 65.515 - type: nauc_recall_at_3_max value: 62.36279999999999 - type: nauc_recall_at_3_std value: 32.569199999999995 - type: nauc_recall_at_3_diff1 value: 58.781499999999994 - type: nauc_recall_at_5_max value: 66.6246 - type: nauc_recall_at_5_std value: 41.813 - type: nauc_recall_at_5_diff1 value: 58.1854 - type: nauc_recall_at_10_max value: 74.4567 - type: nauc_recall_at_10_std value: 55.835 - type: nauc_recall_at_10_diff1 value: 57.89189999999999 - type: nauc_recall_at_20_max value: 76.9008 - type: nauc_recall_at_20_std value: 62.54110000000001 - type: nauc_recall_at_20_diff1 value: 62.200500000000005 - type: nauc_recall_at_100_max value: 76.46300000000001 - type: nauc_recall_at_100_std value: 71.4723 - type: nauc_recall_at_100_diff1 value: 59.0844 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 48.821 - type: nauc_precision_at_1_std value: 19.6886 - type: nauc_precision_at_1_diff1 value: 65.515 - type: nauc_precision_at_3_max value: 62.36279999999999 - type: nauc_precision_at_3_std value: 32.569199999999995 - type: nauc_precision_at_3_diff1 value: 58.781499999999994 - type: nauc_precision_at_5_max value: 66.6246 - type: nauc_precision_at_5_std value: 41.813 - type: nauc_precision_at_5_diff1 value: 58.1854 - type: nauc_precision_at_10_max value: 74.4567 - type: nauc_precision_at_10_std value: 55.835 - type: nauc_precision_at_10_diff1 value: 57.89189999999999 - type: nauc_precision_at_20_max value: 76.9008 - type: nauc_precision_at_20_std value: 62.54110000000001 - type: nauc_precision_at_20_diff1 value: 62.200500000000005 - type: nauc_precision_at_100_max value: 76.46300000000001 - type: nauc_precision_at_100_std value: 71.4723 - type: nauc_precision_at_100_diff1 value: 59.0844 - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_mrr_at_1_max value: 48.821 - type: 
nauc_mrr_at_1_std value: 19.6886 - type: nauc_mrr_at_1_diff1 value: 65.515 - type: nauc_mrr_at_3_max value: 54.47260000000001 - type: nauc_mrr_at_3_std value: 24.864800000000002 - type: nauc_mrr_at_3_diff1 value: 62.6644 - type: nauc_mrr_at_5_max value: 55.1021 - type: nauc_mrr_at_5_std value: 26.2921 - type: nauc_mrr_at_5_diff1 value: 62.624100000000006 - type: nauc_mrr_at_10_max value: 55.552 - type: nauc_mrr_at_10_std value: 27.199 - type: nauc_mrr_at_10_diff1 value: 62.7054 - type: nauc_mrr_at_20_max value: 55.4708 - type: nauc_mrr_at_20_std value: 27.2067 - type: nauc_mrr_at_20_diff1 value: 62.8945 - type: nauc_mrr_at_100_max value: 55.3465 - type: nauc_mrr_at_100_std value: 27.0926 - type: nauc_mrr_at_100_diff1 value: 62.8575 - type: nauc_mrr_at_1000_max value: 55.3249 - type: nauc_mrr_at_1000_std value: 27.0527 - type: nauc_mrr_at_1000_diff1 value: 62.8617 - type: main_score value: 67.239 - task: type: Retrieval dataset: name: MTEB CodeSearchNetRetrieval (go) type: code-search-net/code_search_net config: go split: test revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759 metrics: - type: ndcg_at_1 value: 70.19999999999999 - type: ndcg_at_3 value: 79.566 - type: ndcg_at_5 value: 81.012 - type: ndcg_at_10 value: 82.217 - type: ndcg_at_20 value: 82.97 - type: ndcg_at_100 value: 83.43199999999999 - type: ndcg_at_1000 value: 83.597 - type: map_at_1 value: 70.19999999999999 - type: map_at_3 value: 77.333 - type: map_at_5 value: 78.13799999999999 - type: map_at_10 value: 78.641 - type: map_at_20 value: 78.84400000000001 - type: map_at_100 value: 78.908 - type: map_at_1000 value: 78.914 - type: recall_at_1 value: 70.19999999999999 - type: recall_at_3 value: 86.0 - type: recall_at_5 value: 89.5 - type: recall_at_10 value: 93.2 - type: recall_at_20 value: 96.2 - type: recall_at_100 value: 98.7 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 70.19999999999999 - type: precision_at_3 value: 28.666999999999998 - type: precision_at_5 value: 17.9 - type: precision_at_10 value: 9.32 - type: precision_at_20 value: 4.81 - type: precision_at_100 value: 0.987 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 70.19999999999999 - type: mrr_at_3 value: 77.33330000000001 - type: mrr_at_5 value: 78.1383 - type: mrr_at_10 value: 78.6408 - type: mrr_at_20 value: 78.8441 - type: mrr_at_100 value: 78.9075 - type: mrr_at_1000 value: 78.91369999999999 - type: nauc_ndcg_at_1_max value: 54.447199999999995 - type: nauc_ndcg_at_1_std value: 5.7226 - type: nauc_ndcg_at_1_diff1 value: 71.1626 - type: nauc_ndcg_at_3_max value: 60.4446 - type: nauc_ndcg_at_3_std value: 6.2227 - type: nauc_ndcg_at_3_diff1 value: 69.419 - type: nauc_ndcg_at_5_max value: 59.7692 - type: nauc_ndcg_at_5_std value: 7.4161 - type: nauc_ndcg_at_5_diff1 value: 68.9958 - type: nauc_ndcg_at_10_max value: 59.559 - type: nauc_ndcg_at_10_std value: 6.792199999999999 - type: nauc_ndcg_at_10_diff1 value: 68.42099999999999 - type: nauc_ndcg_at_20_max value: 59.1576 - type: nauc_ndcg_at_20_std value: 6.762600000000001 - type: nauc_ndcg_at_20_diff1 value: 69.1402 - type: nauc_ndcg_at_100_max value: 58.729699999999994 - type: nauc_ndcg_at_100_std value: 6.6151 - type: nauc_ndcg_at_100_diff1 value: 69.3485 - type: nauc_ndcg_at_1000_max value: 58.68879999999999 - type: nauc_ndcg_at_1000_std value: 6.5546999999999995 - type: nauc_ndcg_at_1000_diff1 value: 69.3974 - type: nauc_map_at_1_max value: 54.447199999999995 - type: nauc_map_at_1_std value: 5.7226 - type: nauc_map_at_1_diff1 value: 71.1626 - type: nauc_map_at_3_max value: 
58.82150000000001 - type: nauc_map_at_3_std value: 6.111 - type: nauc_map_at_3_diff1 value: 69.8853 - type: nauc_map_at_5_max value: 58.4332 - type: nauc_map_at_5_std value: 6.6455 - type: nauc_map_at_5_diff1 value: 69.6593 - type: nauc_map_at_10_max value: 58.3284 - type: nauc_map_at_10_std value: 6.3941 - type: nauc_map_at_10_diff1 value: 69.4544 - type: nauc_map_at_20_max value: 58.2269 - type: nauc_map_at_20_std value: 6.3983 - type: nauc_map_at_20_diff1 value: 69.634 - type: nauc_map_at_100_max value: 58.180299999999995 - type: nauc_map_at_100_std value: 6.372 - type: nauc_map_at_100_diff1 value: 69.6674 - type: nauc_map_at_1000_max value: 58.1796 - type: nauc_map_at_1000_std value: 6.3696 - type: nauc_map_at_1000_diff1 value: 69.6689 - type: nauc_recall_at_1_max value: 54.447199999999995 - type: nauc_recall_at_1_std value: 5.7226 - type: nauc_recall_at_1_diff1 value: 71.1626 - type: nauc_recall_at_3_max value: 67.3635 - type: nauc_recall_at_3_std value: 6.682499999999999 - type: nauc_recall_at_3_diff1 value: 67.4356 - type: nauc_recall_at_5_max value: 66.6632 - type: nauc_recall_at_5_std value: 11.969899999999999 - type: nauc_recall_at_5_diff1 value: 65.4311 - type: nauc_recall_at_10_max value: 68.76339999999999 - type: nauc_recall_at_10_std value: 10.0319 - type: nauc_recall_at_10_diff1 value: 59.6357 - type: nauc_recall_at_20_max value: 69.58569999999999 - type: nauc_recall_at_20_std value: 11.5374 - type: nauc_recall_at_20_diff1 value: 63.8926 - type: nauc_recall_at_100_max value: 62.5009 - type: nauc_recall_at_100_std value: 12.447 - type: nauc_recall_at_100_diff1 value: 65.065 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 54.447199999999995 - type: nauc_precision_at_1_std value: 5.7226 - type: nauc_precision_at_1_diff1 value: 71.1626 - type: nauc_precision_at_3_max value: 67.3635 - type: nauc_precision_at_3_std value: 6.682499999999999 - type: nauc_precision_at_3_diff1 value: 67.4356 - type: nauc_precision_at_5_max value: 66.6632 - type: nauc_precision_at_5_std value: 11.969899999999999 - type: nauc_precision_at_5_diff1 value: 65.4311 - type: nauc_precision_at_10_max value: 68.76339999999999 - type: nauc_precision_at_10_std value: 10.0319 - type: nauc_precision_at_10_diff1 value: 59.6357 - type: nauc_precision_at_20_max value: 69.58569999999999 - type: nauc_precision_at_20_std value: 11.5374 - type: nauc_precision_at_20_diff1 value: 63.8926 - type: nauc_precision_at_100_max value: 62.5009 - type: nauc_precision_at_100_std value: 12.447 - type: nauc_precision_at_100_diff1 value: 65.065 - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_mrr_at_1_max value: 54.447199999999995 - type: nauc_mrr_at_1_std value: 5.7226 - type: nauc_mrr_at_1_diff1 value: 71.1626 - type: nauc_mrr_at_3_max value: 58.82150000000001 - type: nauc_mrr_at_3_std value: 6.111 - type: nauc_mrr_at_3_diff1 value: 69.8853 - type: nauc_mrr_at_5_max value: 58.4332 - type: nauc_mrr_at_5_std value: 6.6455 - type: nauc_mrr_at_5_diff1 value: 69.6593 - type: nauc_mrr_at_10_max value: 58.3284 - type: nauc_mrr_at_10_std value: 6.3941 - type: nauc_mrr_at_10_diff1 value: 69.4544 - type: nauc_mrr_at_20_max value: 58.2269 - type: nauc_mrr_at_20_std value: 6.3983 - type: nauc_mrr_at_20_diff1 value: 69.634 - type: nauc_mrr_at_100_max value: 58.180299999999995 - type: nauc_mrr_at_100_std value: 6.372 - 
type: nauc_mrr_at_100_diff1 value: 69.6674 - type: nauc_mrr_at_1000_max value: 58.1796 - type: nauc_mrr_at_1000_std value: 6.3696 - type: nauc_mrr_at_1000_diff1 value: 69.6689 - type: main_score value: 82.217 - task: type: Retrieval dataset: name: MTEB CodeSearchNetRetrieval (ruby) type: code-search-net/code_search_net config: ruby split: test revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759 metrics: - type: ndcg_at_1 value: 56.49999999999999 - type: ndcg_at_3 value: 66.597 - type: ndcg_at_5 value: 68.98100000000001 - type: ndcg_at_10 value: 70.829 - type: ndcg_at_20 value: 71.77799999999999 - type: ndcg_at_100 value: 72.85199999999999 - type: ndcg_at_1000 value: 73.563 - type: map_at_1 value: 56.49999999999999 - type: map_at_3 value: 64.2 - type: map_at_5 value: 65.52 - type: map_at_10 value: 66.305 - type: map_at_20 value: 66.572 - type: map_at_100 value: 66.733 - type: map_at_1000 value: 66.756 - type: recall_at_1 value: 56.49999999999999 - type: recall_at_3 value: 73.5 - type: recall_at_5 value: 79.3 - type: recall_at_10 value: 84.89999999999999 - type: recall_at_20 value: 88.6 - type: recall_at_100 value: 94.19999999999999 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 56.49999999999999 - type: precision_at_3 value: 24.5 - type: precision_at_5 value: 15.86 - type: precision_at_10 value: 8.49 - type: precision_at_20 value: 4.43 - type: precision_at_100 value: 0.942 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 56.49999999999999 - type: mrr_at_3 value: 64.2 - type: mrr_at_5 value: 65.52 - type: mrr_at_10 value: 66.30460000000001 - type: mrr_at_20 value: 66.5724 - type: mrr_at_100 value: 66.7334 - type: mrr_at_1000 value: 66.7564 - type: nauc_ndcg_at_1_max value: 55.3207 - type: nauc_ndcg_at_1_std value: 7.2139 - type: nauc_ndcg_at_1_diff1 value: 72.6385 - type: nauc_ndcg_at_3_max value: 58.4997 - type: nauc_ndcg_at_3_std value: 8.3729 - type: nauc_ndcg_at_3_diff1 value: 69.0137 - type: nauc_ndcg_at_5_max value: 58.213899999999995 - type: nauc_ndcg_at_5_std value: 11.8464 - type: nauc_ndcg_at_5_diff1 value: 67.8369 - type: nauc_ndcg_at_10_max value: 58.2068 - type: nauc_ndcg_at_10_std value: 13.320000000000002 - type: nauc_ndcg_at_10_diff1 value: 67.8139 - type: nauc_ndcg_at_20_max value: 58.0545 - type: nauc_ndcg_at_20_std value: 13.601199999999999 - type: nauc_ndcg_at_20_diff1 value: 67.814 - type: nauc_ndcg_at_100_max value: 58.1651 - type: nauc_ndcg_at_100_std value: 13.946900000000001 - type: nauc_ndcg_at_100_diff1 value: 68.07180000000001 - type: nauc_ndcg_at_1000_max value: 57.9397 - type: nauc_ndcg_at_1000_std value: 12.188400000000001 - type: nauc_ndcg_at_1000_diff1 value: 68.6001 - type: nauc_map_at_1_max value: 55.3207 - type: nauc_map_at_1_std value: 7.2139 - type: nauc_map_at_1_diff1 value: 72.6385 - type: nauc_map_at_3_max value: 57.678399999999996 - type: nauc_map_at_3_std value: 7.900500000000001 - type: nauc_map_at_3_diff1 value: 69.8646 - type: nauc_map_at_5_max value: 57.5229 - type: nauc_map_at_5_std value: 9.7157 - type: nauc_map_at_5_diff1 value: 69.2704 - type: nauc_map_at_10_max value: 57.5133 - type: nauc_map_at_10_std value: 10.2078 - type: nauc_map_at_10_diff1 value: 69.2876 - type: nauc_map_at_20_max value: 57.4843 - type: nauc_map_at_20_std value: 10.2501 - type: nauc_map_at_20_diff1 value: 69.303 - type: nauc_map_at_100_max value: 57.4927 - type: nauc_map_at_100_std value: 10.3077 - type: nauc_map_at_100_diff1 value: 69.3295 - type: nauc_map_at_1000_max value: 57.4921 - type: nauc_map_at_1000_std value: 10.2661 - type: 
nauc_map_at_1000_diff1 value: 69.3497 - type: nauc_recall_at_1_max value: 55.3207 - type: nauc_recall_at_1_std value: 7.2139 - type: nauc_recall_at_1_diff1 value: 72.6385 - type: nauc_recall_at_3_max value: 61.36899999999999 - type: nauc_recall_at_3_std value: 10.1165 - type: nauc_recall_at_3_diff1 value: 66.0874 - type: nauc_recall_at_5_max value: 60.956999999999994 - type: nauc_recall_at_5_std value: 21.409 - type: nauc_recall_at_5_diff1 value: 61.770199999999996 - type: nauc_recall_at_10_max value: 61.73689999999999 - type: nauc_recall_at_10_std value: 32.1058 - type: nauc_recall_at_10_diff1 value: 59.7434 - type: nauc_recall_at_20_max value: 61.2737 - type: nauc_recall_at_20_std value: 39.7564 - type: nauc_recall_at_20_diff1 value: 57.3813 - type: nauc_recall_at_100_max value: 66.6667 - type: nauc_recall_at_100_std value: 69.0613 - type: nauc_recall_at_100_diff1 value: 53.7574 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 55.3207 - type: nauc_precision_at_1_std value: 7.2139 - type: nauc_precision_at_1_diff1 value: 72.6385 - type: nauc_precision_at_3_max value: 61.36899999999999 - type: nauc_precision_at_3_std value: 10.1165 - type: nauc_precision_at_3_diff1 value: 66.0874 - type: nauc_precision_at_5_max value: 60.956999999999994 - type: nauc_precision_at_5_std value: 21.409 - type: nauc_precision_at_5_diff1 value: 61.770199999999996 - type: nauc_precision_at_10_max value: 61.73689999999999 - type: nauc_precision_at_10_std value: 32.1058 - type: nauc_precision_at_10_diff1 value: 59.7434 - type: nauc_precision_at_20_max value: 61.2737 - type: nauc_precision_at_20_std value: 39.7564 - type: nauc_precision_at_20_diff1 value: 57.3813 - type: nauc_precision_at_100_max value: 66.6667 - type: nauc_precision_at_100_std value: 69.0613 - type: nauc_precision_at_100_diff1 value: 53.7574 - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_mrr_at_1_max value: 55.3207 - type: nauc_mrr_at_1_std value: 7.2139 - type: nauc_mrr_at_1_diff1 value: 72.6385 - type: nauc_mrr_at_3_max value: 57.678399999999996 - type: nauc_mrr_at_3_std value: 7.900500000000001 - type: nauc_mrr_at_3_diff1 value: 69.8646 - type: nauc_mrr_at_5_max value: 57.5229 - type: nauc_mrr_at_5_std value: 9.7157 - type: nauc_mrr_at_5_diff1 value: 69.2704 - type: nauc_mrr_at_10_max value: 57.5133 - type: nauc_mrr_at_10_std value: 10.2078 - type: nauc_mrr_at_10_diff1 value: 69.2876 - type: nauc_mrr_at_20_max value: 57.4843 - type: nauc_mrr_at_20_std value: 10.2501 - type: nauc_mrr_at_20_diff1 value: 69.303 - type: nauc_mrr_at_100_max value: 57.4927 - type: nauc_mrr_at_100_std value: 10.3077 - type: nauc_mrr_at_100_diff1 value: 69.3295 - type: nauc_mrr_at_1000_max value: 57.4921 - type: nauc_mrr_at_1000_std value: 10.2661 - type: nauc_mrr_at_1000_diff1 value: 69.3497 - type: main_score value: 70.829 - task: type: Retrieval dataset: name: MTEB CodeSearchNetRetrieval (java) type: code-search-net/code_search_net config: java split: test revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759 metrics: - type: ndcg_at_1 value: 50.3 - type: ndcg_at_3 value: 62.883 - type: ndcg_at_5 value: 65.11200000000001 - type: ndcg_at_10 value: 67.044 - type: ndcg_at_20 value: 68.326 - type: ndcg_at_100 value: 69.592 - type: ndcg_at_1000 value: 70.209 - type: map_at_1 value: 50.3 - type: map_at_3 value: 59.8 - type: map_at_5 value: 
61.040000000000006 - type: map_at_10 value: 61.852 - type: map_at_20 value: 62.212999999999994 - type: map_at_100 value: 62.397000000000006 - type: map_at_1000 value: 62.416000000000004 - type: recall_at_1 value: 50.3 - type: recall_at_3 value: 71.8 - type: recall_at_5 value: 77.2 - type: recall_at_10 value: 83.1 - type: recall_at_20 value: 88.1 - type: recall_at_100 value: 94.8 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 50.3 - type: precision_at_3 value: 23.933 - type: precision_at_5 value: 15.440000000000001 - type: precision_at_10 value: 8.309999999999999 - type: precision_at_20 value: 4.405 - type: precision_at_100 value: 0.9480000000000001 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 50.3 - type: mrr_at_3 value: 59.8 - type: mrr_at_5 value: 61.040000000000006 - type: mrr_at_10 value: 61.8522 - type: mrr_at_20 value: 62.21339999999999 - type: mrr_at_100 value: 62.397499999999994 - type: mrr_at_1000 value: 62.415600000000005 - type: nauc_ndcg_at_1_max value: 27.9845 - type: nauc_ndcg_at_1_std value: -16.28 - type: nauc_ndcg_at_1_diff1 value: 61.9927 - type: nauc_ndcg_at_3_max value: 33.0521 - type: nauc_ndcg_at_3_std value: -10.3558 - type: nauc_ndcg_at_3_diff1 value: 56.8436 - type: nauc_ndcg_at_5_max value: 34.6635 - type: nauc_ndcg_at_5_std value: -7.1861 - type: nauc_ndcg_at_5_diff1 value: 56.39999999999999 - type: nauc_ndcg_at_10_max value: 36.0742 - type: nauc_ndcg_at_10_std value: -6.1496 - type: nauc_ndcg_at_10_diff1 value: 57.239 - type: nauc_ndcg_at_20_max value: 36.5836 - type: nauc_ndcg_at_20_std value: -5.3723 - type: nauc_ndcg_at_20_diff1 value: 57.7333 - type: nauc_ndcg_at_100_max value: 36.0909 - type: nauc_ndcg_at_100_std value: -5.655799999999999 - type: nauc_ndcg_at_100_diff1 value: 58.411699999999996 - type: nauc_ndcg_at_1000_max value: 34.8377 - type: nauc_ndcg_at_1000_std value: -7.542999999999999 - type: nauc_ndcg_at_1000_diff1 value: 58.198899999999995 - type: nauc_map_at_1_max value: 27.9845 - type: nauc_map_at_1_std value: -16.28 - type: nauc_map_at_1_diff1 value: 61.9927 - type: nauc_map_at_3_max value: 31.7824 - type: nauc_map_at_3_std value: -11.9282 - type: nauc_map_at_3_diff1 value: 58.2543 - type: nauc_map_at_5_max value: 32.5811 - type: nauc_map_at_5_std value: -10.3315 - type: nauc_map_at_5_diff1 value: 58.046 - type: nauc_map_at_10_max value: 33.0525 - type: nauc_map_at_10_std value: -10.0071 - type: nauc_map_at_10_diff1 value: 58.3778 - type: nauc_map_at_20_max value: 33.164 - type: nauc_map_at_20_std value: -9.8753 - type: nauc_map_at_20_diff1 value: 58.5075 - type: nauc_map_at_100_max value: 33.0857 - type: nauc_map_at_100_std value: -9.9373 - type: nauc_map_at_100_diff1 value: 58.581399999999995 - type: nauc_map_at_1000_max value: 33.0589 - type: nauc_map_at_1000_std value: -9.9773 - type: nauc_map_at_1000_diff1 value: 58.5777 - type: nauc_recall_at_1_max value: 27.9845 - type: nauc_recall_at_1_std value: -16.28 - type: nauc_recall_at_1_diff1 value: 61.9927 - type: nauc_recall_at_3_max value: 37.5284 - type: nauc_recall_at_3_std value: -4.7627999999999995 - type: nauc_recall_at_3_diff1 value: 51.8022 - type: nauc_recall_at_5_max value: 43.4852 - type: nauc_recall_at_5_std value: 6.3649 - type: nauc_recall_at_5_diff1 value: 49.5664 - type: nauc_recall_at_10_max value: 53.156000000000006 - type: nauc_recall_at_10_std value: 15.4361 - type: nauc_recall_at_10_diff1 value: 51.865300000000005 - type: nauc_recall_at_20_max value: 63.3834 - type: nauc_recall_at_20_std value: 30.2094 - type: nauc_recall_at_20_diff1 
value: 54.013999999999996 - type: nauc_recall_at_100_max value: 84.36399999999999 - type: nauc_recall_at_100_std value: 67.20089999999999 - type: nauc_recall_at_100_diff1 value: 66.6146 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 27.9845 - type: nauc_precision_at_1_std value: -16.28 - type: nauc_precision_at_1_diff1 value: 61.9927 - type: nauc_precision_at_3_max value: 37.5284 - type: nauc_precision_at_3_std value: -4.7627999999999995 - type: nauc_precision_at_3_diff1 value: 51.8022 - type: nauc_precision_at_5_max value: 43.4852 - type: nauc_precision_at_5_std value: 6.3649 - type: nauc_precision_at_5_diff1 value: 49.5664 - type: nauc_precision_at_10_max value: 53.156000000000006 - type: nauc_precision_at_10_std value: 15.4361 - type: nauc_precision_at_10_diff1 value: 51.865300000000005 - type: nauc_precision_at_20_max value: 63.3834 - type: nauc_precision_at_20_std value: 30.2094 - type: nauc_precision_at_20_diff1 value: 54.013999999999996 - type: nauc_precision_at_100_max value: 84.36399999999999 - type: nauc_precision_at_100_std value: 67.20089999999999 - type: nauc_precision_at_100_diff1 value: 66.6146 - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_mrr_at_1_max value: 27.9845 - type: nauc_mrr_at_1_std value: -16.28 - type: nauc_mrr_at_1_diff1 value: 61.9927 - type: nauc_mrr_at_3_max value: 31.7824 - type: nauc_mrr_at_3_std value: -11.9282 - type: nauc_mrr_at_3_diff1 value: 58.2543 - type: nauc_mrr_at_5_max value: 32.5811 - type: nauc_mrr_at_5_std value: -10.3315 - type: nauc_mrr_at_5_diff1 value: 58.046 - type: nauc_mrr_at_10_max value: 33.0525 - type: nauc_mrr_at_10_std value: -10.0071 - type: nauc_mrr_at_10_diff1 value: 58.3778 - type: nauc_mrr_at_20_max value: 33.164 - type: nauc_mrr_at_20_std value: -9.8753 - type: nauc_mrr_at_20_diff1 value: 58.5075 - type: nauc_mrr_at_100_max value: 33.0857 - type: nauc_mrr_at_100_std value: -9.9373 - type: nauc_mrr_at_100_diff1 value: 58.581399999999995 - type: nauc_mrr_at_1000_max value: 33.0589 - type: nauc_mrr_at_1000_std value: -9.9773 - type: nauc_mrr_at_1000_diff1 value: 58.5777 - type: main_score value: 67.044 - task: type: Retrieval dataset: name: MTEB CodeSearchNetRetrieval (php) type: code-search-net/code_search_net config: php split: test revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759 metrics: - type: ndcg_at_1 value: 52.5 - type: ndcg_at_3 value: 65.362 - type: ndcg_at_5 value: 67.797 - type: ndcg_at_10 value: 69.791 - type: ndcg_at_20 value: 70.787 - type: ndcg_at_100 value: 71.607 - type: ndcg_at_1000 value: 72.24000000000001 - type: map_at_1 value: 52.5 - type: map_at_3 value: 62.233000000000004 - type: map_at_5 value: 63.588 - type: map_at_10 value: 64.424 - type: map_at_20 value: 64.703 - type: map_at_100 value: 64.825 - type: map_at_1000 value: 64.84100000000001 - type: recall_at_1 value: 52.5 - type: recall_at_3 value: 74.4 - type: recall_at_5 value: 80.30000000000001 - type: recall_at_10 value: 86.4 - type: recall_at_20 value: 90.3 - type: recall_at_100 value: 94.6 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 52.5 - type: precision_at_3 value: 24.8 - type: precision_at_5 value: 16.06 - type: precision_at_10 value: 8.64 - type: precision_at_20 value: 4.515000000000001 - type: precision_at_100 value: 0.946 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 
52.5 - type: mrr_at_3 value: 62.2333 - type: mrr_at_5 value: 63.5883 - type: mrr_at_10 value: 64.4237 - type: mrr_at_20 value: 64.7029 - type: mrr_at_100 value: 64.8249 - type: mrr_at_1000 value: 64.84140000000001 - type: nauc_ndcg_at_1_max value: 28.977700000000002 - type: nauc_ndcg_at_1_std value: 5.5688 - type: nauc_ndcg_at_1_diff1 value: 62.8127 - type: nauc_ndcg_at_3_max value: 42.5053 - type: nauc_ndcg_at_3_std value: 13.8126 - type: nauc_ndcg_at_3_diff1 value: 60.791700000000006 - type: nauc_ndcg_at_5_max value: 43.521100000000004 - type: nauc_ndcg_at_5_std value: 14.5838 - type: nauc_ndcg_at_5_diff1 value: 61.267700000000005 - type: nauc_ndcg_at_10_max value: 43.2523 - type: nauc_ndcg_at_10_std value: 16.2237 - type: nauc_ndcg_at_10_diff1 value: 61.642300000000006 - type: nauc_ndcg_at_20_max value: 42.7707 - type: nauc_ndcg_at_20_std value: 17.0607 - type: nauc_ndcg_at_20_diff1 value: 61.5855 - type: nauc_ndcg_at_100_max value: 42.127900000000004 - type: nauc_ndcg_at_100_std value: 16.582900000000002 - type: nauc_ndcg_at_100_diff1 value: 61.916700000000006 - type: nauc_ndcg_at_1000_max value: 40.7945 - type: nauc_ndcg_at_1000_std value: 14.6562 - type: nauc_ndcg_at_1000_diff1 value: 61.7069 - type: nauc_map_at_1_max value: 28.977700000000002 - type: nauc_map_at_1_std value: 5.5688 - type: nauc_map_at_1_diff1 value: 62.8127 - type: nauc_map_at_3_max value: 38.5313 - type: nauc_map_at_3_std value: 11.2395 - type: nauc_map_at_3_diff1 value: 61.1888 - type: nauc_map_at_5_max value: 38.8835 - type: nauc_map_at_5_std value: 11.5395 - type: nauc_map_at_5_diff1 value: 61.449 - type: nauc_map_at_10_max value: 38.6822 - type: nauc_map_at_10_std value: 12.0181 - type: nauc_map_at_10_diff1 value: 61.5846 - type: nauc_map_at_20_max value: 38.5328 - type: nauc_map_at_20_std value: 12.182500000000001 - type: nauc_map_at_20_diff1 value: 61.578599999999994 - type: nauc_map_at_100_max value: 38.4484 - type: nauc_map_at_100_std value: 12.1157 - type: nauc_map_at_100_diff1 value: 61.6247 - type: nauc_map_at_1000_max value: 38.418600000000005 - type: nauc_map_at_1000_std value: 12.0795 - type: nauc_map_at_1000_diff1 value: 61.6214 - type: nauc_recall_at_1_max value: 28.977700000000002 - type: nauc_recall_at_1_std value: 5.5688 - type: nauc_recall_at_1_diff1 value: 62.8127 - type: nauc_recall_at_3_max value: 57.338699999999996 - type: nauc_recall_at_3_std value: 23.4946 - type: nauc_recall_at_3_diff1 value: 59.4094 - type: nauc_recall_at_5_max value: 64.4058 - type: nauc_recall_at_5_std value: 28.382 - type: nauc_recall_at_5_diff1 value: 60.671600000000005 - type: nauc_recall_at_10_max value: 71.11070000000001 - type: nauc_recall_at_10_std value: 43.6152 - type: nauc_recall_at_10_diff1 value: 62.6013 - type: nauc_recall_at_20_max value: 76.3142 - type: nauc_recall_at_20_std value: 61.0644 - type: nauc_recall_at_20_diff1 value: 62.244600000000005 - type: nauc_recall_at_100_max value: 87.9526 - type: nauc_recall_at_100_std value: 84.63619999999999 - type: nauc_recall_at_100_diff1 value: 69.6848 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 28.977700000000002 - type: nauc_precision_at_1_std value: 5.5688 - type: nauc_precision_at_1_diff1 value: 62.8127 - type: nauc_precision_at_3_max value: 57.338699999999996 - type: nauc_precision_at_3_std value: 23.4946 - type: nauc_precision_at_3_diff1 value: 59.4094 - type: nauc_precision_at_5_max value: 64.4058 - type: nauc_precision_at_5_std 
value: 28.382 - type: nauc_precision_at_5_diff1 value: 60.671600000000005 - type: nauc_precision_at_10_max value: 71.11070000000001 - type: nauc_precision_at_10_std value: 43.6152 - type: nauc_precision_at_10_diff1 value: 62.6013 - type: nauc_precision_at_20_max value: 76.3142 - type: nauc_precision_at_20_std value: 61.0644 - type: nauc_precision_at_20_diff1 value: 62.244600000000005 - type: nauc_precision_at_100_max value: 87.9526 - type: nauc_precision_at_100_std value: 84.63619999999999 - type: nauc_precision_at_100_diff1 value: 69.6848 - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_mrr_at_1_max value: 28.977700000000002 - type: nauc_mrr_at_1_std value: 5.5688 - type: nauc_mrr_at_1_diff1 value: 62.8127 - type: nauc_mrr_at_3_max value: 38.5313 - type: nauc_mrr_at_3_std value: 11.2395 - type: nauc_mrr_at_3_diff1 value: 61.1888 - type: nauc_mrr_at_5_max value: 38.8835 - type: nauc_mrr_at_5_std value: 11.5395 - type: nauc_mrr_at_5_diff1 value: 61.449 - type: nauc_mrr_at_10_max value: 38.6822 - type: nauc_mrr_at_10_std value: 12.0181 - type: nauc_mrr_at_10_diff1 value: 61.5846 - type: nauc_mrr_at_20_max value: 38.5328 - type: nauc_mrr_at_20_std value: 12.182500000000001 - type: nauc_mrr_at_20_diff1 value: 61.578599999999994 - type: nauc_mrr_at_100_max value: 38.4484 - type: nauc_mrr_at_100_std value: 12.1157 - type: nauc_mrr_at_100_diff1 value: 61.6247 - type: nauc_mrr_at_1000_max value: 38.418600000000005 - type: nauc_mrr_at_1000_std value: 12.0795 - type: nauc_mrr_at_1000_diff1 value: 61.6214 - type: main_score value: 69.791 - task: type: Retrieval dataset: name: MTEB CodeTransOceanContest (default) type: CoIR-Retrieval/codetrans-contest config: default split: test revision: 20da4eb20a4b17300c0986ee148c90867a7f2a4d metrics: - type: ndcg_at_1 value: 33.032000000000004 - type: ndcg_at_3 value: 38.041000000000004 - type: ndcg_at_5 value: 40.67 - type: ndcg_at_10 value: 43.651 - type: ndcg_at_20 value: 45.255 - type: ndcg_at_100 value: 48.41 - type: ndcg_at_1000 value: 50.775000000000006 - type: map_at_1 value: 33.032000000000004 - type: map_at_3 value: 36.802 - type: map_at_5 value: 38.273 - type: map_at_10 value: 39.45 - type: map_at_20 value: 39.891 - type: map_at_100 value: 40.312 - type: map_at_1000 value: 40.396 - type: recall_at_1 value: 33.032000000000004 - type: recall_at_3 value: 41.629 - type: recall_at_5 value: 47.964 - type: recall_at_10 value: 57.465999999999994 - type: recall_at_20 value: 63.800999999999995 - type: recall_at_100 value: 80.99499999999999 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 33.032000000000004 - type: precision_at_3 value: 13.876 - type: precision_at_5 value: 9.593 - type: precision_at_10 value: 5.747 - type: precision_at_20 value: 3.19 - type: precision_at_100 value: 0.8099999999999999 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 33.0317 - type: mrr_at_3 value: 36.8024 - type: mrr_at_5 value: 38.273 - type: mrr_at_10 value: 39.4504 - type: mrr_at_20 value: 39.8911 - type: mrr_at_100 value: 40.3122 - type: mrr_at_1000 value: 40.3955 - type: nauc_ndcg_at_1_max value: 53.0197 - type: nauc_ndcg_at_1_std value: 0.8863 - type: nauc_ndcg_at_1_diff1 value: 67.8151 - type: nauc_ndcg_at_3_max value: 50.37350000000001 - type: nauc_ndcg_at_3_std value: 1.3549 - type: nauc_ndcg_at_3_diff1 value: 61.698699999999995 - type: nauc_ndcg_at_5_max value: 49.1498 - type: nauc_ndcg_at_5_std value: 2.5727 - type: nauc_ndcg_at_5_diff1 
value: 58.0748 - type: nauc_ndcg_at_10_max value: 47.5197 - type: nauc_ndcg_at_10_std value: 2.7498 - type: nauc_ndcg_at_10_diff1 value: 56.9398 - type: nauc_ndcg_at_20_max value: 47.5836 - type: nauc_ndcg_at_20_std value: 3.4302 - type: nauc_ndcg_at_20_diff1 value: 55.8913 - type: nauc_ndcg_at_100_max value: 48.079499999999996 - type: nauc_ndcg_at_100_std value: 3.7983999999999996 - type: nauc_ndcg_at_100_diff1 value: 56.7706 - type: nauc_ndcg_at_1000_max value: 48.7136 - type: nauc_ndcg_at_1000_std value: 2.949 - type: nauc_ndcg_at_1000_diff1 value: 58.0488 - type: nauc_map_at_1_max value: 53.0197 - type: nauc_map_at_1_std value: 0.8863 - type: nauc_map_at_1_diff1 value: 67.8151 - type: nauc_map_at_3_max value: 51.1105 - type: nauc_map_at_3_std value: 1.5191 - type: nauc_map_at_3_diff1 value: 63.005900000000004 - type: nauc_map_at_5_max value: 50.4462 - type: nauc_map_at_5_std value: 2.0751 - type: nauc_map_at_5_diff1 value: 61.0287 - type: nauc_map_at_10_max value: 49.772499999999994 - type: nauc_map_at_10_std value: 2.1092 - type: nauc_map_at_10_diff1 value: 60.528000000000006 - type: nauc_map_at_20_max value: 49.7904 - type: nauc_map_at_20_std value: 2.3456 - type: nauc_map_at_20_diff1 value: 60.2416 - type: nauc_map_at_100_max value: 49.8742 - type: nauc_map_at_100_std value: 2.3747000000000003 - type: nauc_map_at_100_diff1 value: 60.390600000000006 - type: nauc_map_at_1000_max value: 49.8875 - type: nauc_map_at_1000_std value: 2.3390999999999997 - type: nauc_map_at_1000_diff1 value: 60.41180000000001 - type: nauc_recall_at_1_max value: 53.0197 - type: nauc_recall_at_1_std value: 0.8863 - type: nauc_recall_at_1_diff1 value: 67.8151 - type: nauc_recall_at_3_max value: 48.2306 - type: nauc_recall_at_3_std value: 0.7745 - type: nauc_recall_at_3_diff1 value: 58.0358 - type: nauc_recall_at_5_max value: 45.1577 - type: nauc_recall_at_5_std value: 4.228400000000001 - type: nauc_recall_at_5_diff1 value: 49.0182 - type: nauc_recall_at_10_max value: 39.584 - type: nauc_recall_at_10_std value: 5.1647 - type: nauc_recall_at_10_diff1 value: 44.864399999999996 - type: nauc_recall_at_20_max value: 39.1616 - type: nauc_recall_at_20_std value: 7.9384 - type: nauc_recall_at_20_diff1 value: 39.124700000000004 - type: nauc_recall_at_100_max value: 38.4356 - type: nauc_recall_at_100_std value: 14.498 - type: nauc_recall_at_100_diff1 value: 36.8934 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 53.0197 - type: nauc_precision_at_1_std value: 0.8863 - type: nauc_precision_at_1_diff1 value: 67.8151 - type: nauc_precision_at_3_max value: 48.2306 - type: nauc_precision_at_3_std value: 0.7745 - type: nauc_precision_at_3_diff1 value: 58.0358 - type: nauc_precision_at_5_max value: 45.1577 - type: nauc_precision_at_5_std value: 4.228400000000001 - type: nauc_precision_at_5_diff1 value: 49.0182 - type: nauc_precision_at_10_max value: 39.584 - type: nauc_precision_at_10_std value: 5.1647 - type: nauc_precision_at_10_diff1 value: 44.864399999999996 - type: nauc_precision_at_20_max value: 39.1616 - type: nauc_precision_at_20_std value: 7.9384 - type: nauc_precision_at_20_diff1 value: 39.124700000000004 - type: nauc_precision_at_100_max value: 38.4356 - type: nauc_precision_at_100_std value: 14.498 - type: nauc_precision_at_100_diff1 value: 36.8934 - type: nauc_precision_at_1000_max value: 100.0 - type: nauc_precision_at_1000_std value: 100.0 - type: nauc_precision_at_1000_diff1 value: 100.0 - 
type: nauc_mrr_at_1_max value: 53.0197 - type: nauc_mrr_at_1_std value: 0.8863 - type: nauc_mrr_at_1_diff1 value: 67.8151 - type: nauc_mrr_at_3_max value: 51.1105 - type: nauc_mrr_at_3_std value: 1.5191 - type: nauc_mrr_at_3_diff1 value: 63.005900000000004 - type: nauc_mrr_at_5_max value: 50.4462 - type: nauc_mrr_at_5_std value: 2.0751 - type: nauc_mrr_at_5_diff1 value: 61.0287 - type: nauc_mrr_at_10_max value: 49.772499999999994 - type: nauc_mrr_at_10_std value: 2.1092 - type: nauc_mrr_at_10_diff1 value: 60.528000000000006 - type: nauc_mrr_at_20_max value: 49.7904 - type: nauc_mrr_at_20_std value: 2.3456 - type: nauc_mrr_at_20_diff1 value: 60.2416 - type: nauc_mrr_at_100_max value: 49.8742 - type: nauc_mrr_at_100_std value: 2.3747000000000003 - type: nauc_mrr_at_100_diff1 value: 60.390600000000006 - type: nauc_mrr_at_1000_max value: 49.8875 - type: nauc_mrr_at_1000_std value: 2.3390999999999997 - type: nauc_mrr_at_1000_diff1 value: 60.41180000000001 - type: main_score value: 43.651 - task: type: Retrieval dataset: name: MTEB CodeTransOceanDL (default) type: CoIR-Retrieval/codetrans-dl config: default split: test revision: 281562cb8a1265ab5c0824bfa6ddcd9b0a15618f metrics: - type: ndcg_at_1 value: 8.333 - type: ndcg_at_3 value: 9.795 - type: ndcg_at_5 value: 13.286999999999999 - type: ndcg_at_10 value: 18.151999999999997 - type: ndcg_at_20 value: 21.914 - type: ndcg_at_100 value: 28.576 - type: ndcg_at_1000 value: 30.407 - type: map_at_1 value: 8.333 - type: map_at_3 value: 9.352 - type: map_at_5 value: 11.324 - type: map_at_10 value: 13.233 - type: map_at_20 value: 14.325 - type: map_at_100 value: 15.153 - type: map_at_1000 value: 15.243 - type: recall_at_1 value: 8.333 - type: recall_at_3 value: 11.111 - type: recall_at_5 value: 19.444 - type: recall_at_10 value: 35.0 - type: recall_at_20 value: 49.444 - type: recall_at_100 value: 86.667 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 8.333 - type: precision_at_3 value: 3.7039999999999997 - type: precision_at_5 value: 3.8890000000000002 - type: precision_at_10 value: 3.5000000000000004 - type: precision_at_20 value: 2.472 - type: precision_at_100 value: 0.8670000000000001 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 3.3333000000000004 - type: mrr_at_3 value: 6.6667000000000005 - type: mrr_at_5 value: 7.7778 - type: mrr_at_10 value: 10.247100000000001 - type: mrr_at_20 value: 11.3458 - type: mrr_at_100 value: 12.177 - type: mrr_at_1000 value: 12.2675 - type: nauc_ndcg_at_1_max value: -39.772800000000004 - type: nauc_ndcg_at_1_std value: -34.0524 - type: nauc_ndcg_at_1_diff1 value: -32.8146 - type: nauc_ndcg_at_3_max value: -39.8776 - type: nauc_ndcg_at_3_std value: -34.6862 - type: nauc_ndcg_at_3_diff1 value: -19.3707 - type: nauc_ndcg_at_5_max value: -40.8597 - type: nauc_ndcg_at_5_std value: -38.1022 - type: nauc_ndcg_at_5_diff1 value: -6.4628000000000005 - type: nauc_ndcg_at_10_max value: -40.2327 - type: nauc_ndcg_at_10_std value: -47.2976 - type: nauc_ndcg_at_10_diff1 value: -4.4762 - type: nauc_ndcg_at_20_max value: -41.7987 - type: nauc_ndcg_at_20_std value: -54.2481 - type: nauc_ndcg_at_20_diff1 value: -8.6146 - type: nauc_ndcg_at_100_max value: -39.463100000000004 - type: nauc_ndcg_at_100_std value: -45.7414 - type: nauc_ndcg_at_100_diff1 value: -9.2455 - type: nauc_ndcg_at_1000_max value: -40.8904 - type: nauc_ndcg_at_1000_std value: -46.5535 - type: nauc_ndcg_at_1000_diff1 value: -11.476799999999999 - type: nauc_map_at_1_max value: -39.772800000000004 - type: nauc_map_at_1_std value: -34.0524 - 
type: nauc_map_at_1_diff1 value: -32.8146 - type: nauc_map_at_3_max value: -39.894200000000005 - type: nauc_map_at_3_std value: -34.4818 - type: nauc_map_at_3_diff1 value: -23.0092 - type: nauc_map_at_5_max value: -40.5148 - type: nauc_map_at_5_std value: -36.6914 - type: nauc_map_at_5_diff1 value: -14.0244 - type: nauc_map_at_10_max value: -40.3751 - type: nauc_map_at_10_std value: -41.0546 - type: nauc_map_at_10_diff1 value: -12.7255 - type: nauc_map_at_20_max value: -40.8992 - type: nauc_map_at_20_std value: -43.580999999999996 - type: nauc_map_at_20_diff1 value: -14.1348 - type: nauc_map_at_100_max value: -40.8422 - type: nauc_map_at_100_std value: -42.7572 - type: nauc_map_at_100_diff1 value: -14.5847 - type: nauc_map_at_1000_max value: -40.8622 - type: nauc_map_at_1000_std value: -42.7255 - type: nauc_map_at_1000_diff1 value: -14.716099999999999 - type: nauc_recall_at_1_max value: -39.772800000000004 - type: nauc_recall_at_1_std value: -34.0524 - type: nauc_recall_at_1_diff1 value: -32.8146 - type: nauc_recall_at_3_max value: -39.8223 - type: nauc_recall_at_3_std value: -35.2166 - type: nauc_recall_at_3_diff1 value: -10.0944 - type: nauc_recall_at_5_max value: -41.574 - type: nauc_recall_at_5_std value: -41.0135 - type: nauc_recall_at_5_diff1 value: 8.5898 - type: nauc_recall_at_10_max value: -39.7009 - type: nauc_recall_at_10_std value: -59.587900000000005 - type: nauc_recall_at_10_diff1 value: 9.6476 - type: nauc_recall_at_20_max value: -43.7116 - type: nauc_recall_at_20_std value: -76.6625 - type: nauc_recall_at_20_diff1 value: -0.7394999999999999 - type: nauc_recall_at_100_max value: -22.023799999999998 - type: nauc_recall_at_100_std value: -33.848099999999995 - type: nauc_recall_at_100_diff1 value: 12.5282 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: -39.772800000000004 - type: nauc_precision_at_1_std value: -34.0524 - type: nauc_precision_at_1_diff1 value: -32.8146 - type: nauc_precision_at_3_max value: -39.8223 - type: nauc_precision_at_3_std value: -35.2166 - type: nauc_precision_at_3_diff1 value: -10.0944 - type: nauc_precision_at_5_max value: -41.574 - type: nauc_precision_at_5_std value: -41.0135 - type: nauc_precision_at_5_diff1 value: 8.5898 - type: nauc_precision_at_10_max value: -39.7009 - type: nauc_precision_at_10_std value: -59.587900000000005 - type: nauc_precision_at_10_diff1 value: 9.6476 - type: nauc_precision_at_20_max value: -43.7116 - type: nauc_precision_at_20_std value: -76.6625 - type: nauc_precision_at_20_diff1 value: -0.7394999999999999 - type: nauc_precision_at_100_max value: -22.023799999999998 - type: nauc_precision_at_100_std value: -33.848099999999995 - type: nauc_precision_at_100_diff1 value: 12.5282 - type: nauc_precision_at_1000_max value: 100.0 - type: nauc_precision_at_1000_std value: 100.0 - type: nauc_precision_at_1000_diff1 value: 100.0 - type: nauc_mrr_at_1_max value: -37.1478 - type: nauc_mrr_at_1_std value: -38.2256 - type: nauc_mrr_at_1_diff1 value: -19.2648 - type: nauc_mrr_at_3_max value: -38.5609 - type: nauc_mrr_at_3_std value: -36.7946 - type: nauc_mrr_at_3_diff1 value: 15.8383 - type: nauc_mrr_at_5_max value: -38.6003 - type: nauc_mrr_at_5_std value: -38.6368 - type: nauc_mrr_at_5_diff1 value: 10.5538 - type: nauc_mrr_at_10_max value: -40.3107 - type: nauc_mrr_at_10_std value: -44.6633 - type: nauc_mrr_at_10_diff1 value: 12.0739 - type: nauc_mrr_at_20_max value: -40.2119 - type: nauc_mrr_at_20_std value: 
-47.942099999999996 - type: nauc_mrr_at_20_diff1 value: 9.2441 - type: nauc_mrr_at_100_max value: -40.095 - type: nauc_mrr_at_100_std value: -46.9315 - type: nauc_mrr_at_100_diff1 value: 9.4182 - type: nauc_mrr_at_1000_max value: -40.117799999999995 - type: nauc_mrr_at_1000_std value: -46.914699999999996 - type: nauc_mrr_at_1000_diff1 value: 9.3917 - type: main_score value: 18.151999999999997 - task: type: Retrieval dataset: name: MTEB CosQA (default) type: CoIR-Retrieval/cosqa config: default split: test revision: bc5efb7e9d437246ce393ed19d772e08e4a79535 metrics: - type: ndcg_at_1 value: 12.6 - type: ndcg_at_3 value: 19.259 - type: ndcg_at_5 value: 24.078 - type: ndcg_at_10 value: 28.288999999999998 - type: ndcg_at_20 value: 31.706 - type: ndcg_at_100 value: 36.05 - type: ndcg_at_1000 value: 37.632 - type: map_at_1 value: 12.6 - type: map_at_3 value: 17.5 - type: map_at_5 value: 20.150000000000002 - type: map_at_10 value: 21.931 - type: map_at_20 value: 22.884 - type: map_at_100 value: 23.502000000000002 - type: map_at_1000 value: 23.566000000000003 - type: recall_at_1 value: 12.6 - type: recall_at_3 value: 24.4 - type: recall_at_5 value: 36.199999999999996 - type: recall_at_10 value: 49.0 - type: recall_at_20 value: 62.4 - type: recall_at_100 value: 85.6 - type: recall_at_1000 value: 97.8 - type: precision_at_1 value: 12.6 - type: precision_at_3 value: 8.133 - type: precision_at_5 value: 7.24 - type: precision_at_10 value: 4.9 - type: precision_at_20 value: 3.1199999999999997 - type: precision_at_100 value: 0.856 - type: precision_at_1000 value: 0.098 - type: mrr_at_1 value: 12.2 - type: mrr_at_3 value: 17.6333 - type: mrr_at_5 value: 19.453300000000002 - type: mrr_at_10 value: 21.3205 - type: mrr_at_20 value: 22.315199999999997 - type: mrr_at_100 value: 22.9331 - type: mrr_at_1000 value: 22.9955 - type: nauc_ndcg_at_1_max value: 10.2948 - type: nauc_ndcg_at_1_std value: -13.1709 - type: nauc_ndcg_at_1_diff1 value: 31.4251 - type: nauc_ndcg_at_3_max value: 15.477599999999999 - type: nauc_ndcg_at_3_std value: -11.7827 - type: nauc_ndcg_at_3_diff1 value: 17.4257 - type: nauc_ndcg_at_5_max value: 17.7434 - type: nauc_ndcg_at_5_std value: -10.7058 - type: nauc_ndcg_at_5_diff1 value: 13.955100000000002 - type: nauc_ndcg_at_10_max value: 17.799100000000003 - type: nauc_ndcg_at_10_std value: -8.629000000000001 - type: nauc_ndcg_at_10_diff1 value: 12.266399999999999 - type: nauc_ndcg_at_20_max value: 18.454 - type: nauc_ndcg_at_20_std value: -8.0871 - type: nauc_ndcg_at_20_diff1 value: 11.4802 - type: nauc_ndcg_at_100_max value: 18.8607 - type: nauc_ndcg_at_100_std value: -5.8566 - type: nauc_ndcg_at_100_diff1 value: 12.559899999999999 - type: nauc_ndcg_at_1000_max value: 18.1409 - type: nauc_ndcg_at_1000_std value: -6.894799999999999 - type: nauc_ndcg_at_1000_diff1 value: 13.9734 - type: nauc_map_at_1_max value: 10.2948 - type: nauc_map_at_1_std value: -13.1709 - type: nauc_map_at_1_diff1 value: 31.4251 - type: nauc_map_at_3_max value: 14.4256 - type: nauc_map_at_3_std value: -12.173 - type: nauc_map_at_3_diff1 value: 20.4742 - type: nauc_map_at_5_max value: 15.842400000000001 - type: nauc_map_at_5_std value: -11.5686 - type: nauc_map_at_5_diff1 value: 18.195800000000002 - type: nauc_map_at_10_max value: 15.786200000000001 - type: nauc_map_at_10_std value: -10.564 - type: nauc_map_at_10_diff1 value: 17.227899999999998 - type: nauc_map_at_20_max value: 15.987199999999998 - type: nauc_map_at_20_std value: -10.4241 - type: nauc_map_at_20_diff1 value: 17.0317 - type: nauc_map_at_100_max value: 
16.1125 - type: nauc_map_at_100_std value: -9.9394 - type: nauc_map_at_100_diff1 value: 17.191100000000002 - type: nauc_map_at_1000_max value: 16.0868 - type: nauc_map_at_1000_std value: -9.9615 - type: nauc_map_at_1000_diff1 value: 17.241999999999997 - type: nauc_recall_at_1_max value: 10.2948 - type: nauc_recall_at_1_std value: -13.1709 - type: nauc_recall_at_1_diff1 value: 31.4251 - type: nauc_recall_at_3_max value: 17.924799999999998 - type: nauc_recall_at_3_std value: -10.84 - type: nauc_recall_at_3_diff1 value: 10.267800000000001 - type: nauc_recall_at_5_max value: 22.0265 - type: nauc_recall_at_5_std value: -8.6675 - type: nauc_recall_at_5_diff1 value: 4.5511 - type: nauc_recall_at_10_max value: 22.5353 - type: nauc_recall_at_10_std value: -3.7438 - type: nauc_recall_at_10_diff1 value: 1.05 - type: nauc_recall_at_20_max value: 25.4119 - type: nauc_recall_at_20_std value: -1.0668 - type: nauc_recall_at_20_diff1 value: -3.4072999999999998 - type: nauc_recall_at_100_max value: 34.5952 - type: nauc_recall_at_100_std value: 22.4855 - type: nauc_recall_at_100_diff1 value: -9.0738 - type: nauc_recall_at_1000_max value: 56.485 - type: nauc_recall_at_1000_std value: 72.184 - type: nauc_recall_at_1000_diff1 value: -5.3136 - type: nauc_precision_at_1_max value: 10.2948 - type: nauc_precision_at_1_std value: -13.1709 - type: nauc_precision_at_1_diff1 value: 31.4251 - type: nauc_precision_at_3_max value: 17.924799999999998 - type: nauc_precision_at_3_std value: -10.84 - type: nauc_precision_at_3_diff1 value: 10.267800000000001 - type: nauc_precision_at_5_max value: 22.0265 - type: nauc_precision_at_5_std value: -8.6675 - type: nauc_precision_at_5_diff1 value: 4.5511 - type: nauc_precision_at_10_max value: 22.5353 - type: nauc_precision_at_10_std value: -3.7438 - type: nauc_precision_at_10_diff1 value: 1.05 - type: nauc_precision_at_20_max value: 25.4119 - type: nauc_precision_at_20_std value: -1.0668 - type: nauc_precision_at_20_diff1 value: -3.4072999999999998 - type: nauc_precision_at_100_max value: 34.5952 - type: nauc_precision_at_100_std value: 22.4855 - type: nauc_precision_at_100_diff1 value: -9.0738 - type: nauc_precision_at_1000_max value: 56.485 - type: nauc_precision_at_1000_std value: 72.184 - type: nauc_precision_at_1000_diff1 value: -5.3136 - type: nauc_mrr_at_1_max value: 12.3113 - type: nauc_mrr_at_1_std value: -16.7186 - type: nauc_mrr_at_1_diff1 value: 32.4301 - type: nauc_mrr_at_3_max value: 11.8664 - type: nauc_mrr_at_3_std value: -15.562500000000002 - type: nauc_mrr_at_3_diff1 value: 20.180600000000002 - type: nauc_mrr_at_5_max value: 11.9561 - type: nauc_mrr_at_5_std value: -15.1641 - type: nauc_mrr_at_5_diff1 value: 19.1071 - type: nauc_mrr_at_10_max value: 12.867899999999999 - type: nauc_mrr_at_10_std value: -14.1707 - type: nauc_mrr_at_10_diff1 value: 17.613599999999998 - type: nauc_mrr_at_20_max value: 13.3821 - type: nauc_mrr_at_20_std value: -13.727800000000002 - type: nauc_mrr_at_20_diff1 value: 17.712600000000002 - type: nauc_mrr_at_100_max value: 13.530100000000001 - type: nauc_mrr_at_100_std value: -13.292599999999998 - type: nauc_mrr_at_100_diff1 value: 17.8945 - type: nauc_mrr_at_1000_max value: 13.492899999999999 - type: nauc_mrr_at_1000_std value: -13.3262 - type: nauc_mrr_at_1000_diff1 value: 17.945 - type: main_score value: 28.288999999999998 - task: type: Retrieval dataset: name: MTEB DBPedia (default) type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: ndcg_at_1 value: 46.625 - type: ndcg_at_3 
value: 37.483 - type: ndcg_at_5 value: 34.943000000000005 - type: ndcg_at_10 value: 32.805 - type: ndcg_at_20 value: 31.857999999999997 - type: ndcg_at_100 value: 36.504 - type: ndcg_at_1000 value: 44.015 - type: map_at_1 value: 7.455 - type: map_at_3 value: 11.231 - type: map_at_5 value: 12.76 - type: map_at_10 value: 14.927000000000001 - type: map_at_20 value: 16.732 - type: map_at_100 value: 19.903000000000002 - type: map_at_1000 value: 21.227 - type: recall_at_1 value: 7.455 - type: recall_at_3 value: 12.423 - type: recall_at_5 value: 15.326 - type: recall_at_10 value: 19.858 - type: recall_at_20 value: 24.929000000000002 - type: recall_at_100 value: 42.799 - type: recall_at_1000 value: 66.485 - type: precision_at_1 value: 58.75 - type: precision_at_3 value: 40.916999999999994 - type: precision_at_5 value: 34.050000000000004 - type: precision_at_10 value: 25.75 - type: precision_at_20 value: 18.712 - type: precision_at_100 value: 7.904999999999999 - type: precision_at_1000 value: 1.754 - type: mrr_at_1 value: 58.75 - type: mrr_at_3 value: 65.2083 - type: mrr_at_5 value: 66.7708 - type: mrr_at_10 value: 67.4141 - type: mrr_at_20 value: 67.6811 - type: mrr_at_100 value: 67.8579 - type: mrr_at_1000 value: 67.8709 - type: nauc_ndcg_at_1_max value: 29.0439 - type: nauc_ndcg_at_1_std value: 20.5015 - type: nauc_ndcg_at_1_diff1 value: 35.499199999999995 - type: nauc_ndcg_at_3_max value: 29.8709 - type: nauc_ndcg_at_3_std value: 23.020699999999998 - type: nauc_ndcg_at_3_diff1 value: 28.618100000000002 - type: nauc_ndcg_at_5_max value: 27.7184 - type: nauc_ndcg_at_5_std value: 23.0527 - type: nauc_ndcg_at_5_diff1 value: 25.526 - type: nauc_ndcg_at_10_max value: 25.145400000000002 - type: nauc_ndcg_at_10_std value: 21.6828 - type: nauc_ndcg_at_10_diff1 value: 25.123 - type: nauc_ndcg_at_20_max value: 24.1687 - type: nauc_ndcg_at_20_std value: 18.192800000000002 - type: nauc_ndcg_at_20_diff1 value: 25.2305 - type: nauc_ndcg_at_100_max value: 26.4048 - type: nauc_ndcg_at_100_std value: 22.2057 - type: nauc_ndcg_at_100_diff1 value: 23.2848 - type: nauc_ndcg_at_1000_max value: 30.6232 - type: nauc_ndcg_at_1000_std value: 30.4798 - type: nauc_ndcg_at_1000_diff1 value: 22.5713 - type: nauc_map_at_1_max value: 4.2514 - type: nauc_map_at_1_std value: -16.109 - type: nauc_map_at_1_diff1 value: 31.521300000000004 - type: nauc_map_at_3_max value: 10.5699 - type: nauc_map_at_3_std value: -13.2038 - type: nauc_map_at_3_diff1 value: 27.992099999999997 - type: nauc_map_at_5_max value: 12.110999999999999 - type: nauc_map_at_5_std value: -9.2883 - type: nauc_map_at_5_diff1 value: 24.2311 - type: nauc_map_at_10_max value: 15.5794 - type: nauc_map_at_10_std value: -1.9084 - type: nauc_map_at_10_diff1 value: 23.5487 - type: nauc_map_at_20_max value: 19.2937 - type: nauc_map_at_20_std value: 5.1674 - type: nauc_map_at_20_diff1 value: 23.1231 - type: nauc_map_at_100_max value: 23.7248 - type: nauc_map_at_100_std value: 15.6969 - type: nauc_map_at_100_diff1 value: 22.087899999999998 - type: nauc_map_at_1000_max value: 25.3616 - type: nauc_map_at_1000_std value: 18.9624 - type: nauc_map_at_1000_diff1 value: 22.3491 - type: nauc_recall_at_1_max value: 4.2514 - type: nauc_recall_at_1_std value: -16.109 - type: nauc_recall_at_1_diff1 value: 31.521300000000004 - type: nauc_recall_at_3_max value: 9.579600000000001 - type: nauc_recall_at_3_std value: -14.1439 - type: nauc_recall_at_3_diff1 value: 24.0237 - type: nauc_recall_at_5_max value: 7.7634 - type: nauc_recall_at_5_std value: -11.6212 - type: nauc_recall_at_5_diff1 
value: 15.8449 - type: nauc_recall_at_10_max value: 12.070500000000001 - type: nauc_recall_at_10_std value: -3.6641 - type: nauc_recall_at_10_diff1 value: 16.755 - type: nauc_recall_at_20_max value: 16.974600000000002 - type: nauc_recall_at_20_std value: 4.442 - type: nauc_recall_at_20_diff1 value: 16.2465 - type: nauc_recall_at_100_max value: 20.0143 - type: nauc_recall_at_100_std value: 19.0564 - type: nauc_recall_at_100_diff1 value: 11.2073 - type: nauc_recall_at_1000_max value: 25.826999999999998 - type: nauc_recall_at_1000_std value: 31.867600000000003 - type: nauc_recall_at_1000_diff1 value: 7.5985 - type: nauc_precision_at_1_max value: 46.4049 - type: nauc_precision_at_1_std value: 34.9663 - type: nauc_precision_at_1_diff1 value: 41.281099999999995 - type: nauc_precision_at_3_max value: 40.3772 - type: nauc_precision_at_3_std value: 39.231700000000004 - type: nauc_precision_at_3_diff1 value: 20.8721 - type: nauc_precision_at_5_max value: 35.3251 - type: nauc_precision_at_5_std value: 45.041399999999996 - type: nauc_precision_at_5_diff1 value: 12.377699999999999 - type: nauc_precision_at_10_max value: 33.1469 - type: nauc_precision_at_10_std value: 50.484700000000004 - type: nauc_precision_at_10_diff1 value: 9.9524 - type: nauc_precision_at_20_max value: 31.897599999999997 - type: nauc_precision_at_20_std value: 53.0212 - type: nauc_precision_at_20_diff1 value: 9.0274 - type: nauc_precision_at_100_max value: 27.060499999999998 - type: nauc_precision_at_100_std value: 51.7917 - type: nauc_precision_at_100_diff1 value: 5.3346 - type: nauc_precision_at_1000_max value: 10.5127 - type: nauc_precision_at_1000_std value: 27.1389 - type: nauc_precision_at_1000_diff1 value: 4.072 - type: nauc_mrr_at_1_max value: 46.4049 - type: nauc_mrr_at_1_std value: 34.9663 - type: nauc_mrr_at_1_diff1 value: 41.281099999999995 - type: nauc_mrr_at_3_max value: 49.1925 - type: nauc_mrr_at_3_std value: 38.4208 - type: nauc_mrr_at_3_diff1 value: 39.4442 - type: nauc_mrr_at_5_max value: 49.4555 - type: nauc_mrr_at_5_std value: 39.9529 - type: nauc_mrr_at_5_diff1 value: 39.4985 - type: nauc_mrr_at_10_max value: 49.215900000000005 - type: nauc_mrr_at_10_std value: 39.846199999999996 - type: nauc_mrr_at_10_diff1 value: 39.6351 - type: nauc_mrr_at_20_max value: 49.2931 - type: nauc_mrr_at_20_std value: 39.7556 - type: nauc_mrr_at_20_diff1 value: 39.536500000000004 - type: nauc_mrr_at_100_max value: 49.236799999999995 - type: nauc_mrr_at_100_std value: 39.7146 - type: nauc_mrr_at_100_diff1 value: 39.5436 - type: nauc_mrr_at_1000_max value: 49.2376 - type: nauc_mrr_at_1000_std value: 39.7079 - type: nauc_mrr_at_1000_diff1 value: 39.5441 - type: main_score value: 32.805 - task: type: Classification dataset: name: MTEB EmotionClassification (default) type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 33.755 - type: f1 value: 30.109 - type: f1_weighted value: 35.891 - type: main_score value: 33.755 - task: type: Retrieval dataset: name: MTEB FEVER (default) type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: ndcg_at_1 value: 75.203 - type: ndcg_at_3 value: 81.208 - type: ndcg_at_5 value: 82.319 - type: ndcg_at_10 value: 83.155 - type: ndcg_at_20 value: 83.524 - type: ndcg_at_100 value: 83.852 - type: ndcg_at_1000 value: 84.052 - type: map_at_1 value: 69.63000000000001 - type: map_at_3 value: 77.50200000000001 - type: map_at_5 value: 78.251 - type: map_at_10 value: 78.648 - type: 
map_at_20 value: 78.767 - type: map_at_100 value: 78.82400000000001 - type: map_at_1000 value: 78.834 - type: recall_at_1 value: 69.63000000000001 - type: recall_at_3 value: 86.444 - type: recall_at_5 value: 89.298 - type: recall_at_10 value: 91.843 - type: recall_at_20 value: 93.195 - type: recall_at_100 value: 94.77799999999999 - type: recall_at_1000 value: 96.068 - type: precision_at_1 value: 75.203 - type: precision_at_3 value: 31.293 - type: precision_at_5 value: 19.448 - type: precision_at_10 value: 10.024 - type: precision_at_20 value: 5.102 - type: precision_at_100 value: 1.045 - type: precision_at_1000 value: 0.107 - type: mrr_at_1 value: 75.2025 - type: mrr_at_3 value: 83.0608 - type: mrr_at_5 value: 83.6871 - type: mrr_at_10 value: 84.0239 - type: mrr_at_20 value: 84.1082 - type: mrr_at_100 value: 84.1355 - type: mrr_at_1000 value: 84.137 - type: nauc_ndcg_at_1_max value: 29.9781 - type: nauc_ndcg_at_1_std value: -27.174799999999998 - type: nauc_ndcg_at_1_diff1 value: 65.8967 - type: nauc_ndcg_at_3_max value: 24.2173 - type: nauc_ndcg_at_3_std value: -22.2349 - type: nauc_ndcg_at_3_diff1 value: 48.9054 - type: nauc_ndcg_at_5_max value: 22.6904 - type: nauc_ndcg_at_5_std value: -21.4784 - type: nauc_ndcg_at_5_diff1 value: 48.186099999999996 - type: nauc_ndcg_at_10_max value: 22.2573 - type: nauc_ndcg_at_10_std value: -20.415 - type: nauc_ndcg_at_10_diff1 value: 47.7873 - type: nauc_ndcg_at_20_max value: 22.0394 - type: nauc_ndcg_at_20_std value: -19.7697 - type: nauc_ndcg_at_20_diff1 value: 47.958099999999995 - type: nauc_ndcg_at_100_max value: 21.6255 - type: nauc_ndcg_at_100_std value: -19.778200000000002 - type: nauc_ndcg_at_100_diff1 value: 48.0176 - type: nauc_ndcg_at_1000_max value: 21.8334 - type: nauc_ndcg_at_1000_std value: -19.947699999999998 - type: nauc_ndcg_at_1000_diff1 value: 48.491800000000005 - type: nauc_map_at_1_max value: 22.7733 - type: nauc_map_at_1_std value: -22.9147 - type: nauc_map_at_1_diff1 value: 54.33480000000001 - type: nauc_map_at_3_max value: 21.7638 - type: nauc_map_at_3_std value: -21.5291 - type: nauc_map_at_3_diff1 value: 48.4323 - type: nauc_map_at_5_max value: 21.3712 - type: nauc_map_at_5_std value: -21.1705 - type: nauc_map_at_5_diff1 value: 48.302499999999995 - type: nauc_map_at_10_max value: 21.2869 - type: nauc_map_at_10_std value: -20.826900000000002 - type: nauc_map_at_10_diff1 value: 48.238 - type: nauc_map_at_20_max value: 21.259700000000002 - type: nauc_map_at_20_std value: -20.6727 - type: nauc_map_at_20_diff1 value: 48.280499999999996 - type: nauc_map_at_100_max value: 21.2305 - type: nauc_map_at_100_std value: -20.6466 - type: nauc_map_at_100_diff1 value: 48.3009 - type: nauc_map_at_1000_max value: 21.2364 - type: nauc_map_at_1000_std value: -20.6521 - type: nauc_map_at_1000_diff1 value: 48.3154 - type: nauc_recall_at_1_max value: 22.7733 - type: nauc_recall_at_1_std value: -22.9147 - type: nauc_recall_at_1_diff1 value: 54.33480000000001 - type: nauc_recall_at_3_max value: 17.147100000000002 - type: nauc_recall_at_3_std value: -16.8494 - type: nauc_recall_at_3_diff1 value: 30.9712 - type: nauc_recall_at_5_max value: 12.0947 - type: nauc_recall_at_5_std value: -13.142000000000001 - type: nauc_recall_at_5_diff1 value: 24.760099999999998 - type: nauc_recall_at_10_max value: 7.1945 - type: nauc_recall_at_10_std value: -5.1164000000000005 - type: nauc_recall_at_10_diff1 value: 15.933900000000001 - type: nauc_recall_at_20_max value: 2.3306 - type: nauc_recall_at_20_std value: 2.748 - type: nauc_recall_at_20_diff1 value: 11.4733 - 
type: nauc_recall_at_100_max value: -9.991999999999999 - type: nauc_recall_at_100_std value: 7.362299999999999 - type: nauc_recall_at_100_diff1 value: 2.2306 - type: nauc_recall_at_1000_max value: -15.401200000000001 - type: nauc_recall_at_1000_std value: 10.616100000000001 - type: nauc_recall_at_1000_diff1 value: 1.9488999999999999 - type: nauc_precision_at_1_max value: 29.9781 - type: nauc_precision_at_1_std value: -27.174799999999998 - type: nauc_precision_at_1_diff1 value: 65.8967 - type: nauc_precision_at_3_max value: 29.6113 - type: nauc_precision_at_3_std value: -21.1606 - type: nauc_precision_at_3_diff1 value: 37.9441 - type: nauc_precision_at_5_max value: 23.069300000000002 - type: nauc_precision_at_5_std value: -13.168099999999999 - type: nauc_precision_at_5_diff1 value: 25.095299999999998 - type: nauc_precision_at_10_max value: 17.7956 - type: nauc_precision_at_10_std value: -0.28609999999999997 - type: nauc_precision_at_10_diff1 value: 9.4407 - type: nauc_precision_at_20_max value: 13.2934 - type: nauc_precision_at_20_std value: 10.9965 - type: nauc_precision_at_20_diff1 value: 0.43470000000000003 - type: nauc_precision_at_100_max value: 5.1414 - type: nauc_precision_at_100_std value: 16.2173 - type: nauc_precision_at_100_diff1 value: -10.2967 - type: nauc_precision_at_1000_max value: 6.0449 - type: nauc_precision_at_1000_std value: 12.698899999999998 - type: nauc_precision_at_1000_diff1 value: -8.3748 - type: nauc_mrr_at_1_max value: 29.9781 - type: nauc_mrr_at_1_std value: -27.174799999999998 - type: nauc_mrr_at_1_diff1 value: 65.8967 - type: nauc_mrr_at_3_max value: 33.2001 - type: nauc_mrr_at_3_std value: -27.142699999999998 - type: nauc_mrr_at_3_diff1 value: 62.546400000000006 - type: nauc_mrr_at_5_max value: 32.9296 - type: nauc_mrr_at_5_std value: -27.0933 - type: nauc_mrr_at_5_diff1 value: 62.8135 - type: nauc_mrr_at_10_max value: 32.9972 - type: nauc_mrr_at_10_std value: -26.7892 - type: nauc_mrr_at_10_diff1 value: 62.936099999999996 - type: nauc_mrr_at_20_max value: 32.9283 - type: nauc_mrr_at_20_std value: -26.6706 - type: nauc_mrr_at_20_diff1 value: 63.0346 - type: nauc_mrr_at_100_max value: 32.8554 - type: nauc_mrr_at_100_std value: -26.7179 - type: nauc_mrr_at_100_diff1 value: 63.0571 - type: nauc_mrr_at_1000_max value: 32.8523 - type: nauc_mrr_at_1000_std value: -26.7208 - type: nauc_mrr_at_1000_diff1 value: 63.0605 - type: main_score value: 83.155 - task: type: Retrieval dataset: name: MTEB FiQA2018 (default) type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: ndcg_at_1 value: 27.468999999999998 - type: ndcg_at_3 value: 25.183 - type: ndcg_at_5 value: 26.148 - type: ndcg_at_10 value: 28.404 - type: ndcg_at_20 value: 30.891999999999996 - type: ndcg_at_100 value: 35.167 - type: ndcg_at_1000 value: 38.803 - type: map_at_1 value: 13.864 - type: map_at_3 value: 18.989 - type: map_at_5 value: 20.521 - type: map_at_10 value: 21.858 - type: map_at_20 value: 22.686999999999998 - type: map_at_100 value: 23.491 - type: map_at_1000 value: 23.674 - type: recall_at_1 value: 13.864 - type: recall_at_3 value: 23.327 - type: recall_at_5 value: 28.015 - type: recall_at_10 value: 34.977999999999994 - type: recall_at_20 value: 42.495 - type: recall_at_100 value: 59.967999999999996 - type: recall_at_1000 value: 82.39800000000001 - type: precision_at_1 value: 27.468999999999998 - type: precision_at_3 value: 16.409000000000002 - type: precision_at_5 value: 12.099 - type: precision_at_10 value: 7.701 - type: precision_at_20 
value: 4.877 - type: precision_at_100 value: 1.465 - type: precision_at_1000 value: 0.211 - type: mrr_at_1 value: 27.4691 - type: mrr_at_3 value: 32.844699999999996 - type: mrr_at_5 value: 34.110099999999996 - type: mrr_at_10 value: 35.1631 - type: mrr_at_20 value: 35.869099999999996 - type: mrr_at_100 value: 36.2438 - type: mrr_at_1000 value: 36.304700000000004 - type: nauc_ndcg_at_1_max value: 31.897 - type: nauc_ndcg_at_1_std value: 1.7016 - type: nauc_ndcg_at_1_diff1 value: 46.680899999999994 - type: nauc_ndcg_at_3_max value: 28.7103 - type: nauc_ndcg_at_3_std value: 0.08220000000000001 - type: nauc_ndcg_at_3_diff1 value: 38.1892 - type: nauc_ndcg_at_5_max value: 27.988000000000003 - type: nauc_ndcg_at_5_std value: 2.6533 - type: nauc_ndcg_at_5_diff1 value: 37.1171 - type: nauc_ndcg_at_10_max value: 28.205400000000004 - type: nauc_ndcg_at_10_std value: 3.6081000000000003 - type: nauc_ndcg_at_10_diff1 value: 37.0636 - type: nauc_ndcg_at_20_max value: 28.708 - type: nauc_ndcg_at_20_std value: 4.999 - type: nauc_ndcg_at_20_diff1 value: 35.5315 - type: nauc_ndcg_at_100_max value: 30.000300000000003 - type: nauc_ndcg_at_100_std value: 8.0321 - type: nauc_ndcg_at_100_diff1 value: 35.0261 - type: nauc_ndcg_at_1000_max value: 31.476399999999998 - type: nauc_ndcg_at_1000_std value: 8.7892 - type: nauc_ndcg_at_1000_diff1 value: 35.8262 - type: nauc_map_at_1_max value: 19.6103 - type: nauc_map_at_1_std value: -1.459 - type: nauc_map_at_1_diff1 value: 43.7768 - type: nauc_map_at_3_max value: 23.213800000000003 - type: nauc_map_at_3_std value: -1.0172 - type: nauc_map_at_3_diff1 value: 38.4649 - type: nauc_map_at_5_max value: 24.4147 - type: nauc_map_at_5_std value: 0.6049 - type: nauc_map_at_5_diff1 value: 38.278800000000004 - type: nauc_map_at_10_max value: 25.1577 - type: nauc_map_at_10_std value: 1.5727000000000002 - type: nauc_map_at_10_diff1 value: 37.8236 - type: nauc_map_at_20_max value: 25.5774 - type: nauc_map_at_20_std value: 2.3826 - type: nauc_map_at_20_diff1 value: 37.2606 - type: nauc_map_at_100_max value: 26.1034 - type: nauc_map_at_100_std value: 3.0844 - type: nauc_map_at_100_diff1 value: 37.1361 - type: nauc_map_at_1000_max value: 26.2481 - type: nauc_map_at_1000_std value: 3.1667 - type: nauc_map_at_1000_diff1 value: 37.2042 - type: nauc_recall_at_1_max value: 19.6103 - type: nauc_recall_at_1_std value: -1.459 - type: nauc_recall_at_1_diff1 value: 43.7768 - type: nauc_recall_at_3_max value: 21.9254 - type: nauc_recall_at_3_std value: -1.2038 - type: nauc_recall_at_3_diff1 value: 32.2851 - type: nauc_recall_at_5_max value: 21.9256 - type: nauc_recall_at_5_std value: 3.1369000000000002 - type: nauc_recall_at_5_diff1 value: 29.456500000000002 - type: nauc_recall_at_10_max value: 23.393900000000002 - type: nauc_recall_at_10_std value: 5.2703 - type: nauc_recall_at_10_diff1 value: 28.5136 - type: nauc_recall_at_20_max value: 24.5427 - type: nauc_recall_at_20_std value: 9.1449 - type: nauc_recall_at_20_diff1 value: 23.919 - type: nauc_recall_at_100_max value: 25.683600000000002 - type: nauc_recall_at_100_std value: 21.0368 - type: nauc_recall_at_100_diff1 value: 18.8564 - type: nauc_recall_at_1000_max value: 34.0063 - type: nauc_recall_at_1000_std value: 38.035799999999995 - type: nauc_recall_at_1000_diff1 value: 17.1266 - type: nauc_precision_at_1_max value: 31.897 - type: nauc_precision_at_1_std value: 1.7016 - type: nauc_precision_at_1_diff1 value: 46.680899999999994 - type: nauc_precision_at_3_max value: 33.503699999999995 - type: nauc_precision_at_3_std value: 1.7436 - type: 
nauc_precision_at_3_diff1 value: 31.8292 - type: nauc_precision_at_5_max value: 35.5747 - type: nauc_precision_at_5_std value: 8.4447 - type: nauc_precision_at_5_diff1 value: 27.433600000000002 - type: nauc_precision_at_10_max value: 35.7915 - type: nauc_precision_at_10_std value: 12.0952 - type: nauc_precision_at_10_diff1 value: 23.2614 - type: nauc_precision_at_20_max value: 35.421 - type: nauc_precision_at_20_std value: 14.863399999999999 - type: nauc_precision_at_20_diff1 value: 17.186899999999998 - type: nauc_precision_at_100_max value: 33.7497 - type: nauc_precision_at_100_std value: 18.5334 - type: nauc_precision_at_100_diff1 value: 10.678600000000001 - type: nauc_precision_at_1000_max value: 29.8247 - type: nauc_precision_at_1000_std value: 14.4755 - type: nauc_precision_at_1000_diff1 value: 4.1042000000000005 - type: nauc_mrr_at_1_max value: 31.897 - type: nauc_mrr_at_1_std value: 1.7016 - type: nauc_mrr_at_1_diff1 value: 46.680899999999994 - type: nauc_mrr_at_3_max value: 32.8019 - type: nauc_mrr_at_3_std value: 1.609 - type: nauc_mrr_at_3_diff1 value: 41.2746 - type: nauc_mrr_at_5_max value: 32.9538 - type: nauc_mrr_at_5_std value: 2.884 - type: nauc_mrr_at_5_diff1 value: 40.2619 - type: nauc_mrr_at_10_max value: 33.2905 - type: nauc_mrr_at_10_std value: 3.024 - type: nauc_mrr_at_10_diff1 value: 40.7879 - type: nauc_mrr_at_20_max value: 33.117000000000004 - type: nauc_mrr_at_20_std value: 3.1062 - type: nauc_mrr_at_20_diff1 value: 40.484700000000004 - type: nauc_mrr_at_100_max value: 33.083 - type: nauc_mrr_at_100_std value: 3.405 - type: nauc_mrr_at_100_diff1 value: 40.4873 - type: nauc_mrr_at_1000_max value: 33.1046 - type: nauc_mrr_at_1000_std value: 3.4228 - type: nauc_mrr_at_1000_diff1 value: 40.5107 - type: main_score value: 28.404 - task: type: Retrieval dataset: name: MTEB HotpotQA (default) type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: ndcg_at_1 value: 73.801 - type: ndcg_at_3 value: 54.882 - type: ndcg_at_5 value: 56.916999999999994 - type: ndcg_at_10 value: 58.766 - type: ndcg_at_20 value: 59.946999999999996 - type: ndcg_at_100 value: 61.893 - type: ndcg_at_1000 value: 63.408 - type: map_at_1 value: 36.901 - type: map_at_3 value: 46.527 - type: map_at_5 value: 48.035 - type: map_at_10 value: 49.101 - type: map_at_20 value: 49.567 - type: map_at_100 value: 49.948 - type: map_at_1000 value: 50.022 - type: recall_at_1 value: 36.901 - type: recall_at_3 value: 50.176 - type: recall_at_5 value: 54.193000000000005 - type: recall_at_10 value: 58.831999999999994 - type: recall_at_20 value: 62.633 - type: recall_at_100 value: 71.242 - type: recall_at_1000 value: 81.337 - type: precision_at_1 value: 73.801 - type: precision_at_3 value: 33.45 - type: precision_at_5 value: 21.677 - type: precision_at_10 value: 11.766 - type: precision_at_20 value: 6.263000000000001 - type: precision_at_100 value: 1.425 - type: precision_at_1000 value: 0.163 - type: mrr_at_1 value: 73.8015 - type: mrr_at_3 value: 78.44250000000001 - type: mrr_at_5 value: 79.1204 - type: mrr_at_10 value: 79.4947 - type: mrr_at_20 value: 79.6248 - type: mrr_at_100 value: 79.7258 - type: mrr_at_1000 value: 79.7391 - type: nauc_ndcg_at_1_max value: 52.782 - type: nauc_ndcg_at_1_std value: -7.0408 - type: nauc_ndcg_at_1_diff1 value: 72.8754 - type: nauc_ndcg_at_3_max value: 34.7845 - type: nauc_ndcg_at_3_std value: -2.6474 - type: nauc_ndcg_at_3_diff1 value: 36.7492 - type: nauc_ndcg_at_5_max value: 32.488299999999995 - type: nauc_ndcg_at_5_std value: 
-1.6659 - type: nauc_ndcg_at_5_diff1 value: 33.1499 - type: nauc_ndcg_at_10_max value: 31.2128 - type: nauc_ndcg_at_10_std value: -0.6525000000000001 - type: nauc_ndcg_at_10_diff1 value: 31.3173 - type: nauc_ndcg_at_20_max value: 30.319000000000003 - type: nauc_ndcg_at_20_std value: 0.0078 - type: nauc_ndcg_at_20_diff1 value: 30.281799999999997 - type: nauc_ndcg_at_100_max value: 29.873300000000004 - type: nauc_ndcg_at_100_std value: 1.2557 - type: nauc_ndcg_at_100_diff1 value: 29.3753 - type: nauc_ndcg_at_1000_max value: 29.8655 - type: nauc_ndcg_at_1000_std value: 1.1226999999999998 - type: nauc_ndcg_at_1000_diff1 value: 29.602899999999998 - type: nauc_map_at_1_max value: 52.782 - type: nauc_map_at_1_std value: -7.0408 - type: nauc_map_at_1_diff1 value: 72.8754 - type: nauc_map_at_3_max value: 30.2396 - type: nauc_map_at_3_std value: -2.9367 - type: nauc_map_at_3_diff1 value: 30.315900000000003 - type: nauc_map_at_5_max value: 28.6694 - type: nauc_map_at_5_std value: -2.2835 - type: nauc_map_at_5_diff1 value: 27.9185 - type: nauc_map_at_10_max value: 28.058899999999998 - type: nauc_map_at_10_std value: -1.8286 - type: nauc_map_at_10_diff1 value: 27.106400000000004 - type: nauc_map_at_20_max value: 27.763199999999998 - type: nauc_map_at_20_std value: -1.5711 - type: nauc_map_at_20_diff1 value: 26.7588 - type: nauc_map_at_100_max value: 27.700000000000003 - type: nauc_map_at_100_std value: -1.3389 - type: nauc_map_at_100_diff1 value: 26.615499999999997 - type: nauc_map_at_1000_max value: 27.701999999999998 - type: nauc_map_at_1000_std value: -1.3391 - type: nauc_map_at_1000_diff1 value: 26.628800000000002 - type: nauc_recall_at_1_max value: 52.782 - type: nauc_recall_at_1_std value: -7.0408 - type: nauc_recall_at_1_diff1 value: 72.8754 - type: nauc_recall_at_3_max value: 26.899800000000003 - type: nauc_recall_at_3_std value: -0.7169 - type: nauc_recall_at_3_diff1 value: 21.875 - type: nauc_recall_at_5_max value: 22.0409 - type: nauc_recall_at_5_std value: 1.0630000000000002 - type: nauc_recall_at_5_diff1 value: 14.9439 - type: nauc_recall_at_10_max value: 17.8827 - type: nauc_recall_at_10_std value: 3.4513000000000003 - type: nauc_recall_at_10_diff1 value: 9.6887 - type: nauc_recall_at_20_max value: 14.6979 - type: nauc_recall_at_20_std value: 5.4514 - type: nauc_recall_at_20_diff1 value: 6.103800000000001 - type: nauc_recall_at_100_max value: 10.054599999999999 - type: nauc_recall_at_100_std value: 11.4136 - type: nauc_recall_at_100_diff1 value: -1.2643 - type: nauc_recall_at_1000_max value: 3.9052000000000002 - type: nauc_recall_at_1000_std value: 13.176099999999998 - type: nauc_recall_at_1000_diff1 value: -8.8098 - type: nauc_precision_at_1_max value: 52.782 - type: nauc_precision_at_1_std value: -7.0408 - type: nauc_precision_at_1_diff1 value: 72.8754 - type: nauc_precision_at_3_max value: 26.899800000000003 - type: nauc_precision_at_3_std value: -0.7169 - type: nauc_precision_at_3_diff1 value: 21.875 - type: nauc_precision_at_5_max value: 22.0409 - type: nauc_precision_at_5_std value: 1.0630000000000002 - type: nauc_precision_at_5_diff1 value: 14.9439 - type: nauc_precision_at_10_max value: 17.8827 - type: nauc_precision_at_10_std value: 3.4513000000000003 - type: nauc_precision_at_10_diff1 value: 9.6887 - type: nauc_precision_at_20_max value: 14.6979 - type: nauc_precision_at_20_std value: 5.4514 - type: nauc_precision_at_20_diff1 value: 6.103800000000001 - type: nauc_precision_at_100_max value: 10.054599999999999 - type: nauc_precision_at_100_std value: 11.4136 - type: 
nauc_precision_at_100_diff1 value: -1.2643 - type: nauc_precision_at_1000_max value: 3.9052000000000002 - type: nauc_precision_at_1000_std value: 13.176099999999998 - type: nauc_precision_at_1000_diff1 value: -8.8098 - type: nauc_mrr_at_1_max value: 52.782 - type: nauc_mrr_at_1_std value: -7.0408 - type: nauc_mrr_at_1_diff1 value: 72.8754 - type: nauc_mrr_at_3_max value: 54.295700000000004 - type: nauc_mrr_at_3_std value: -4.637700000000001 - type: nauc_mrr_at_3_diff1 value: 70.1027 - type: nauc_mrr_at_5_max value: 54.3589 - type: nauc_mrr_at_5_std value: -4.1942 - type: nauc_mrr_at_5_diff1 value: 69.9827 - type: nauc_mrr_at_10_max value: 54.3287 - type: nauc_mrr_at_10_std value: -3.8112 - type: nauc_mrr_at_10_diff1 value: 69.857 - type: nauc_mrr_at_20_max value: 54.325199999999995 - type: nauc_mrr_at_20_std value: -3.7948999999999997 - type: nauc_mrr_at_20_diff1 value: 69.92699999999999 - type: nauc_mrr_at_100_max value: 54.3234 - type: nauc_mrr_at_100_std value: -3.8176 - type: nauc_mrr_at_100_diff1 value: 69.963 - type: nauc_mrr_at_1000_max value: 54.3152 - type: nauc_mrr_at_1000_std value: -3.8351 - type: nauc_mrr_at_1000_diff1 value: 69.9678 - type: main_score value: 58.766 - task: type: Classification dataset: name: MTEB ImdbClassification (default) type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 62.91 - type: f1 value: 62.572799999999994 - type: f1_weighted value: 62.572799999999994 - type: ap value: 58.2831 - type: ap_weighted value: 58.2831 - type: main_score value: 62.91 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (ar) type: miracl/mmteb-miracl config: ar split: dev revision: main metrics: - type: ndcg_at_1 value: 55.559000000000005 - type: ndcg_at_3 value: 56.43899999999999 - type: ndcg_at_5 value: 59.34700000000001 - type: ndcg_at_10 value: 62.541000000000004 - type: ndcg_at_20 value: 64.739 - type: ndcg_at_100 value: 67.101 - type: ndcg_at_1000 value: 68.05 - type: map_at_1 value: 37.009 - type: map_at_3 value: 49.559 - type: map_at_5 value: 52.766999999999996 - type: map_at_10 value: 54.891 - type: map_at_20 value: 55.818 - type: map_at_100 value: 56.364000000000004 - type: map_at_1000 value: 56.418 - type: recall_at_1 value: 37.009 - type: recall_at_3 value: 56.903000000000006 - type: recall_at_5 value: 65.18 - type: recall_at_10 value: 73.317 - type: recall_at_20 value: 80.205 - type: recall_at_100 value: 90.066 - type: recall_at_1000 value: 96.272 - type: precision_at_1 value: 55.559000000000005 - type: precision_at_3 value: 32.056000000000004 - type: precision_at_5 value: 22.942 - type: precision_at_10 value: 13.483999999999998 - type: precision_at_20 value: 7.548000000000001 - type: precision_at_100 value: 1.752 - type: precision_at_1000 value: 0.189 - type: mrr_at_1 value: 55.559400000000004 - type: mrr_at_3 value: 63.7201 - type: mrr_at_5 value: 65.0996 - type: mrr_at_10 value: 65.8096 - type: mrr_at_20 value: 66.1023 - type: mrr_at_100 value: 66.2427 - type: mrr_at_1000 value: 66.2595 - type: nauc_ndcg_at_1_max value: 39.1686 - type: nauc_ndcg_at_1_std value: 1.7862 - type: nauc_ndcg_at_1_diff1 value: 45.7904 - type: nauc_ndcg_at_3_max value: 37.2044 - type: nauc_ndcg_at_3_std value: -1.6014 - type: nauc_ndcg_at_3_diff1 value: 37.9844 - type: nauc_ndcg_at_5_max value: 39.2524 - type: nauc_ndcg_at_5_std value: -0.6319 - type: nauc_ndcg_at_5_diff1 value: 38.2785 - type: nauc_ndcg_at_10_max value: 40.1167 - type: nauc_ndcg_at_10_std value: 0.3359 - type: nauc_ndcg_at_10_diff1 
value: 37.4785 - type: nauc_ndcg_at_20_max value: 41.3886 - type: nauc_ndcg_at_20_std value: 2.8987 - type: nauc_ndcg_at_20_diff1 value: 37.0635 - type: nauc_ndcg_at_100_max value: 42.357299999999995 - type: nauc_ndcg_at_100_std value: 5.2258 - type: nauc_ndcg_at_100_diff1 value: 37.3142 - type: nauc_ndcg_at_1000_max value: 41.9076 - type: nauc_ndcg_at_1000_std value: 4.539499999999999 - type: nauc_ndcg_at_1000_diff1 value: 37.703399999999995 - type: nauc_map_at_1_max value: 21.7624 - type: nauc_map_at_1_std value: -10.1554 - type: nauc_map_at_1_diff1 value: 45.413599999999995 - type: nauc_map_at_3_max value: 32.231 - type: nauc_map_at_3_std value: -5.7029000000000005 - type: nauc_map_at_3_diff1 value: 39.678799999999995 - type: nauc_map_at_5_max value: 35.3238 - type: nauc_map_at_5_std value: -3.3897999999999997 - type: nauc_map_at_5_diff1 value: 38.901599999999995 - type: nauc_map_at_10_max value: 36.248799999999996 - type: nauc_map_at_10_std value: -2.5503 - type: nauc_map_at_10_diff1 value: 38.2086 - type: nauc_map_at_20_max value: 36.8226 - type: nauc_map_at_20_std value: -1.5142 - type: nauc_map_at_20_diff1 value: 37.9922 - type: nauc_map_at_100_max value: 37.0911 - type: nauc_map_at_100_std value: -0.9837 - type: nauc_map_at_100_diff1 value: 37.9955 - type: nauc_map_at_1000_max value: 37.0788 - type: nauc_map_at_1000_std value: -0.9948 - type: nauc_map_at_1000_diff1 value: 38.016299999999994 - type: nauc_recall_at_1_max value: 21.7624 - type: nauc_recall_at_1_std value: -10.1554 - type: nauc_recall_at_1_diff1 value: 45.413599999999995 - type: nauc_recall_at_3_max value: 32.4031 - type: nauc_recall_at_3_std value: -5.2341999999999995 - type: nauc_recall_at_3_diff1 value: 33.6415 - type: nauc_recall_at_5_max value: 37.6932 - type: nauc_recall_at_5_std value: -1.2136 - type: nauc_recall_at_5_diff1 value: 31.629600000000003 - type: nauc_recall_at_10_max value: 39.6688 - type: nauc_recall_at_10_std value: 1.3085 - type: nauc_recall_at_10_diff1 value: 28.184900000000003 - type: nauc_recall_at_20_max value: 45.1114 - type: nauc_recall_at_20_std value: 11.9353 - type: nauc_recall_at_20_diff1 value: 24.9804 - type: nauc_recall_at_100_max value: 58.7538 - type: nauc_recall_at_100_std value: 40.016200000000005 - type: nauc_recall_at_100_diff1 value: 22.0195 - type: nauc_recall_at_1000_max value: 69.68910000000001 - type: nauc_recall_at_1000_std value: 61.42959999999999 - type: nauc_recall_at_1000_diff1 value: 18.6353 - type: nauc_precision_at_1_max value: 39.1686 - type: nauc_precision_at_1_std value: 1.7862 - type: nauc_precision_at_1_diff1 value: 45.7904 - type: nauc_precision_at_3_max value: 38.101400000000005 - type: nauc_precision_at_3_std value: 13.7012 - type: nauc_precision_at_3_diff1 value: 9.3923 - type: nauc_precision_at_5_max value: 36.4465 - type: nauc_precision_at_5_std value: 18.3961 - type: nauc_precision_at_5_diff1 value: 1.5756 - type: nauc_precision_at_10_max value: 29.869600000000002 - type: nauc_precision_at_10_std value: 19.869899999999998 - type: nauc_precision_at_10_diff1 value: -5.9939 - type: nauc_precision_at_20_max value: 26.564700000000002 - type: nauc_precision_at_20_std value: 24.7639 - type: nauc_precision_at_20_diff1 value: -10.8804 - type: nauc_precision_at_100_max value: 20.137 - type: nauc_precision_at_100_std value: 28.4182 - type: nauc_precision_at_100_diff1 value: -15.1979 - type: nauc_precision_at_1000_max value: 14.4263 - type: nauc_precision_at_1000_std value: 25.336199999999998 - type: nauc_precision_at_1000_diff1 value: -17.149800000000003 - type: 
nauc_mrr_at_1_max value: 39.1686 - type: nauc_mrr_at_1_std value: 1.7862 - type: nauc_mrr_at_1_diff1 value: 45.7904 - type: nauc_mrr_at_3_max value: 43.3345 - type: nauc_mrr_at_3_std value: 3.6245 - type: nauc_mrr_at_3_diff1 value: 42.332300000000004 - type: nauc_mrr_at_5_max value: 43.7305 - type: nauc_mrr_at_5_std value: 4.18 - type: nauc_mrr_at_5_diff1 value: 42.3171 - type: nauc_mrr_at_10_max value: 43.8493 - type: nauc_mrr_at_10_std value: 4.3809000000000005 - type: nauc_mrr_at_10_diff1 value: 42.3117 - type: nauc_mrr_at_20_max value: 43.8121 - type: nauc_mrr_at_20_std value: 4.526 - type: nauc_mrr_at_20_diff1 value: 42.3117 - type: nauc_mrr_at_100_max value: 43.7806 - type: nauc_mrr_at_100_std value: 4.5652 - type: nauc_mrr_at_100_diff1 value: 42.3692 - type: nauc_mrr_at_1000_max value: 43.7629 - type: nauc_mrr_at_1000_std value: 4.5475 - type: nauc_mrr_at_1000_diff1 value: 42.373 - type: main_score value: 62.541000000000004 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (bn) type: miracl/mmteb-miracl config: bn split: dev revision: main metrics: - type: ndcg_at_1 value: 56.691 - type: ndcg_at_3 value: 59.88 - type: ndcg_at_5 value: 62.717999999999996 - type: ndcg_at_10 value: 65.484 - type: ndcg_at_20 value: 67.838 - type: ndcg_at_100 value: 70.14200000000001 - type: ndcg_at_1000 value: 70.994 - type: map_at_1 value: 36.05 - type: map_at_3 value: 51.734 - type: map_at_5 value: 55.093 - type: map_at_10 value: 57.053 - type: map_at_20 value: 58.196999999999996 - type: map_at_100 value: 58.786 - type: map_at_1000 value: 58.841 - type: recall_at_1 value: 36.05 - type: recall_at_3 value: 61.596 - type: recall_at_5 value: 69.76599999999999 - type: recall_at_10 value: 76.854 - type: recall_at_20 value: 83.667 - type: recall_at_100 value: 92.85 - type: recall_at_1000 value: 97.928 - type: precision_at_1 value: 56.691 - type: precision_at_3 value: 35.848 - type: precision_at_5 value: 25.499 - type: precision_at_10 value: 14.793000000000001 - type: precision_at_20 value: 8.37 - type: precision_at_100 value: 1.925 - type: precision_at_1000 value: 0.20600000000000002 - type: mrr_at_1 value: 56.691 - type: mrr_at_3 value: 67.1127 - type: mrr_at_5 value: 68.5604 - type: mrr_at_10 value: 69.1703 - type: mrr_at_20 value: 69.35289999999999 - type: mrr_at_100 value: 69.4819 - type: mrr_at_1000 value: 69.4957 - type: nauc_ndcg_at_1_max value: 41.866 - type: nauc_ndcg_at_1_std value: 11.7317 - type: nauc_ndcg_at_1_diff1 value: 40.4762 - type: nauc_ndcg_at_3_max value: 36.677 - type: nauc_ndcg_at_3_std value: 0.4032 - type: nauc_ndcg_at_3_diff1 value: 36.459 - type: nauc_ndcg_at_5_max value: 38.7948 - type: nauc_ndcg_at_5_std value: 2.169 - type: nauc_ndcg_at_5_diff1 value: 34.9733 - type: nauc_ndcg_at_10_max value: 41.2916 - type: nauc_ndcg_at_10_std value: 4.6691 - type: nauc_ndcg_at_10_diff1 value: 34.972300000000004 - type: nauc_ndcg_at_20_max value: 42.0471 - type: nauc_ndcg_at_20_std value: 6.9529 - type: nauc_ndcg_at_20_diff1 value: 35.2909 - type: nauc_ndcg_at_100_max value: 43.2206 - type: nauc_ndcg_at_100_std value: 9.8597 - type: nauc_ndcg_at_100_diff1 value: 34.7908 - type: nauc_ndcg_at_1000_max value: 42.9023 - type: nauc_ndcg_at_1000_std value: 9.1978 - type: nauc_ndcg_at_1000_diff1 value: 35.5526 - type: nauc_map_at_1_max value: 20.435 - type: nauc_map_at_1_std value: -8.3764 - type: nauc_map_at_1_diff1 value: 45.6061 - type: nauc_map_at_3_max value: 29.855900000000002 - type: nauc_map_at_3_std value: -6.9869 - type: nauc_map_at_3_diff1 value: 39.475 - type: nauc_map_at_5_max 
value: 33.9572 - type: nauc_map_at_5_std value: -3.164 - type: nauc_map_at_5_diff1 value: 37.4095 - type: nauc_map_at_10_max value: 35.8339 - type: nauc_map_at_10_std value: -0.8439 - type: nauc_map_at_10_diff1 value: 36.903999999999996 - type: nauc_map_at_20_max value: 36.1995 - type: nauc_map_at_20_std value: 0.2973 - type: nauc_map_at_20_diff1 value: 36.8904 - type: nauc_map_at_100_max value: 36.5903 - type: nauc_map_at_100_std value: 1.2213999999999998 - type: nauc_map_at_100_diff1 value: 36.6721 - type: nauc_map_at_1000_max value: 36.5844 - type: nauc_map_at_1000_std value: 1.2026000000000001 - type: nauc_map_at_1000_diff1 value: 36.7259 - type: nauc_recall_at_1_max value: 20.435 - type: nauc_recall_at_1_std value: -8.3764 - type: nauc_recall_at_1_diff1 value: 45.6061 - type: nauc_recall_at_3_max value: 26.366600000000002 - type: nauc_recall_at_3_std value: -10.0911 - type: nauc_recall_at_3_diff1 value: 33.1969 - type: nauc_recall_at_5_max value: 34.080799999999996 - type: nauc_recall_at_5_std value: -3.2670999999999997 - type: nauc_recall_at_5_diff1 value: 26.939 - type: nauc_recall_at_10_max value: 39.6727 - type: nauc_recall_at_10_std value: 3.5848999999999998 - type: nauc_recall_at_10_diff1 value: 25.359399999999997 - type: nauc_recall_at_20_max value: 42.824400000000004 - type: nauc_recall_at_20_std value: 10.9569 - type: nauc_recall_at_20_diff1 value: 25.8988 - type: nauc_recall_at_100_max value: 56.9357 - type: nauc_recall_at_100_std value: 40.6576 - type: nauc_recall_at_100_diff1 value: 17.9669 - type: nauc_recall_at_1000_max value: 77.9855 - type: nauc_recall_at_1000_std value: 69.14519999999999 - type: nauc_recall_at_1000_diff1 value: 31.317 - type: nauc_precision_at_1_max value: 41.866 - type: nauc_precision_at_1_std value: 11.7317 - type: nauc_precision_at_1_diff1 value: 40.4762 - type: nauc_precision_at_3_max value: 41.7292 - type: nauc_precision_at_3_std value: 19.4845 - type: nauc_precision_at_3_diff1 value: 2.3043 - type: nauc_precision_at_5_max value: 41.165600000000005 - type: nauc_precision_at_5_std value: 28.4709 - type: nauc_precision_at_5_diff1 value: -8.5182 - type: nauc_precision_at_10_max value: 36.8002 - type: nauc_precision_at_10_std value: 33.0094 - type: nauc_precision_at_10_diff1 value: -13.6996 - type: nauc_precision_at_20_max value: 29.5172 - type: nauc_precision_at_20_std value: 34.6802 - type: nauc_precision_at_20_diff1 value: -15.762 - type: nauc_precision_at_100_max value: 23.539099999999998 - type: nauc_precision_at_100_std value: 38.3806 - type: nauc_precision_at_100_diff1 value: -21.1116 - type: nauc_precision_at_1000_max value: 18.6827 - type: nauc_precision_at_1000_std value: 34.7766 - type: nauc_precision_at_1000_diff1 value: -20.9498 - type: nauc_mrr_at_1_max value: 41.866 - type: nauc_mrr_at_1_std value: 11.7317 - type: nauc_mrr_at_1_diff1 value: 40.4762 - type: nauc_mrr_at_3_max value: 47.225 - type: nauc_mrr_at_3_std value: 13.6943 - type: nauc_mrr_at_3_diff1 value: 37.979600000000005 - type: nauc_mrr_at_5_max value: 47.478500000000004 - type: nauc_mrr_at_5_std value: 15.2375 - type: nauc_mrr_at_5_diff1 value: 36.6924 - type: nauc_mrr_at_10_max value: 47.7794 - type: nauc_mrr_at_10_std value: 15.620899999999999 - type: nauc_mrr_at_10_diff1 value: 36.9685 - type: nauc_mrr_at_20_max value: 47.6434 - type: nauc_mrr_at_20_std value: 15.4696 - type: nauc_mrr_at_20_diff1 value: 37.1096 - type: nauc_mrr_at_100_max value: 47.5377 - type: nauc_mrr_at_100_std value: 15.360499999999998 - type: nauc_mrr_at_100_diff1 value: 37.1581 - type: 
nauc_mrr_at_1000_max value: 47.5182 - type: nauc_mrr_at_1000_std value: 15.345600000000001 - type: nauc_mrr_at_1000_diff1 value: 37.1651 - type: main_score value: 65.484 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (de) type: miracl/mmteb-miracl config: de split: dev revision: main metrics: - type: ndcg_at_1 value: 41.967 - type: ndcg_at_3 value: 39.486 - type: ndcg_at_5 value: 41.496 - type: ndcg_at_10 value: 45.141 - type: ndcg_at_20 value: 49.012 - type: ndcg_at_100 value: 53.461000000000006 - type: ndcg_at_1000 value: 55.462999999999994 - type: map_at_1 value: 19.494 - type: map_at_3 value: 29.866999999999997 - type: map_at_5 value: 33.183 - type: map_at_10 value: 35.82 - type: map_at_20 value: 37.405 - type: map_at_100 value: 38.486 - type: map_at_1000 value: 38.624 - type: recall_at_1 value: 19.494 - type: recall_at_3 value: 35.56 - type: recall_at_5 value: 44.448 - type: recall_at_10 value: 53.774 - type: recall_at_20 value: 65.659 - type: recall_at_100 value: 83.314 - type: recall_at_1000 value: 95.045 - type: precision_at_1 value: 41.967 - type: precision_at_3 value: 28.633999999999997 - type: precision_at_5 value: 21.836 - type: precision_at_10 value: 13.869000000000002 - type: precision_at_20 value: 8.443000000000001 - type: precision_at_100 value: 2.193 - type: precision_at_1000 value: 0.252 - type: mrr_at_1 value: 41.9672 - type: mrr_at_3 value: 49.8361 - type: mrr_at_5 value: 51.9016 - type: mrr_at_10 value: 52.847500000000004 - type: mrr_at_20 value: 53.3528 - type: mrr_at_100 value: 53.6068 - type: mrr_at_1000 value: 53.632999999999996 - type: nauc_ndcg_at_1_max value: 47.2596 - type: nauc_ndcg_at_1_std value: 10.462100000000001 - type: nauc_ndcg_at_1_diff1 value: 30.1962 - type: nauc_ndcg_at_3_max value: 44.2307 - type: nauc_ndcg_at_3_std value: 17.5815 - type: nauc_ndcg_at_3_diff1 value: 29.371399999999998 - type: nauc_ndcg_at_5_max value: 44.07 - type: nauc_ndcg_at_5_std value: 13.7942 - type: nauc_ndcg_at_5_diff1 value: 31.1618 - type: nauc_ndcg_at_10_max value: 43.406800000000004 - type: nauc_ndcg_at_10_std value: 13.1051 - type: nauc_ndcg_at_10_diff1 value: 30.198399999999996 - type: nauc_ndcg_at_20_max value: 44.2888 - type: nauc_ndcg_at_20_std value: 16.2174 - type: nauc_ndcg_at_20_diff1 value: 31.1847 - type: nauc_ndcg_at_100_max value: 47.042899999999996 - type: nauc_ndcg_at_100_std value: 18.6719 - type: nauc_ndcg_at_100_diff1 value: 31.4178 - type: nauc_ndcg_at_1000_max value: 47.2147 - type: nauc_ndcg_at_1000_std value: 19.165 - type: nauc_ndcg_at_1000_diff1 value: 31.229400000000002 - type: nauc_map_at_1_max value: 28.3144 - type: nauc_map_at_1_std value: 4.6845 - type: nauc_map_at_1_diff1 value: 29.528 - type: nauc_map_at_3_max value: 36.9973 - type: nauc_map_at_3_std value: 11.669 - type: nauc_map_at_3_diff1 value: 32.3092 - type: nauc_map_at_5_max value: 39.4916 - type: nauc_map_at_5_std value: 12.0862 - type: nauc_map_at_5_diff1 value: 31.7635 - type: nauc_map_at_10_max value: 40.2979 - type: nauc_map_at_10_std value: 12.536 - type: nauc_map_at_10_diff1 value: 30.584600000000002 - type: nauc_map_at_20_max value: 40.7003 - type: nauc_map_at_20_std value: 13.5966 - type: nauc_map_at_20_diff1 value: 30.8718 - type: nauc_map_at_100_max value: 41.6514 - type: nauc_map_at_100_std value: 14.360500000000002 - type: nauc_map_at_100_diff1 value: 31.1345 - type: nauc_map_at_1000_max value: 41.6996 - type: nauc_map_at_1000_std value: 14.4203 - type: nauc_map_at_1000_diff1 value: 31.128600000000002 - type: nauc_recall_at_1_max value: 28.3144 - type: 
nauc_recall_at_1_std value: 4.6845 - type: nauc_recall_at_1_diff1 value: 29.528 - type: nauc_recall_at_3_max value: 33.567 - type: nauc_recall_at_3_std value: 12.7075 - type: nauc_recall_at_3_diff1 value: 27.9119 - type: nauc_recall_at_5_max value: 36.5991 - type: nauc_recall_at_5_std value: 8.7177 - type: nauc_recall_at_5_diff1 value: 28.3433 - type: nauc_recall_at_10_max value: 36.5863 - type: nauc_recall_at_10_std value: 8.2944 - type: nauc_recall_at_10_diff1 value: 26.411299999999997 - type: nauc_recall_at_20_max value: 35.970200000000006 - type: nauc_recall_at_20_std value: 15.487 - type: nauc_recall_at_20_diff1 value: 29.0362 - type: nauc_recall_at_100_max value: 48.892 - type: nauc_recall_at_100_std value: 30.1672 - type: nauc_recall_at_100_diff1 value: 29.9305 - type: nauc_recall_at_1000_max value: 66.36410000000001 - type: nauc_recall_at_1000_std value: 64.2413 - type: nauc_recall_at_1000_diff1 value: 32.7869 - type: nauc_precision_at_1_max value: 47.2596 - type: nauc_precision_at_1_std value: 10.462100000000001 - type: nauc_precision_at_1_diff1 value: 30.1962 - type: nauc_precision_at_3_max value: 46.6036 - type: nauc_precision_at_3_std value: 22.917 - type: nauc_precision_at_3_diff1 value: 21.104200000000002 - type: nauc_precision_at_5_max value: 44.357 - type: nauc_precision_at_5_std value: 21.4999 - type: nauc_precision_at_5_diff1 value: 16.378899999999998 - type: nauc_precision_at_10_max value: 39.1332 - type: nauc_precision_at_10_std value: 20.241500000000002 - type: nauc_precision_at_10_diff1 value: 10.2133 - type: nauc_precision_at_20_max value: 36.7308 - type: nauc_precision_at_20_std value: 26.994699999999998 - type: nauc_precision_at_20_diff1 value: 8.2737 - type: nauc_precision_at_100_max value: 33.8289 - type: nauc_precision_at_100_std value: 29.243000000000002 - type: nauc_precision_at_100_diff1 value: 2.6802 - type: nauc_precision_at_1000_max value: 27.7792 - type: nauc_precision_at_1000_std value: 30.017899999999997 - type: nauc_precision_at_1000_diff1 value: -2.3043 - type: nauc_mrr_at_1_max value: 47.2596 - type: nauc_mrr_at_1_std value: 10.462100000000001 - type: nauc_mrr_at_1_diff1 value: 30.1962 - type: nauc_mrr_at_3_max value: 47.8206 - type: nauc_mrr_at_3_std value: 15.509999999999998 - type: nauc_mrr_at_3_diff1 value: 28.4831 - type: nauc_mrr_at_5_max value: 48.4225 - type: nauc_mrr_at_5_std value: 14.0032 - type: nauc_mrr_at_5_diff1 value: 30.2989 - type: nauc_mrr_at_10_max value: 48.2881 - type: nauc_mrr_at_10_std value: 14.383199999999999 - type: nauc_mrr_at_10_diff1 value: 30.047800000000002 - type: nauc_mrr_at_20_max value: 48.2964 - type: nauc_mrr_at_20_std value: 14.7531 - type: nauc_mrr_at_20_diff1 value: 30.154199999999996 - type: nauc_mrr_at_100_max value: 48.2656 - type: nauc_mrr_at_100_std value: 14.5864 - type: nauc_mrr_at_100_diff1 value: 30.153299999999998 - type: nauc_mrr_at_1000_max value: 48.2739 - type: nauc_mrr_at_1000_std value: 14.5892 - type: nauc_mrr_at_1000_diff1 value: 30.1671 - type: main_score value: 45.141 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (en) type: miracl/mmteb-miracl config: en split: dev revision: main metrics: - type: ndcg_at_1 value: 41.176 - type: ndcg_at_3 value: 41.197 - type: ndcg_at_5 value: 42.086 - type: ndcg_at_10 value: 46.682 - type: ndcg_at_20 value: 50.157 - type: ndcg_at_100 value: 54.32599999999999 - type: ndcg_at_1000 value: 56.567 - type: map_at_1 value: 19.322 - type: map_at_3 value: 29.965999999999998 - type: map_at_5 value: 32.767 - type: map_at_10 value: 35.961 - type: 
map_at_20 value: 37.506 - type: map_at_100 value: 38.585 - type: map_at_1000 value: 38.756 - type: recall_at_1 value: 19.322 - type: recall_at_3 value: 37.171 - type: recall_at_5 value: 44.695 - type: recall_at_10 value: 57.721000000000004 - type: recall_at_20 value: 67.57 - type: recall_at_100 value: 83.256 - type: recall_at_1000 value: 95.511 - type: precision_at_1 value: 41.176 - type: precision_at_3 value: 29.328 - type: precision_at_5 value: 21.552 - type: precision_at_10 value: 14.556 - type: precision_at_20 value: 8.892 - type: precision_at_100 value: 2.325 - type: precision_at_1000 value: 0.27599999999999997 - type: mrr_at_1 value: 41.1765 - type: mrr_at_3 value: 52.3571 - type: mrr_at_5 value: 53.8214 - type: mrr_at_10 value: 55.2296 - type: mrr_at_20 value: 55.58070000000001 - type: mrr_at_100 value: 55.755500000000005 - type: mrr_at_1000 value: 55.7773 - type: nauc_ndcg_at_1_max value: 34.3579 - type: nauc_ndcg_at_1_std value: 4.9725 - type: nauc_ndcg_at_1_diff1 value: 34.5973 - type: nauc_ndcg_at_3_max value: 33.4771 - type: nauc_ndcg_at_3_std value: 1.4036 - type: nauc_ndcg_at_3_diff1 value: 28.7098 - type: nauc_ndcg_at_5_max value: 32.4928 - type: nauc_ndcg_at_5_std value: -0.066 - type: nauc_ndcg_at_5_diff1 value: 28.6068 - type: nauc_ndcg_at_10_max value: 32.068999999999996 - type: nauc_ndcg_at_10_std value: 1.6602 - type: nauc_ndcg_at_10_diff1 value: 26.9818 - type: nauc_ndcg_at_20_max value: 33.9623 - type: nauc_ndcg_at_20_std value: 4.261299999999999 - type: nauc_ndcg_at_20_diff1 value: 26.4283 - type: nauc_ndcg_at_100_max value: 35.507 - type: nauc_ndcg_at_100_std value: 7.991099999999999 - type: nauc_ndcg_at_100_diff1 value: 25.9616 - type: nauc_ndcg_at_1000_max value: 35.9545 - type: nauc_ndcg_at_1000_std value: 8.1357 - type: nauc_ndcg_at_1000_diff1 value: 26.5577 - type: nauc_map_at_1_max value: 26.392300000000002 - type: nauc_map_at_1_std value: -1.0763 - type: nauc_map_at_1_diff1 value: 32.73 - type: nauc_map_at_3_max value: 29.8191 - type: nauc_map_at_3_std value: -1.8852 - type: nauc_map_at_3_diff1 value: 29.5076 - type: nauc_map_at_5_max value: 30.8727 - type: nauc_map_at_5_std value: -1.3785 - type: nauc_map_at_5_diff1 value: 29.475299999999997 - type: nauc_map_at_10_max value: 31.5092 - type: nauc_map_at_10_std value: -0.1203 - type: nauc_map_at_10_diff1 value: 28.1841 - type: nauc_map_at_20_max value: 32.6157 - type: nauc_map_at_20_std value: 0.9819 - type: nauc_map_at_20_diff1 value: 28.339399999999998 - type: nauc_map_at_100_max value: 33.1895 - type: nauc_map_at_100_std value: 2.1590000000000003 - type: nauc_map_at_100_diff1 value: 28.180100000000003 - type: nauc_map_at_1000_max value: 33.2679 - type: nauc_map_at_1000_std value: 2.2186999999999997 - type: nauc_map_at_1000_diff1 value: 28.2088 - type: nauc_recall_at_1_max value: 26.392300000000002 - type: nauc_recall_at_1_std value: -1.0763 - type: nauc_recall_at_1_diff1 value: 32.73 - type: nauc_recall_at_3_max value: 24.2787 - type: nauc_recall_at_3_std value: -4.1108 - type: nauc_recall_at_3_diff1 value: 23.903299999999998 - type: nauc_recall_at_5_max value: 23.0102 - type: nauc_recall_at_5_std value: -4.4748 - type: nauc_recall_at_5_diff1 value: 22.4027 - type: nauc_recall_at_10_max value: 20.5018 - type: nauc_recall_at_10_std value: -2.1145 - type: nauc_recall_at_10_diff1 value: 17.5745 - type: nauc_recall_at_20_max value: 23.3743 - type: nauc_recall_at_20_std value: 3.8541 - type: nauc_recall_at_20_diff1 value: 13.4776 - type: nauc_recall_at_100_max value: 27.6324 - type: nauc_recall_at_100_std 
value: 21.3837 - type: nauc_recall_at_100_diff1 value: 7.174600000000001 - type: nauc_recall_at_1000_max value: 45.033699999999996 - type: nauc_recall_at_1000_std value: 59.160999999999994 - type: nauc_recall_at_1000_diff1 value: -0.5903 - type: nauc_precision_at_1_max value: 34.3579 - type: nauc_precision_at_1_std value: 4.9725 - type: nauc_precision_at_1_diff1 value: 34.5973 - type: nauc_precision_at_3_max value: 33.6059 - type: nauc_precision_at_3_std value: 8.9589 - type: nauc_precision_at_3_diff1 value: 16.9583 - type: nauc_precision_at_5_max value: 30.8753 - type: nauc_precision_at_5_std value: 10.080300000000001 - type: nauc_precision_at_5_diff1 value: 13.0574 - type: nauc_precision_at_10_max value: 25.7853 - type: nauc_precision_at_10_std value: 14.349700000000002 - type: nauc_precision_at_10_diff1 value: 4.2389 - type: nauc_precision_at_20_max value: 23.3853 - type: nauc_precision_at_20_std value: 18.4597 - type: nauc_precision_at_20_diff1 value: 0.9729 - type: nauc_precision_at_100_max value: 17.3016 - type: nauc_precision_at_100_std value: 23.352500000000003 - type: nauc_precision_at_100_diff1 value: -4.4505 - type: nauc_precision_at_1000_max value: 10.7759 - type: nauc_precision_at_1000_std value: 19.098699999999997 - type: nauc_precision_at_1000_diff1 value: -5.919 - type: nauc_mrr_at_1_max value: 34.3579 - type: nauc_mrr_at_1_std value: 4.9725 - type: nauc_mrr_at_1_diff1 value: 34.5973 - type: nauc_mrr_at_3_max value: 34.8266 - type: nauc_mrr_at_3_std value: 5.6232999999999995 - type: nauc_mrr_at_3_diff1 value: 29.5624 - type: nauc_mrr_at_5_max value: 34.8732 - type: nauc_mrr_at_5_std value: 5.447699999999999 - type: nauc_mrr_at_5_diff1 value: 29.9161 - type: nauc_mrr_at_10_max value: 35.0493 - type: nauc_mrr_at_10_std value: 6.1511000000000005 - type: nauc_mrr_at_10_diff1 value: 30.117699999999996 - type: nauc_mrr_at_20_max value: 35.0425 - type: nauc_mrr_at_20_std value: 6.25 - type: nauc_mrr_at_20_diff1 value: 29.8804 - type: nauc_mrr_at_100_max value: 35.058499999999995 - type: nauc_mrr_at_100_std value: 6.1998999999999995 - type: nauc_mrr_at_100_diff1 value: 29.9613 - type: nauc_mrr_at_1000_max value: 35.0463 - type: nauc_mrr_at_1000_std value: 6.1806 - type: nauc_mrr_at_1000_diff1 value: 29.973499999999998 - type: main_score value: 46.682 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (es) type: miracl/mmteb-miracl config: es split: dev revision: main metrics: - type: ndcg_at_1 value: 54.474999999999994 - type: ndcg_at_3 value: 45.78 - type: ndcg_at_5 value: 44.321 - type: ndcg_at_10 value: 46.593 - type: ndcg_at_20 value: 51.858000000000004 - type: ndcg_at_100 value: 58.079 - type: ndcg_at_1000 value: 60.656 - type: map_at_1 value: 15.966 - type: map_at_3 value: 25.933 - type: map_at_5 value: 30.171999999999997 - type: map_at_10 value: 34.67 - type: map_at_20 value: 37.501 - type: map_at_100 value: 39.45 - type: map_at_1000 value: 39.689 - type: recall_at_1 value: 15.966 - type: recall_at_3 value: 29.49 - type: recall_at_5 value: 37.983 - type: recall_at_10 value: 49.342999999999996 - type: recall_at_20 value: 62.367 - type: recall_at_100 value: 82.684 - type: recall_at_1000 value: 95.299 - type: precision_at_1 value: 54.474999999999994 - type: precision_at_3 value: 37.86 - type: precision_at_5 value: 30.586000000000002 - type: precision_at_10 value: 21.481 - type: precision_at_20 value: 13.796 - type: precision_at_100 value: 3.7900000000000005 - type: precision_at_1000 value: 0.441 - type: mrr_at_1 value: 54.475300000000004 - type: mrr_at_3 value: 
62.191399999999994 - type: mrr_at_5 value: 63.74999999999999 - type: mrr_at_10 value: 64.4789 - type: mrr_at_20 value: 64.8911 - type: mrr_at_100 value: 65.0641 - type: mrr_at_1000 value: 65.07469999999999 - type: nauc_ndcg_at_1_max value: 42.4187 - type: nauc_ndcg_at_1_std value: 17.6337 - type: nauc_ndcg_at_1_diff1 value: 36.2923 - type: nauc_ndcg_at_3_max value: 37.073499999999996 - type: nauc_ndcg_at_3_std value: 16.0772 - type: nauc_ndcg_at_3_diff1 value: 26.4292 - type: nauc_ndcg_at_5_max value: 36.1381 - type: nauc_ndcg_at_5_std value: 14.585999999999999 - type: nauc_ndcg_at_5_diff1 value: 26.411299999999997 - type: nauc_ndcg_at_10_max value: 35.5405 - type: nauc_ndcg_at_10_std value: 15.0147 - type: nauc_ndcg_at_10_diff1 value: 26.299899999999997 - type: nauc_ndcg_at_20_max value: 39.764500000000005 - type: nauc_ndcg_at_20_std value: 20.311899999999998 - type: nauc_ndcg_at_20_diff1 value: 26.3937 - type: nauc_ndcg_at_100_max value: 44.473 - type: nauc_ndcg_at_100_std value: 26.6476 - type: nauc_ndcg_at_100_diff1 value: 26.1508 - type: nauc_ndcg_at_1000_max value: 44.1126 - type: nauc_ndcg_at_1000_std value: 25.8031 - type: nauc_ndcg_at_1000_diff1 value: 26.2323 - type: nauc_map_at_1_max value: 10.2435 - type: nauc_map_at_1_std value: -11.501999999999999 - type: nauc_map_at_1_diff1 value: 26.050800000000002 - type: nauc_map_at_3_max value: 18.8877 - type: nauc_map_at_3_std value: -3.9174 - type: nauc_map_at_3_diff1 value: 25.8438 - type: nauc_map_at_5_max value: 23.7785 - type: nauc_map_at_5_std value: 0.6597000000000001 - type: nauc_map_at_5_diff1 value: 25.2118 - type: nauc_map_at_10_max value: 28.6819 - type: nauc_map_at_10_std value: 6.741 - type: nauc_map_at_10_diff1 value: 24.6999 - type: nauc_map_at_20_max value: 31.853900000000003 - type: nauc_map_at_20_std value: 10.5967 - type: nauc_map_at_20_diff1 value: 24.8637 - type: nauc_map_at_100_max value: 33.9181 - type: nauc_map_at_100_std value: 13.254 - type: nauc_map_at_100_diff1 value: 24.759500000000003 - type: nauc_map_at_1000_max value: 33.9679 - type: nauc_map_at_1000_std value: 13.290199999999999 - type: nauc_map_at_1000_diff1 value: 24.758399999999998 - type: nauc_recall_at_1_max value: 10.2435 - type: nauc_recall_at_1_std value: -11.501999999999999 - type: nauc_recall_at_1_diff1 value: 26.050800000000002 - type: nauc_recall_at_3_max value: 16.737099999999998 - type: nauc_recall_at_3_std value: -4.3613 - type: nauc_recall_at_3_diff1 value: 23.771900000000002 - type: nauc_recall_at_5_max value: 20.0168 - type: nauc_recall_at_5_std value: 1.1395 - type: nauc_recall_at_5_diff1 value: 21.4641 - type: nauc_recall_at_10_max value: 26.6231 - type: nauc_recall_at_10_std value: 12.728700000000002 - type: nauc_recall_at_10_diff1 value: 18.947400000000002 - type: nauc_recall_at_20_max value: 31.4926 - type: nauc_recall_at_20_std value: 21.0613 - type: nauc_recall_at_20_diff1 value: 17.8382 - type: nauc_recall_at_100_max value: 46.1255 - type: nauc_recall_at_100_std value: 45.2197 - type: nauc_recall_at_100_diff1 value: 15.1202 - type: nauc_recall_at_1000_max value: 54.710499999999996 - type: nauc_recall_at_1000_std value: 68.72019999999999 - type: nauc_recall_at_1000_diff1 value: 9.2808 - type: nauc_precision_at_1_max value: 42.4187 - type: nauc_precision_at_1_std value: 17.6337 - type: nauc_precision_at_1_diff1 value: 36.2923 - type: nauc_precision_at_3_max value: 42.056900000000006 - type: nauc_precision_at_3_std value: 26.4648 - type: nauc_precision_at_3_diff1 value: 20.366500000000002 - type: nauc_precision_at_5_max value: 
45.4175 - type: nauc_precision_at_5_std value: 32.2676 - type: nauc_precision_at_5_diff1 value: 14.9145 - type: nauc_precision_at_10_max value: 43.9305 - type: nauc_precision_at_10_std value: 37.9795 - type: nauc_precision_at_10_diff1 value: 8.4088 - type: nauc_precision_at_20_max value: 44.183499999999995 - type: nauc_precision_at_20_std value: 42.9261 - type: nauc_precision_at_20_diff1 value: 5.0112 - type: nauc_precision_at_100_max value: 40.8771 - type: nauc_precision_at_100_std value: 46.921800000000005 - type: nauc_precision_at_100_diff1 value: -1.6650000000000003 - type: nauc_precision_at_1000_max value: 32.0705 - type: nauc_precision_at_1000_std value: 39.5086 - type: nauc_precision_at_1000_diff1 value: -5.5237 - type: nauc_mrr_at_1_max value: 42.4187 - type: nauc_mrr_at_1_std value: 17.6337 - type: nauc_mrr_at_1_diff1 value: 36.2923 - type: nauc_mrr_at_3_max value: 47.2755 - type: nauc_mrr_at_3_std value: 23.8294 - type: nauc_mrr_at_3_diff1 value: 35.5243 - type: nauc_mrr_at_5_max value: 47.6991 - type: nauc_mrr_at_5_std value: 24.6507 - type: nauc_mrr_at_5_diff1 value: 35.5186 - type: nauc_mrr_at_10_max value: 47.726 - type: nauc_mrr_at_10_std value: 24.9941 - type: nauc_mrr_at_10_diff1 value: 35.5396 - type: nauc_mrr_at_20_max value: 47.6055 - type: nauc_mrr_at_20_std value: 24.9619 - type: nauc_mrr_at_20_diff1 value: 35.3844 - type: nauc_mrr_at_100_max value: 47.5619 - type: nauc_mrr_at_100_std value: 24.794 - type: nauc_mrr_at_100_diff1 value: 35.4683 - type: nauc_mrr_at_1000_max value: 47.545700000000004 - type: nauc_mrr_at_1000_std value: 24.7716 - type: nauc_mrr_at_1000_diff1 value: 35.4674 - type: main_score value: 46.593 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (fa) type: miracl/mmteb-miracl config: fa split: dev revision: main metrics: - type: ndcg_at_1 value: 36.709 - type: ndcg_at_3 value: 40.235 - type: ndcg_at_5 value: 42.866 - type: ndcg_at_10 value: 46.961000000000006 - type: ndcg_at_20 value: 49.891999999999996 - type: ndcg_at_100 value: 53.262 - type: ndcg_at_1000 value: 55.023999999999994 - type: map_at_1 value: 22.735 - type: map_at_3 value: 33.446 - type: map_at_5 value: 36.199999999999996 - type: map_at_10 value: 38.707 - type: map_at_20 value: 39.931 - type: map_at_100 value: 40.601 - type: map_at_1000 value: 40.711999999999996 - type: recall_at_1 value: 22.735 - type: recall_at_3 value: 41.781 - type: recall_at_5 value: 49.374 - type: recall_at_10 value: 59.949 - type: recall_at_20 value: 68.947 - type: recall_at_100 value: 83.867 - type: recall_at_1000 value: 95.00699999999999 - type: precision_at_1 value: 36.709 - type: precision_at_3 value: 24.578 - type: precision_at_5 value: 18.133 - type: precision_at_10 value: 11.661000000000001 - type: precision_at_20 value: 6.97 - type: precision_at_100 value: 1.737 - type: precision_at_1000 value: 0.199 - type: mrr_at_1 value: 36.7089 - type: mrr_at_3 value: 46.0443 - type: mrr_at_5 value: 47.7848 - type: mrr_at_10 value: 48.908699999999996 - type: mrr_at_20 value: 49.337399999999995 - type: mrr_at_100 value: 49.580999999999996 - type: mrr_at_1000 value: 49.6135 - type: nauc_ndcg_at_1_max value: 40.3709 - type: nauc_ndcg_at_1_std value: 8.100200000000001 - type: nauc_ndcg_at_1_diff1 value: 30.2274 - type: nauc_ndcg_at_3_max value: 36.0603 - type: nauc_ndcg_at_3_std value: 5.0052 - type: nauc_ndcg_at_3_diff1 value: 28.380899999999997 - type: nauc_ndcg_at_5_max value: 36.235 - type: nauc_ndcg_at_5_std value: 4.7146 - type: nauc_ndcg_at_5_diff1 value: 27.969 - type: nauc_ndcg_at_10_max value: 
38.9403 - type: nauc_ndcg_at_10_std value: 8.66 - type: nauc_ndcg_at_10_diff1 value: 26.2876 - type: nauc_ndcg_at_20_max value: 41.3286 - type: nauc_ndcg_at_20_std value: 10.9269 - type: nauc_ndcg_at_20_diff1 value: 25.859900000000003 - type: nauc_ndcg_at_100_max value: 42.8643 - type: nauc_ndcg_at_100_std value: 14.2822 - type: nauc_ndcg_at_100_diff1 value: 25.3784 - type: nauc_ndcg_at_1000_max value: 41.8778 - type: nauc_ndcg_at_1000_std value: 13.130600000000001 - type: nauc_ndcg_at_1000_diff1 value: 25.9498 - type: nauc_map_at_1_max value: 27.2644 - type: nauc_map_at_1_std value: -2.6623 - type: nauc_map_at_1_diff1 value: 40.2119 - type: nauc_map_at_3_max value: 32.121100000000006 - type: nauc_map_at_3_std value: 0.6962999999999999 - type: nauc_map_at_3_diff1 value: 33.265499999999996 - type: nauc_map_at_5_max value: 33.1237 - type: nauc_map_at_5_std value: 1.6095000000000002 - type: nauc_map_at_5_diff1 value: 30.924400000000002 - type: nauc_map_at_10_max value: 35.8464 - type: nauc_map_at_10_std value: 4.6409 - type: nauc_map_at_10_diff1 value: 29.3654 - type: nauc_map_at_20_max value: 36.967299999999994 - type: nauc_map_at_20_std value: 5.8244 - type: nauc_map_at_20_diff1 value: 29.0251 - type: nauc_map_at_100_max value: 37.3859 - type: nauc_map_at_100_std value: 6.575499999999999 - type: nauc_map_at_100_diff1 value: 28.9224 - type: nauc_map_at_1000_max value: 37.3438 - type: nauc_map_at_1000_std value: 6.5534 - type: nauc_map_at_1000_diff1 value: 28.952099999999998 - type: nauc_recall_at_1_max value: 27.2644 - type: nauc_recall_at_1_std value: -2.6623 - type: nauc_recall_at_1_diff1 value: 40.2119 - type: nauc_recall_at_3_max value: 29.0364 - type: nauc_recall_at_3_std value: 0.8965000000000001 - type: nauc_recall_at_3_diff1 value: 27.651999999999997 - type: nauc_recall_at_5_max value: 29.299799999999998 - type: nauc_recall_at_5_std value: 1.0264 - type: nauc_recall_at_5_diff1 value: 23.3762 - type: nauc_recall_at_10_max value: 34.4238 - type: nauc_recall_at_10_std value: 10.228299999999999 - type: nauc_recall_at_10_diff1 value: 17.9909 - type: nauc_recall_at_20_max value: 42.5987 - type: nauc_recall_at_20_std value: 16.880899999999997 - type: nauc_recall_at_20_diff1 value: 16.4298 - type: nauc_recall_at_100_max value: 55.767599999999995 - type: nauc_recall_at_100_std value: 44.9392 - type: nauc_recall_at_100_diff1 value: 8.6006 - type: nauc_recall_at_1000_max value: 60.8797 - type: nauc_recall_at_1000_std value: 64.1015 - type: nauc_recall_at_1000_diff1 value: 5.9098 - type: nauc_precision_at_1_max value: 40.3709 - type: nauc_precision_at_1_std value: 8.100200000000001 - type: nauc_precision_at_1_diff1 value: 30.2274 - type: nauc_precision_at_3_max value: 39.9513 - type: nauc_precision_at_3_std value: 15.568999999999999 - type: nauc_precision_at_3_diff1 value: 9.9843 - type: nauc_precision_at_5_max value: 38.1062 - type: nauc_precision_at_5_std value: 18.7953 - type: nauc_precision_at_5_diff1 value: 1.4489 - type: nauc_precision_at_10_max value: 37.601099999999995 - type: nauc_precision_at_10_std value: 26.145699999999998 - type: nauc_precision_at_10_diff1 value: -6.6542 - type: nauc_precision_at_20_max value: 35.5961 - type: nauc_precision_at_20_std value: 29.930200000000003 - type: nauc_precision_at_20_diff1 value: -9.7241 - type: nauc_precision_at_100_max value: 28.092299999999998 - type: nauc_precision_at_100_std value: 34.0409 - type: nauc_precision_at_100_diff1 value: -15.037400000000002 - type: nauc_precision_at_1000_max value: 17.1738 - type: nauc_precision_at_1000_std 
value: 26.948499999999996 - type: nauc_precision_at_1000_diff1 value: -17.5066 - type: nauc_mrr_at_1_max value: 40.3709 - type: nauc_mrr_at_1_std value: 8.100200000000001 - type: nauc_mrr_at_1_diff1 value: 30.2274 - type: nauc_mrr_at_3_max value: 41.971399999999996 - type: nauc_mrr_at_3_std value: 10.34 - type: nauc_mrr_at_3_diff1 value: 27.5952 - type: nauc_mrr_at_5_max value: 42.721599999999995 - type: nauc_mrr_at_5_std value: 10.796100000000001 - type: nauc_mrr_at_5_diff1 value: 27.260800000000003 - type: nauc_mrr_at_10_max value: 42.651 - type: nauc_mrr_at_10_std value: 11.397599999999999 - type: nauc_mrr_at_10_diff1 value: 26.5974 - type: nauc_mrr_at_20_max value: 42.7886 - type: nauc_mrr_at_20_std value: 11.4316 - type: nauc_mrr_at_20_diff1 value: 26.724500000000003 - type: nauc_mrr_at_100_max value: 42.8826 - type: nauc_mrr_at_100_std value: 11.549 - type: nauc_mrr_at_100_diff1 value: 26.762999999999998 - type: nauc_mrr_at_1000_max value: 42.8647 - type: nauc_mrr_at_1000_std value: 11.522300000000001 - type: nauc_mrr_at_1000_diff1 value: 26.790799999999997 - type: main_score value: 46.961000000000006 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (fi) type: miracl/mmteb-miracl config: fi split: dev revision: main metrics: - type: ndcg_at_1 value: 59.245000000000005 - type: ndcg_at_3 value: 58.876 - type: ndcg_at_5 value: 61.778999999999996 - type: ndcg_at_10 value: 65.551 - type: ndcg_at_20 value: 67.552 - type: ndcg_at_100 value: 69.67 - type: ndcg_at_1000 value: 70.521 - type: map_at_1 value: 37.669000000000004 - type: map_at_3 value: 52.28 - type: map_at_5 value: 55.064 - type: map_at_10 value: 57.29 - type: map_at_20 value: 58.162000000000006 - type: map_at_100 value: 58.648999999999994 - type: map_at_1000 value: 58.701 - type: recall_at_1 value: 37.669000000000004 - type: recall_at_3 value: 60.234 - type: recall_at_5 value: 67.135 - type: recall_at_10 value: 76.529 - type: recall_at_20 value: 82.685 - type: recall_at_100 value: 91.56 - type: recall_at_1000 value: 96.977 - type: precision_at_1 value: 59.245000000000005 - type: precision_at_3 value: 34.435 - type: precision_at_5 value: 23.745 - type: precision_at_10 value: 13.980999999999998 - type: precision_at_20 value: 7.707 - type: precision_at_100 value: 1.7489999999999999 - type: precision_at_1000 value: 0.186 - type: mrr_at_1 value: 59.244699999999995 - type: mrr_at_3 value: 67.9517 - type: mrr_at_5 value: 68.9746 - type: mrr_at_10 value: 69.7599 - type: mrr_at_20 value: 69.9947 - type: mrr_at_100 value: 70.1058 - type: mrr_at_1000 value: 70.11749999999999 - type: nauc_ndcg_at_1_max value: 38.7543 - type: nauc_ndcg_at_1_std value: 4.2023 - type: nauc_ndcg_at_1_diff1 value: 50.8162 - type: nauc_ndcg_at_3_max value: 36.9886 - type: nauc_ndcg_at_3_std value: 2.7807 - type: nauc_ndcg_at_3_diff1 value: 39.9604 - type: nauc_ndcg_at_5_max value: 38.567800000000005 - type: nauc_ndcg_at_5_std value: 4.0823 - type: nauc_ndcg_at_5_diff1 value: 40.1034 - type: nauc_ndcg_at_10_max value: 39.6717 - type: nauc_ndcg_at_10_std value: 4.836 - type: nauc_ndcg_at_10_diff1 value: 39.546 - type: nauc_ndcg_at_20_max value: 40.860400000000006 - type: nauc_ndcg_at_20_std value: 7.385999999999999 - type: nauc_ndcg_at_20_diff1 value: 39.1921 - type: nauc_ndcg_at_100_max value: 41.021 - type: nauc_ndcg_at_100_std value: 9.0238 - type: nauc_ndcg_at_100_diff1 value: 39.6248 - type: nauc_ndcg_at_1000_max value: 40.4034 - type: nauc_ndcg_at_1000_std value: 8.204500000000001 - type: nauc_ndcg_at_1000_diff1 value: 40.0309 - type: 
nauc_map_at_1_max value: 26.493499999999997 - type: nauc_map_at_1_std value: -2.5927 - type: nauc_map_at_1_diff1 value: 46.5824 - type: nauc_map_at_3_max value: 34.2786 - type: nauc_map_at_3_std value: 0.5491 - type: nauc_map_at_3_diff1 value: 39.4368 - type: nauc_map_at_5_max value: 36.2078 - type: nauc_map_at_5_std value: 2.3709000000000002 - type: nauc_map_at_5_diff1 value: 39.3797 - type: nauc_map_at_10_max value: 36.9681 - type: nauc_map_at_10_std value: 2.8434999999999997 - type: nauc_map_at_10_diff1 value: 39.1311 - type: nauc_map_at_20_max value: 37.4538 - type: nauc_map_at_20_std value: 3.8388 - type: nauc_map_at_20_diff1 value: 38.9234 - type: nauc_map_at_100_max value: 37.5899 - type: nauc_map_at_100_std value: 4.2547 - type: nauc_map_at_100_diff1 value: 39.0103 - type: nauc_map_at_1000_max value: 37.5573 - type: nauc_map_at_1000_std value: 4.221699999999999 - type: nauc_map_at_1000_diff1 value: 39.0312 - type: nauc_recall_at_1_max value: 26.493499999999997 - type: nauc_recall_at_1_std value: -2.5927 - type: nauc_recall_at_1_diff1 value: 46.5824 - type: nauc_recall_at_3_max value: 33.2212 - type: nauc_recall_at_3_std value: 0.5208 - type: nauc_recall_at_3_diff1 value: 33.0793 - type: nauc_recall_at_5_max value: 36.4292 - type: nauc_recall_at_5_std value: 4.139 - type: nauc_recall_at_5_diff1 value: 32.357200000000006 - type: nauc_recall_at_10_max value: 39.473 - type: nauc_recall_at_10_std value: 5.6589 - type: nauc_recall_at_10_diff1 value: 28.176299999999998 - type: nauc_recall_at_20_max value: 45.8088 - type: nauc_recall_at_20_std value: 17.084 - type: nauc_recall_at_20_diff1 value: 25.1991 - type: nauc_recall_at_100_max value: 53.8483 - type: nauc_recall_at_100_std value: 41.8548 - type: nauc_recall_at_100_diff1 value: 20.316699999999997 - type: nauc_recall_at_1000_max value: 57.7136 - type: nauc_recall_at_1000_std value: 61.00600000000001 - type: nauc_recall_at_1000_diff1 value: 14.565900000000001 - type: nauc_precision_at_1_max value: 38.7543 - type: nauc_precision_at_1_std value: 4.2023 - type: nauc_precision_at_1_diff1 value: 50.8162 - type: nauc_precision_at_3_max value: 30.9959 - type: nauc_precision_at_3_std value: 11.363 - type: nauc_precision_at_3_diff1 value: 12.556899999999999 - type: nauc_precision_at_5_max value: 27.8411 - type: nauc_precision_at_5_std value: 15.3994 - type: nauc_precision_at_5_diff1 value: 5.9959 - type: nauc_precision_at_10_max value: 21.067700000000002 - type: nauc_precision_at_10_std value: 16.4476 - type: nauc_precision_at_10_diff1 value: -2.7433 - type: nauc_precision_at_20_max value: 17.8813 - type: nauc_precision_at_20_std value: 21.4052 - type: nauc_precision_at_20_diff1 value: -8.7583 - type: nauc_precision_at_100_max value: 8.864700000000001 - type: nauc_precision_at_100_std value: 24.1294 - type: nauc_precision_at_100_diff1 value: -14.3597 - type: nauc_precision_at_1000_max value: 1.8260999999999998 - type: nauc_precision_at_1000_std value: 20.0461 - type: nauc_precision_at_1000_diff1 value: -17.6062 - type: nauc_mrr_at_1_max value: 38.7543 - type: nauc_mrr_at_1_std value: 4.2023 - type: nauc_mrr_at_1_diff1 value: 50.8162 - type: nauc_mrr_at_3_max value: 40.8761 - type: nauc_mrr_at_3_std value: 5.5156 - type: nauc_mrr_at_3_diff1 value: 47.6824 - type: nauc_mrr_at_5_max value: 41.1811 - type: nauc_mrr_at_5_std value: 6.0588999999999995 - type: nauc_mrr_at_5_diff1 value: 47.9242 - type: nauc_mrr_at_10_max value: 41.2511 - type: nauc_mrr_at_10_std value: 6.1515 - type: nauc_mrr_at_10_diff1 value: 47.7245 - type: nauc_mrr_at_20_max value: 
41.343 - type: nauc_mrr_at_20_std value: 6.4499 - type: nauc_mrr_at_20_diff1 value: 47.8506 - type: nauc_mrr_at_100_max value: 41.3067 - type: nauc_mrr_at_100_std value: 6.4111 - type: nauc_mrr_at_100_diff1 value: 47.876000000000005 - type: nauc_mrr_at_1000_max value: 41.2977 - type: nauc_mrr_at_1000_std value: 6.397899999999999 - type: nauc_mrr_at_1000_diff1 value: 47.8808 - type: main_score value: 65.551 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (fr) type: miracl/mmteb-miracl config: fr split: dev revision: main metrics: - type: ndcg_at_1 value: 38.775999999999996 - type: ndcg_at_3 value: 38.924 - type: ndcg_at_5 value: 42.571999999999996 - type: ndcg_at_10 value: 47.589 - type: ndcg_at_20 value: 51.202999999999996 - type: ndcg_at_100 value: 54.641 - type: ndcg_at_1000 value: 56.28999999999999 - type: map_at_1 value: 22.081999999999997 - type: map_at_3 value: 32.286 - type: map_at_5 value: 35.354 - type: map_at_10 value: 38.071 - type: map_at_20 value: 39.534000000000006 - type: map_at_100 value: 40.308 - type: map_at_1000 value: 40.412 - type: recall_at_1 value: 22.081999999999997 - type: recall_at_3 value: 39.527 - type: recall_at_5 value: 48.983 - type: recall_at_10 value: 61.619 - type: recall_at_20 value: 72.68900000000001 - type: recall_at_100 value: 87.237 - type: recall_at_1000 value: 97.449 - type: precision_at_1 value: 38.775999999999996 - type: precision_at_3 value: 24.976000000000003 - type: precision_at_5 value: 18.659 - type: precision_at_10 value: 12.157 - type: precision_at_20 value: 7.405 - type: precision_at_100 value: 1.831 - type: precision_at_1000 value: 0.20600000000000002 - type: mrr_at_1 value: 38.7755 - type: mrr_at_3 value: 47.4733 - type: mrr_at_5 value: 49.5578 - type: mrr_at_10 value: 51.119400000000006 - type: mrr_at_20 value: 51.6826 - type: mrr_at_100 value: 51.8472 - type: mrr_at_1000 value: 51.87969999999999 - type: nauc_ndcg_at_1_max value: 37.6869 - type: nauc_ndcg_at_1_std value: 19.3059 - type: nauc_ndcg_at_1_diff1 value: 24.1548 - type: nauc_ndcg_at_3_max value: 33.0185 - type: nauc_ndcg_at_3_std value: 19.4304 - type: nauc_ndcg_at_3_diff1 value: 18.152099999999997 - type: nauc_ndcg_at_5_max value: 35.7529 - type: nauc_ndcg_at_5_std value: 20.8762 - type: nauc_ndcg_at_5_diff1 value: 20.9497 - type: nauc_ndcg_at_10_max value: 35.9846 - type: nauc_ndcg_at_10_std value: 21.7196 - type: nauc_ndcg_at_10_diff1 value: 19.3302 - type: nauc_ndcg_at_20_max value: 38.313199999999995 - type: nauc_ndcg_at_20_std value: 23.2567 - type: nauc_ndcg_at_20_diff1 value: 20.1896 - type: nauc_ndcg_at_100_max value: 38.2753 - type: nauc_ndcg_at_100_std value: 25.048399999999997 - type: nauc_ndcg_at_100_diff1 value: 19.5028 - type: nauc_ndcg_at_1000_max value: 37.8159 - type: nauc_ndcg_at_1000_std value: 23.8262 - type: nauc_ndcg_at_1000_diff1 value: 19.4799 - type: nauc_map_at_1_max value: 25.040200000000002 - type: nauc_map_at_1_std value: 11.5183 - type: nauc_map_at_1_diff1 value: 23.7651 - type: nauc_map_at_3_max value: 30.5355 - type: nauc_map_at_3_std value: 17.7343 - type: nauc_map_at_3_diff1 value: 19.0017 - type: nauc_map_at_5_max value: 33.492 - type: nauc_map_at_5_std value: 19.7752 - type: nauc_map_at_5_diff1 value: 20.4072 - type: nauc_map_at_10_max value: 33.3246 - type: nauc_map_at_10_std value: 19.8087 - type: nauc_map_at_10_diff1 value: 19.184 - type: nauc_map_at_20_max value: 34.3329 - type: nauc_map_at_20_std value: 20.6622 - type: nauc_map_at_20_diff1 value: 19.625 - type: nauc_map_at_100_max value: 34.407700000000006 - type: 
nauc_map_at_100_std value: 21.0478 - type: nauc_map_at_100_diff1 value: 19.4432 - type: nauc_map_at_1000_max value: 34.4128 - type: nauc_map_at_1000_std value: 21.0078 - type: nauc_map_at_1000_diff1 value: 19.4386 - type: nauc_recall_at_1_max value: 25.040200000000002 - type: nauc_recall_at_1_std value: 11.5183 - type: nauc_recall_at_1_diff1 value: 23.7651 - type: nauc_recall_at_3_max value: 28.0362 - type: nauc_recall_at_3_std value: 18.1405 - type: nauc_recall_at_3_diff1 value: 14.0979 - type: nauc_recall_at_5_max value: 32.6536 - type: nauc_recall_at_5_std value: 19.763 - type: nauc_recall_at_5_diff1 value: 18.5941 - type: nauc_recall_at_10_max value: 32.736399999999996 - type: nauc_recall_at_10_std value: 20.5625 - type: nauc_recall_at_10_diff1 value: 15.4366 - type: nauc_recall_at_20_max value: 41.0178 - type: nauc_recall_at_20_std value: 25.4559 - type: nauc_recall_at_20_diff1 value: 17.8615 - type: nauc_recall_at_100_max value: 47.700700000000005 - type: nauc_recall_at_100_std value: 47.386 - type: nauc_recall_at_100_diff1 value: 15.1722 - type: nauc_recall_at_1000_max value: 75.13119999999999 - type: nauc_recall_at_1000_std value: 70.6818 - type: nauc_recall_at_1000_diff1 value: 17.7539 - type: nauc_precision_at_1_max value: 37.6869 - type: nauc_precision_at_1_std value: 19.3059 - type: nauc_precision_at_1_diff1 value: 24.1548 - type: nauc_precision_at_3_max value: 37.0296 - type: nauc_precision_at_3_std value: 24.5362 - type: nauc_precision_at_3_diff1 value: 10.0428 - type: nauc_precision_at_5_max value: 38.770700000000005 - type: nauc_precision_at_5_std value: 27.290399999999998 - type: nauc_precision_at_5_diff1 value: 11.1247 - type: nauc_precision_at_10_max value: 31.2623 - type: nauc_precision_at_10_std value: 25.794099999999997 - type: nauc_precision_at_10_diff1 value: 2.1571 - type: nauc_precision_at_20_max value: 29.2963 - type: nauc_precision_at_20_std value: 25.241000000000003 - type: nauc_precision_at_20_diff1 value: 1.8568000000000002 - type: nauc_precision_at_100_max value: 18.620800000000003 - type: nauc_precision_at_100_std value: 22.6874 - type: nauc_precision_at_100_diff1 value: -5.2441 - type: nauc_precision_at_1000_max value: 10.2324 - type: nauc_precision_at_1000_std value: 13.1045 - type: nauc_precision_at_1000_diff1 value: -9.7662 - type: nauc_mrr_at_1_max value: 37.6869 - type: nauc_mrr_at_1_std value: 19.3059 - type: nauc_mrr_at_1_diff1 value: 24.1548 - type: nauc_mrr_at_3_max value: 36.3742 - type: nauc_mrr_at_3_std value: 19.2165 - type: nauc_mrr_at_3_diff1 value: 20.883399999999998 - type: nauc_mrr_at_5_max value: 37.196400000000004 - type: nauc_mrr_at_5_std value: 19.839399999999998 - type: nauc_mrr_at_5_diff1 value: 21.6132 - type: nauc_mrr_at_10_max value: 37.7804 - type: nauc_mrr_at_10_std value: 20.7829 - type: nauc_mrr_at_10_diff1 value: 21.9443 - type: nauc_mrr_at_20_max value: 37.7391 - type: nauc_mrr_at_20_std value: 20.4514 - type: nauc_mrr_at_20_diff1 value: 21.7569 - type: nauc_mrr_at_100_max value: 37.6639 - type: nauc_mrr_at_100_std value: 20.450499999999998 - type: nauc_mrr_at_100_diff1 value: 21.7914 - type: nauc_mrr_at_1000_max value: 37.6357 - type: nauc_mrr_at_1000_std value: 20.414099999999998 - type: nauc_mrr_at_1000_diff1 value: 21.7914 - type: main_score value: 47.589 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (hi) type: miracl/mmteb-miracl config: hi split: dev revision: main metrics: - type: ndcg_at_1 value: 33.143 - type: ndcg_at_3 value: 34.988 - type: ndcg_at_5 value: 37.938 - type: ndcg_at_10 value: 
42.083999999999996 - type: ndcg_at_20 value: 45.399 - type: ndcg_at_100 value: 48.647 - type: ndcg_at_1000 value: 50.712 - type: map_at_1 value: 17.852 - type: map_at_3 value: 27.405 - type: map_at_5 value: 30.781999999999996 - type: map_at_10 value: 33.391999999999996 - type: map_at_20 value: 34.833 - type: map_at_100 value: 35.501 - type: map_at_1000 value: 35.611 - type: recall_at_1 value: 17.852 - type: recall_at_3 value: 33.765 - type: recall_at_5 value: 43.828 - type: recall_at_10 value: 55.217000000000006 - type: recall_at_20 value: 65.231 - type: recall_at_100 value: 79.92899999999999 - type: recall_at_1000 value: 93.434 - type: precision_at_1 value: 33.143 - type: precision_at_3 value: 23.429 - type: precision_at_5 value: 18.229 - type: precision_at_10 value: 11.657 - type: precision_at_20 value: 7.142999999999999 - type: precision_at_100 value: 1.7229999999999999 - type: precision_at_1000 value: 0.201 - type: mrr_at_1 value: 33.1429 - type: mrr_at_3 value: 41.428599999999996 - type: mrr_at_5 value: 43.7857 - type: mrr_at_10 value: 44.9745 - type: mrr_at_20 value: 45.4552 - type: mrr_at_100 value: 45.7257 - type: mrr_at_1000 value: 45.7671 - type: nauc_ndcg_at_1_max value: 51.2111 - type: nauc_ndcg_at_1_std value: 15.146799999999999 - type: nauc_ndcg_at_1_diff1 value: 40.127 - type: nauc_ndcg_at_3_max value: 44.1081 - type: nauc_ndcg_at_3_std value: 11.708599999999999 - type: nauc_ndcg_at_3_diff1 value: 26.8834 - type: nauc_ndcg_at_5_max value: 43.077799999999996 - type: nauc_ndcg_at_5_std value: 13.570599999999999 - type: nauc_ndcg_at_5_diff1 value: 27.5263 - type: nauc_ndcg_at_10_max value: 45.1081 - type: nauc_ndcg_at_10_std value: 14.758299999999998 - type: nauc_ndcg_at_10_diff1 value: 29.3043 - type: nauc_ndcg_at_20_max value: 47.5349 - type: nauc_ndcg_at_20_std value: 17.8 - type: nauc_ndcg_at_20_diff1 value: 28.416400000000003 - type: nauc_ndcg_at_100_max value: 48.395500000000006 - type: nauc_ndcg_at_100_std value: 18.9621 - type: nauc_ndcg_at_100_diff1 value: 28.799500000000002 - type: nauc_ndcg_at_1000_max value: 48.4885 - type: nauc_ndcg_at_1000_std value: 18.296100000000003 - type: nauc_ndcg_at_1000_diff1 value: 29.5616 - type: nauc_map_at_1_max value: 31.3083 - type: nauc_map_at_1_std value: 6.462700000000001 - type: nauc_map_at_1_diff1 value: 36.2382 - type: nauc_map_at_3_max value: 35.841699999999996 - type: nauc_map_at_3_std value: 7.013800000000001 - type: nauc_map_at_3_diff1 value: 28.991699999999998 - type: nauc_map_at_5_max value: 39.0977 - type: nauc_map_at_5_std value: 9.8928 - type: nauc_map_at_5_diff1 value: 28.6183 - type: nauc_map_at_10_max value: 41.8538 - type: nauc_map_at_10_std value: 11.5648 - type: nauc_map_at_10_diff1 value: 29.1635 - type: nauc_map_at_20_max value: 43.6057 - type: nauc_map_at_20_std value: 13.382900000000001 - type: nauc_map_at_20_diff1 value: 28.6067 - type: nauc_map_at_100_max value: 43.962 - type: nauc_map_at_100_std value: 13.7517 - type: nauc_map_at_100_diff1 value: 28.841299999999997 - type: nauc_map_at_1000_max value: 43.9824 - type: nauc_map_at_1000_std value: 13.732099999999999 - type: nauc_map_at_1000_diff1 value: 28.8971 - type: nauc_recall_at_1_max value: 31.3083 - type: nauc_recall_at_1_std value: 6.462700000000001 - type: nauc_recall_at_1_diff1 value: 36.2382 - type: nauc_recall_at_3_max value: 30.605300000000003 - type: nauc_recall_at_3_std value: 7.5045 - type: nauc_recall_at_3_diff1 value: 19.0642 - type: nauc_recall_at_5_max value: 33.4179 - type: nauc_recall_at_5_std value: 13.1973 - type: nauc_recall_at_5_diff1 
value: 20.1321 - type: nauc_recall_at_10_max value: 36.6194 - type: nauc_recall_at_10_std value: 15.8973 - type: nauc_recall_at_10_diff1 value: 23.1043 - type: nauc_recall_at_20_max value: 42.0702 - type: nauc_recall_at_20_std value: 24.1871 - type: nauc_recall_at_20_diff1 value: 20.7213 - type: nauc_recall_at_100_max value: 47.0142 - type: nauc_recall_at_100_std value: 34.8802 - type: nauc_recall_at_100_diff1 value: 18.8255 - type: nauc_recall_at_1000_max value: 59.413700000000006 - type: nauc_recall_at_1000_std value: 50.051199999999994 - type: nauc_recall_at_1000_diff1 value: 30.682 - type: nauc_precision_at_1_max value: 51.2111 - type: nauc_precision_at_1_std value: 15.146799999999999 - type: nauc_precision_at_1_diff1 value: 40.127 - type: nauc_precision_at_3_max value: 49.2718 - type: nauc_precision_at_3_std value: 15.658 - type: nauc_precision_at_3_diff1 value: 17.163700000000002 - type: nauc_precision_at_5_max value: 51.77349999999999 - type: nauc_precision_at_5_std value: 21.1016 - type: nauc_precision_at_5_diff1 value: 15.0559 - type: nauc_precision_at_10_max value: 51.843799999999995 - type: nauc_precision_at_10_std value: 23.2912 - type: nauc_precision_at_10_diff1 value: 14.191799999999999 - type: nauc_precision_at_20_max value: 50.41 - type: nauc_precision_at_20_std value: 28.2005 - type: nauc_precision_at_20_diff1 value: 8.2714 - type: nauc_precision_at_100_max value: 45.522600000000004 - type: nauc_precision_at_100_std value: 28.199 - type: nauc_precision_at_100_diff1 value: 7.180400000000001 - type: nauc_precision_at_1000_max value: 38.663399999999996 - type: nauc_precision_at_1000_std value: 22.781399999999998 - type: nauc_precision_at_1000_diff1 value: 3.8605 - type: nauc_mrr_at_1_max value: 51.2111 - type: nauc_mrr_at_1_std value: 15.146799999999999 - type: nauc_mrr_at_1_diff1 value: 40.127 - type: nauc_mrr_at_3_max value: 48.0836 - type: nauc_mrr_at_3_std value: 13.9619 - type: nauc_mrr_at_3_diff1 value: 30.8736 - type: nauc_mrr_at_5_max value: 49.0073 - type: nauc_mrr_at_5_std value: 15.6308 - type: nauc_mrr_at_5_diff1 value: 31.8004 - type: nauc_mrr_at_10_max value: 49.554700000000004 - type: nauc_mrr_at_10_std value: 15.7261 - type: nauc_mrr_at_10_diff1 value: 32.8141 - type: nauc_mrr_at_20_max value: 49.6722 - type: nauc_mrr_at_20_std value: 15.873000000000001 - type: nauc_mrr_at_20_diff1 value: 32.8857 - type: nauc_mrr_at_100_max value: 49.5869 - type: nauc_mrr_at_100_std value: 15.8044 - type: nauc_mrr_at_100_diff1 value: 32.811099999999996 - type: nauc_mrr_at_1000_max value: 49.5787 - type: nauc_mrr_at_1000_std value: 15.7836 - type: nauc_mrr_at_1000_diff1 value: 32.8438 - type: main_score value: 42.083999999999996 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (id) type: miracl/mmteb-miracl config: id split: dev revision: main metrics: - type: ndcg_at_1 value: 43.854 - type: ndcg_at_3 value: 41.041 - type: ndcg_at_5 value: 42.235 - type: ndcg_at_10 value: 45.458999999999996 - type: ndcg_at_20 value: 48.795 - type: ndcg_at_100 value: 53.642999999999994 - type: ndcg_at_1000 value: 56.052 - type: map_at_1 value: 19.192999999999998 - type: map_at_3 value: 29.125 - type: map_at_5 value: 32.42 - type: map_at_10 value: 35.181000000000004 - type: map_at_20 value: 36.775000000000006 - type: map_at_100 value: 38.06 - type: map_at_1000 value: 38.246 - type: recall_at_1 value: 19.192999999999998 - type: recall_at_3 value: 35.431000000000004 - type: recall_at_5 value: 43.348 - type: recall_at_10 value: 52.89 - type: recall_at_20 value: 61.812999999999995 - type: 
recall_at_100 value: 79.649 - type: recall_at_1000 value: 92.937 - type: precision_at_1 value: 43.854 - type: precision_at_3 value: 29.757 - type: precision_at_5 value: 23.208000000000002 - type: precision_at_10 value: 15.177 - type: precision_at_20 value: 9.286 - type: precision_at_100 value: 2.524 - type: precision_at_1000 value: 0.299 - type: mrr_at_1 value: 43.8542 - type: mrr_at_3 value: 53.4549 - type: mrr_at_5 value: 55.2674 - type: mrr_at_10 value: 56.2576 - type: mrr_at_20 value: 56.592699999999994 - type: mrr_at_100 value: 56.7841 - type: mrr_at_1000 value: 56.8049 - type: nauc_ndcg_at_1_max value: 32.205600000000004 - type: nauc_ndcg_at_1_std value: 11.343499999999999 - type: nauc_ndcg_at_1_diff1 value: 32.8768 - type: nauc_ndcg_at_3_max value: 25.8163 - type: nauc_ndcg_at_3_std value: 5.037599999999999 - type: nauc_ndcg_at_3_diff1 value: 26.118799999999997 - type: nauc_ndcg_at_5_max value: 27.159 - type: nauc_ndcg_at_5_std value: 2.9204999999999997 - type: nauc_ndcg_at_5_diff1 value: 26.429399999999998 - type: nauc_ndcg_at_10_max value: 28.6049 - type: nauc_ndcg_at_10_std value: 3.7817000000000003 - type: nauc_ndcg_at_10_diff1 value: 25.904300000000003 - type: nauc_ndcg_at_20_max value: 30.5254 - type: nauc_ndcg_at_20_std value: 6.6297999999999995 - type: nauc_ndcg_at_20_diff1 value: 25.155300000000004 - type: nauc_ndcg_at_100_max value: 32.3477 - type: nauc_ndcg_at_100_std value: 11.7329 - type: nauc_ndcg_at_100_diff1 value: 24.038 - type: nauc_ndcg_at_1000_max value: 32.1871 - type: nauc_ndcg_at_1000_std value: 12.266 - type: nauc_ndcg_at_1000_diff1 value: 24.5005 - type: nauc_map_at_1_max value: 19.5131 - type: nauc_map_at_1_std value: 0.7939999999999999 - type: nauc_map_at_1_diff1 value: 35.4824 - type: nauc_map_at_3_max value: 21.1372 - type: nauc_map_at_3_std value: -1.4297 - type: nauc_map_at_3_diff1 value: 28.7825 - type: nauc_map_at_5_max value: 23.301099999999998 - type: nauc_map_at_5_std value: -1.6149 - type: nauc_map_at_5_diff1 value: 28.353 - type: nauc_map_at_10_max value: 25.0545 - type: nauc_map_at_10_std value: 0.29650000000000004 - type: nauc_map_at_10_diff1 value: 27.6041 - type: nauc_map_at_20_max value: 26.1938 - type: nauc_map_at_20_std value: 1.8739999999999999 - type: nauc_map_at_20_diff1 value: 26.9804 - type: nauc_map_at_100_max value: 26.9981 - type: nauc_map_at_100_std value: 3.4286 - type: nauc_map_at_100_diff1 value: 26.703599999999998 - type: nauc_map_at_1000_max value: 27.005200000000002 - type: nauc_map_at_1000_std value: 3.5663 - type: nauc_map_at_1000_diff1 value: 26.7073 - type: nauc_recall_at_1_max value: 19.5131 - type: nauc_recall_at_1_std value: 0.7939999999999999 - type: nauc_recall_at_1_diff1 value: 35.4824 - type: nauc_recall_at_3_max value: 16.8845 - type: nauc_recall_at_3_std value: -4.3322 - type: nauc_recall_at_3_diff1 value: 21.232400000000002 - type: nauc_recall_at_5_max value: 20.1938 - type: nauc_recall_at_5_std value: -4.638599999999999 - type: nauc_recall_at_5_diff1 value: 19.724 - type: nauc_recall_at_10_max value: 22.7792 - type: nauc_recall_at_10_std value: -0.7303999999999999 - type: nauc_recall_at_10_diff1 value: 17.5686 - type: nauc_recall_at_20_max value: 27.1692 - type: nauc_recall_at_20_std value: 4.6297 - type: nauc_recall_at_20_diff1 value: 15.5287 - type: nauc_recall_at_100_max value: 33.9833 - type: nauc_recall_at_100_std value: 26.366899999999998 - type: nauc_recall_at_100_diff1 value: 6.823799999999999 - type: nauc_recall_at_1000_max value: 44.722 - type: nauc_recall_at_1000_std value: 49.6373 - type: 
nauc_recall_at_1000_diff1 value: -1.5053 - type: nauc_precision_at_1_max value: 32.205600000000004 - type: nauc_precision_at_1_std value: 11.343499999999999 - type: nauc_precision_at_1_diff1 value: 32.8768 - type: nauc_precision_at_3_max value: 24.2364 - type: nauc_precision_at_3_std value: 8.0909 - type: nauc_precision_at_3_diff1 value: 12.090399999999999 - type: nauc_precision_at_5_max value: 26.0005 - type: nauc_precision_at_5_std value: 10.2623 - type: nauc_precision_at_5_diff1 value: 8.2296 - type: nauc_precision_at_10_max value: 24.6876 - type: nauc_precision_at_10_std value: 16.8067 - type: nauc_precision_at_10_diff1 value: 1.6472 - type: nauc_precision_at_20_max value: 22.5879 - type: nauc_precision_at_20_std value: 22.4936 - type: nauc_precision_at_20_diff1 value: -2.8762 - type: nauc_precision_at_100_max value: 17.6199 - type: nauc_precision_at_100_std value: 29.5456 - type: nauc_precision_at_100_diff1 value: -8.3992 - type: nauc_precision_at_1000_max value: 10.8473 - type: nauc_precision_at_1000_std value: 27.394600000000004 - type: nauc_precision_at_1000_diff1 value: -9.8316 - type: nauc_mrr_at_1_max value: 32.205600000000004 - type: nauc_mrr_at_1_std value: 11.343499999999999 - type: nauc_mrr_at_1_diff1 value: 32.8768 - type: nauc_mrr_at_3_max value: 32.2439 - type: nauc_mrr_at_3_std value: 11.927999999999999 - type: nauc_mrr_at_3_diff1 value: 28.501900000000003 - type: nauc_mrr_at_5_max value: 33.063500000000005 - type: nauc_mrr_at_5_std value: 12.5223 - type: nauc_mrr_at_5_diff1 value: 28.5765 - type: nauc_mrr_at_10_max value: 33.0845 - type: nauc_mrr_at_10_std value: 12.7026 - type: nauc_mrr_at_10_diff1 value: 28.5328 - type: nauc_mrr_at_20_max value: 33.1039 - type: nauc_mrr_at_20_std value: 12.7458 - type: nauc_mrr_at_20_diff1 value: 28.6635 - type: nauc_mrr_at_100_max value: 33.058 - type: nauc_mrr_at_100_std value: 12.8462 - type: nauc_mrr_at_100_diff1 value: 28.656599999999997 - type: nauc_mrr_at_1000_max value: 33.0462 - type: nauc_mrr_at_1000_std value: 12.829699999999999 - type: nauc_mrr_at_1000_diff1 value: 28.6562 - type: main_score value: 45.458999999999996 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (ja) type: miracl/mmteb-miracl config: ja split: dev revision: main metrics: - type: ndcg_at_1 value: 53.256 - type: ndcg_at_3 value: 53.717000000000006 - type: ndcg_at_5 value: 56.523 - type: ndcg_at_10 value: 59.922 - type: ndcg_at_20 value: 62.596 - type: ndcg_at_100 value: 65.40700000000001 - type: ndcg_at_1000 value: 66.484 - type: map_at_1 value: 34.555 - type: map_at_3 value: 45.667 - type: map_at_5 value: 48.888 - type: map_at_10 value: 51.214000000000006 - type: map_at_20 value: 52.325 - type: map_at_100 value: 53.032000000000004 - type: map_at_1000 value: 53.11 - type: recall_at_1 value: 34.555 - type: recall_at_3 value: 53.482 - type: recall_at_5 value: 62.327 - type: recall_at_10 value: 71.476 - type: recall_at_20 value: 79.81099999999999 - type: recall_at_100 value: 91.152 - type: recall_at_1000 value: 97.72800000000001 - type: precision_at_1 value: 53.256 - type: precision_at_3 value: 30.697999999999997 - type: precision_at_5 value: 22.419 - type: precision_at_10 value: 13.453000000000001 - type: precision_at_20 value: 7.756 - type: precision_at_100 value: 1.856 - type: precision_at_1000 value: 0.203 - type: mrr_at_1 value: 53.2558 - type: mrr_at_3 value: 61.860499999999995 - type: mrr_at_5 value: 63.558099999999996 - type: mrr_at_10 value: 64.4037 - type: mrr_at_20 value: 64.78960000000001 - type: mrr_at_100 value: 64.9286 - type: 
mrr_at_1000 value: 64.9426 - type: nauc_ndcg_at_1_max value: 40.0831 - type: nauc_ndcg_at_1_std value: 5.4576 - type: nauc_ndcg_at_1_diff1 value: 43.1468 - type: nauc_ndcg_at_3_max value: 32.8799 - type: nauc_ndcg_at_3_std value: -3.7643000000000004 - type: nauc_ndcg_at_3_diff1 value: 33.0607 - type: nauc_ndcg_at_5_max value: 32.6847 - type: nauc_ndcg_at_5_std value: -4.4878 - type: nauc_ndcg_at_5_diff1 value: 33.7729 - type: nauc_ndcg_at_10_max value: 34.0334 - type: nauc_ndcg_at_10_std value: -3.2938 - type: nauc_ndcg_at_10_diff1 value: 33.9215 - type: nauc_ndcg_at_20_max value: 35.032799999999995 - type: nauc_ndcg_at_20_std value: -0.9834 - type: nauc_ndcg_at_20_diff1 value: 33.4568 - type: nauc_ndcg_at_100_max value: 37.2464 - type: nauc_ndcg_at_100_std value: 1.9361 - type: nauc_ndcg_at_100_diff1 value: 34.844 - type: nauc_ndcg_at_1000_max value: 37.0714 - type: nauc_ndcg_at_1000_std value: 1.7745 - type: nauc_ndcg_at_1000_diff1 value: 35.123 - type: nauc_map_at_1_max value: 21.2553 - type: nauc_map_at_1_std value: -11.0112 - type: nauc_map_at_1_diff1 value: 38.4142 - type: nauc_map_at_3_max value: 25.6791 - type: nauc_map_at_3_std value: -10.7165 - type: nauc_map_at_3_diff1 value: 33.1602 - type: nauc_map_at_5_max value: 27.790300000000002 - type: nauc_map_at_5_std value: -9.0268 - type: nauc_map_at_5_diff1 value: 33.2551 - type: nauc_map_at_10_max value: 29.4317 - type: nauc_map_at_10_std value: -7.606300000000001 - type: nauc_map_at_10_diff1 value: 33.456399999999995 - type: nauc_map_at_20_max value: 30.0805 - type: nauc_map_at_20_std value: -6.482 - type: nauc_map_at_20_diff1 value: 33.3844 - type: nauc_map_at_100_max value: 30.7427 - type: nauc_map_at_100_std value: -5.6065 - type: nauc_map_at_100_diff1 value: 33.650600000000004 - type: nauc_map_at_1000_max value: 30.763099999999998 - type: nauc_map_at_1000_std value: -5.5541 - type: nauc_map_at_1000_diff1 value: 33.677 - type: nauc_recall_at_1_max value: 21.2553 - type: nauc_recall_at_1_std value: -11.0112 - type: nauc_recall_at_1_diff1 value: 38.4142 - type: nauc_recall_at_3_max value: 22.537399999999998 - type: nauc_recall_at_3_std value: -12.565000000000001 - type: nauc_recall_at_3_diff1 value: 26.549 - type: nauc_recall_at_5_max value: 23.329900000000002 - type: nauc_recall_at_5_std value: -10.4524 - type: nauc_recall_at_5_diff1 value: 24.7008 - type: nauc_recall_at_10_max value: 26.0061 - type: nauc_recall_at_10_std value: -6.1622 - type: nauc_recall_at_10_diff1 value: 22.880300000000002 - type: nauc_recall_at_20_max value: 26.820300000000003 - type: nauc_recall_at_20_std value: 0.49820000000000003 - type: nauc_recall_at_20_diff1 value: 17.1066 - type: nauc_recall_at_100_max value: 41.4851 - type: nauc_recall_at_100_std value: 24.1372 - type: nauc_recall_at_100_diff1 value: 20.2474 - type: nauc_recall_at_1000_max value: 46.699 - type: nauc_recall_at_1000_std value: 43.6571 - type: nauc_recall_at_1000_diff1 value: 12.969800000000001 - type: nauc_precision_at_1_max value: 40.0831 - type: nauc_precision_at_1_std value: 5.4576 - type: nauc_precision_at_1_diff1 value: 43.1468 - type: nauc_precision_at_3_max value: 35.862500000000004 - type: nauc_precision_at_3_std value: 12.6798 - type: nauc_precision_at_3_diff1 value: 13.8812 - type: nauc_precision_at_5_max value: 34.525800000000004 - type: nauc_precision_at_5_std value: 19.4325 - type: nauc_precision_at_5_diff1 value: 8.5877 - type: nauc_precision_at_10_max value: 31.776500000000002 - type: nauc_precision_at_10_std value: 24.4128 - type: nauc_precision_at_10_diff1 value: 
2.8872999999999998 - type: nauc_precision_at_20_max value: 27.1526 - type: nauc_precision_at_20_std value: 29.1072 - type: nauc_precision_at_20_diff1 value: -1.5491 - type: nauc_precision_at_100_max value: 23.9636 - type: nauc_precision_at_100_std value: 34.5439 - type: nauc_precision_at_100_diff1 value: -3.8294 - type: nauc_precision_at_1000_max value: 19.2461 - type: nauc_precision_at_1000_std value: 33.466499999999996 - type: nauc_precision_at_1000_diff1 value: -5.7622 - type: nauc_mrr_at_1_max value: 40.0831 - type: nauc_mrr_at_1_std value: 5.4576 - type: nauc_mrr_at_1_diff1 value: 43.1468 - type: nauc_mrr_at_3_max value: 44.1712 - type: nauc_mrr_at_3_std value: 6.1216 - type: nauc_mrr_at_3_diff1 value: 41.1386 - type: nauc_mrr_at_5_max value: 44.0165 - type: nauc_mrr_at_5_std value: 6.9895 - type: nauc_mrr_at_5_diff1 value: 41.124 - type: nauc_mrr_at_10_max value: 43.9807 - type: nauc_mrr_at_10_std value: 7.1412 - type: nauc_mrr_at_10_diff1 value: 41.0447 - type: nauc_mrr_at_20_max value: 43.9406 - type: nauc_mrr_at_20_std value: 7.2738 - type: nauc_mrr_at_20_diff1 value: 40.9775 - type: nauc_mrr_at_100_max value: 43.9141 - type: nauc_mrr_at_100_std value: 7.212300000000001 - type: nauc_mrr_at_100_diff1 value: 41.112700000000004 - type: nauc_mrr_at_1000_max value: 43.9012 - type: nauc_mrr_at_1000_std value: 7.1947 - type: nauc_mrr_at_1000_diff1 value: 41.1126 - type: main_score value: 59.922 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (ko) type: miracl/mmteb-miracl config: ko split: dev revision: main metrics: - type: ndcg_at_1 value: 54.93 - type: ndcg_at_3 value: 53.068000000000005 - type: ndcg_at_5 value: 55.202 - type: ndcg_at_10 value: 58.413000000000004 - type: ndcg_at_20 value: 61.732 - type: ndcg_at_100 value: 64.374 - type: ndcg_at_1000 value: 65.655 - type: map_at_1 value: 32.602 - type: map_at_3 value: 42.591 - type: map_at_5 value: 46.466 - type: map_at_10 value: 49.38 - type: map_at_20 value: 51.044999999999995 - type: map_at_100 value: 51.842 - type: map_at_1000 value: 51.92 - type: recall_at_1 value: 32.602 - type: recall_at_3 value: 49.173 - type: recall_at_5 value: 58.269999999999996 - type: recall_at_10 value: 68.647 - type: recall_at_20 value: 78.089 - type: recall_at_100 value: 87.746 - type: recall_at_1000 value: 95.524 - type: precision_at_1 value: 54.93 - type: precision_at_3 value: 31.455 - type: precision_at_5 value: 24.413 - type: precision_at_10 value: 15.399 - type: precision_at_20 value: 9.366 - type: precision_at_100 value: 2.235 - type: precision_at_1000 value: 0.246 - type: mrr_at_1 value: 54.9296 - type: mrr_at_3 value: 62.0501 - type: mrr_at_5 value: 63.7167 - type: mrr_at_10 value: 64.7179 - type: mrr_at_20 value: 65.0792 - type: mrr_at_100 value: 65.1651 - type: mrr_at_1000 value: 65.1775 - type: nauc_ndcg_at_1_max value: 56.11150000000001 - type: nauc_ndcg_at_1_std value: 30.1071 - type: nauc_ndcg_at_1_diff1 value: 34.2026 - type: nauc_ndcg_at_3_max value: 34.164899999999996 - type: nauc_ndcg_at_3_std value: 8.9616 - type: nauc_ndcg_at_3_diff1 value: 33.8594 - type: nauc_ndcg_at_5_max value: 35.988 - type: nauc_ndcg_at_5_std value: 9.1819 - type: nauc_ndcg_at_5_diff1 value: 34.3302 - type: nauc_ndcg_at_10_max value: 33.9669 - type: nauc_ndcg_at_10_std value: 9.9015 - type: nauc_ndcg_at_10_diff1 value: 34.7522 - type: nauc_ndcg_at_20_max value: 38.7156 - type: nauc_ndcg_at_20_std value: 13.478299999999999 - type: nauc_ndcg_at_20_diff1 value: 34.5892 - type: nauc_ndcg_at_100_max value: 43.2542 - type: nauc_ndcg_at_100_std value: 
19.6461 - type: nauc_ndcg_at_100_diff1 value: 33.5102 - type: nauc_ndcg_at_1000_max value: 43.5965 - type: nauc_ndcg_at_1000_std value: 20.1448 - type: nauc_ndcg_at_1000_diff1 value: 33.508500000000005 - type: nauc_map_at_1_max value: 4.7901 - type: nauc_map_at_1_std value: -11.3406 - type: nauc_map_at_1_diff1 value: 47.3089 - type: nauc_map_at_3_max value: 10.8067 - type: nauc_map_at_3_std value: -11.149000000000001 - type: nauc_map_at_3_diff1 value: 40.8163 - type: nauc_map_at_5_max value: 19.4936 - type: nauc_map_at_5_std value: -4.9421 - type: nauc_map_at_5_diff1 value: 38.1108 - type: nauc_map_at_10_max value: 23.4772 - type: nauc_map_at_10_std value: 0.5471 - type: nauc_map_at_10_diff1 value: 37.0351 - type: nauc_map_at_20_max value: 27.0291 - type: nauc_map_at_20_std value: 3.2716000000000003 - type: nauc_map_at_20_diff1 value: 36.835 - type: nauc_map_at_100_max value: 28.7591 - type: nauc_map_at_100_std value: 5.4503 - type: nauc_map_at_100_diff1 value: 36.3655 - type: nauc_map_at_1000_max value: 28.8292 - type: nauc_map_at_1000_std value: 5.5265 - type: nauc_map_at_1000_diff1 value: 36.3425 - type: nauc_recall_at_1_max value: 4.7901 - type: nauc_recall_at_1_std value: -11.3406 - type: nauc_recall_at_1_diff1 value: 47.3089 - type: nauc_recall_at_3_max value: 6.1487 - type: nauc_recall_at_3_std value: -16.451999999999998 - type: nauc_recall_at_3_diff1 value: 35.876200000000004 - type: nauc_recall_at_5_max value: 17.4052 - type: nauc_recall_at_5_std value: -8.3001 - type: nauc_recall_at_5_diff1 value: 31.986700000000003 - type: nauc_recall_at_10_max value: 19.932 - type: nauc_recall_at_10_std value: -0.6047 - type: nauc_recall_at_10_diff1 value: 29.7464 - type: nauc_recall_at_20_max value: 27.2026 - type: nauc_recall_at_20_std value: 3.4061 - type: nauc_recall_at_20_diff1 value: 29.7029 - type: nauc_recall_at_100_max value: 49.4794 - type: nauc_recall_at_100_std value: 33.5322 - type: nauc_recall_at_100_diff1 value: 25.5531 - type: nauc_recall_at_1000_max value: 66.1815 - type: nauc_recall_at_1000_std value: 62.81529999999999 - type: nauc_recall_at_1000_diff1 value: 27.209699999999998 - type: nauc_precision_at_1_max value: 56.11150000000001 - type: nauc_precision_at_1_std value: 30.1071 - type: nauc_precision_at_1_diff1 value: 34.2026 - type: nauc_precision_at_3_max value: 56.5357 - type: nauc_precision_at_3_std value: 34.1074 - type: nauc_precision_at_3_diff1 value: 2.1084 - type: nauc_precision_at_5_max value: 67.0257 - type: nauc_precision_at_5_std value: 48.780699999999996 - type: nauc_precision_at_5_diff1 value: -9.4319 - type: nauc_precision_at_10_max value: 64.3278 - type: nauc_precision_at_10_std value: 57.504 - type: nauc_precision_at_10_diff1 value: -15.3767 - type: nauc_precision_at_20_max value: 65.8933 - type: nauc_precision_at_20_std value: 60.3452 - type: nauc_precision_at_20_diff1 value: -19.1514 - type: nauc_precision_at_100_max value: 63.3574 - type: nauc_precision_at_100_std value: 64.9713 - type: nauc_precision_at_100_diff1 value: -22.4344 - type: nauc_precision_at_1000_max value: 59.358599999999996 - type: nauc_precision_at_1000_std value: 62.943000000000005 - type: nauc_precision_at_1000_diff1 value: -24.9167 - type: nauc_mrr_at_1_max value: 56.11150000000001 - type: nauc_mrr_at_1_std value: 30.1071 - type: nauc_mrr_at_1_diff1 value: 34.2026 - type: nauc_mrr_at_3_max value: 59.3661 - type: nauc_mrr_at_3_std value: 30.759999999999998 - type: nauc_mrr_at_3_diff1 value: 31.9662 - type: nauc_mrr_at_5_max value: 60.6752 - type: nauc_mrr_at_5_std value: 
32.477000000000004 - type: nauc_mrr_at_5_diff1 value: 32.235200000000006 - type: nauc_mrr_at_10_max value: 60.222500000000004 - type: nauc_mrr_at_10_std value: 32.4976 - type: nauc_mrr_at_10_diff1 value: 31.8963 - type: nauc_mrr_at_20_max value: 60.0608 - type: nauc_mrr_at_20_std value: 32.421 - type: nauc_mrr_at_20_diff1 value: 31.812600000000003 - type: nauc_mrr_at_100_max value: 60.0846 - type: nauc_mrr_at_100_std value: 32.3954 - type: nauc_mrr_at_100_diff1 value: 31.8055 - type: nauc_mrr_at_1000_max value: 60.0763 - type: nauc_mrr_at_1000_std value: 32.403999999999996 - type: nauc_mrr_at_1000_diff1 value: 31.8195 - type: main_score value: 58.413000000000004 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (ru) type: miracl/mmteb-miracl config: ru split: dev revision: main metrics: - type: ndcg_at_1 value: 43.131 - type: ndcg_at_3 value: 42.808 - type: ndcg_at_5 value: 44.373000000000005 - type: ndcg_at_10 value: 48.262 - type: ndcg_at_20 value: 52.022999999999996 - type: ndcg_at_100 value: 56.157999999999994 - type: ndcg_at_1000 value: 57.928999999999995 - type: map_at_1 value: 22.017999999999997 - type: map_at_3 value: 32.41 - type: map_at_5 value: 35.558 - type: map_at_10 value: 38.449 - type: map_at_20 value: 40.144000000000005 - type: map_at_100 value: 41.219 - type: map_at_1000 value: 41.355 - type: recall_at_1 value: 22.017999999999997 - type: recall_at_3 value: 39.306999999999995 - type: recall_at_5 value: 47.077000000000005 - type: recall_at_10 value: 58.034 - type: recall_at_20 value: 68.60300000000001 - type: recall_at_100 value: 84.074 - type: recall_at_1000 value: 93.938 - type: precision_at_1 value: 43.131 - type: precision_at_3 value: 29.127 - type: precision_at_5 value: 22.076999999999998 - type: precision_at_10 value: 14.441 - type: precision_at_20 value: 8.958 - type: precision_at_100 value: 2.331 - type: precision_at_1000 value: 0.267 - type: mrr_at_1 value: 43.131 - type: mrr_at_3 value: 53.28810000000001 - type: mrr_at_5 value: 54.785700000000006 - type: mrr_at_10 value: 55.948100000000004 - type: mrr_at_20 value: 56.422799999999995 - type: mrr_at_100 value: 56.5998 - type: mrr_at_1000 value: 56.615 - type: nauc_ndcg_at_1_max value: 37.0316 - type: nauc_ndcg_at_1_std value: 16.0392 - type: nauc_ndcg_at_1_diff1 value: 35.6661 - type: nauc_ndcg_at_3_max value: 32.547 - type: nauc_ndcg_at_3_std value: 12.7791 - type: nauc_ndcg_at_3_diff1 value: 27.252599999999997 - type: nauc_ndcg_at_5_max value: 33.2141 - type: nauc_ndcg_at_5_std value: 12.16 - type: nauc_ndcg_at_5_diff1 value: 26.5849 - type: nauc_ndcg_at_10_max value: 34.6417 - type: nauc_ndcg_at_10_std value: 13.350699999999998 - type: nauc_ndcg_at_10_diff1 value: 26.616600000000002 - type: nauc_ndcg_at_20_max value: 36.94 - type: nauc_ndcg_at_20_std value: 16.3221 - type: nauc_ndcg_at_20_diff1 value: 26.3159 - type: nauc_ndcg_at_100_max value: 39.050200000000004 - type: nauc_ndcg_at_100_std value: 19.5849 - type: nauc_ndcg_at_100_diff1 value: 26.6473 - type: nauc_ndcg_at_1000_max value: 39.030300000000004 - type: nauc_ndcg_at_1000_std value: 19.6508 - type: nauc_ndcg_at_1000_diff1 value: 27.0546 - type: nauc_map_at_1_max value: 21.368599999999997 - type: nauc_map_at_1_std value: -0.9005000000000001 - type: nauc_map_at_1_diff1 value: 35.212500000000006 - type: nauc_map_at_3_max value: 26.070700000000002 - type: nauc_map_at_3_std value: 3.9229 - type: nauc_map_at_3_diff1 value: 29.1293 - type: nauc_map_at_5_max value: 29.032999999999998 - type: nauc_map_at_5_std value: 6.5134 - type: nauc_map_at_5_diff1 
value: 27.908699999999996 - type: nauc_map_at_10_max value: 30.7252 - type: nauc_map_at_10_std value: 8.2968 - type: nauc_map_at_10_diff1 value: 27.6959 - type: nauc_map_at_20_max value: 31.926900000000003 - type: nauc_map_at_20_std value: 9.7313 - type: nauc_map_at_20_diff1 value: 27.441300000000002 - type: nauc_map_at_100_max value: 32.7179 - type: nauc_map_at_100_std value: 10.8331 - type: nauc_map_at_100_diff1 value: 27.458 - type: nauc_map_at_1000_max value: 32.7499 - type: nauc_map_at_1000_std value: 10.898900000000001 - type: nauc_map_at_1000_diff1 value: 27.476699999999997 - type: nauc_recall_at_1_max value: 21.368599999999997 - type: nauc_recall_at_1_std value: -0.9005000000000001 - type: nauc_recall_at_1_diff1 value: 35.212500000000006 - type: nauc_recall_at_3_max value: 22.0607 - type: nauc_recall_at_3_std value: 3.9726 - type: nauc_recall_at_3_diff1 value: 21.705 - type: nauc_recall_at_5_max value: 25.915300000000002 - type: nauc_recall_at_5_std value: 7.4636 - type: nauc_recall_at_5_diff1 value: 18.7443 - type: nauc_recall_at_10_max value: 28.7142 - type: nauc_recall_at_10_std value: 11.5264 - type: nauc_recall_at_10_diff1 value: 16.7709 - type: nauc_recall_at_20_max value: 33.5513 - type: nauc_recall_at_20_std value: 18.5489 - type: nauc_recall_at_20_diff1 value: 14.751900000000001 - type: nauc_recall_at_100_max value: 45.7418 - type: nauc_recall_at_100_std value: 37.693 - type: nauc_recall_at_100_diff1 value: 13.589699999999999 - type: nauc_recall_at_1000_max value: 62.0517 - type: nauc_recall_at_1000_std value: 61.5653 - type: nauc_recall_at_1000_diff1 value: 12.8732 - type: nauc_precision_at_1_max value: 37.0316 - type: nauc_precision_at_1_std value: 16.0392 - type: nauc_precision_at_1_diff1 value: 35.6661 - type: nauc_precision_at_3_max value: 36.3558 - type: nauc_precision_at_3_std value: 24.7253 - type: nauc_precision_at_3_diff1 value: 13.029499999999999 - type: nauc_precision_at_5_max value: 36.3254 - type: nauc_precision_at_5_std value: 26.7762 - type: nauc_precision_at_5_diff1 value: 7.561 - type: nauc_precision_at_10_max value: 32.2831 - type: nauc_precision_at_10_std value: 27.621499999999997 - type: nauc_precision_at_10_diff1 value: 2.9292 - type: nauc_precision_at_20_max value: 30.0072 - type: nauc_precision_at_20_std value: 30.3405 - type: nauc_precision_at_20_diff1 value: -1.4427 - type: nauc_precision_at_100_max value: 23.4879 - type: nauc_precision_at_100_std value: 30.9203 - type: nauc_precision_at_100_diff1 value: -5.0680000000000005 - type: nauc_precision_at_1000_max value: 16.6706 - type: nauc_precision_at_1000_std value: 26.621899999999997 - type: nauc_precision_at_1000_diff1 value: -6.5622 - type: nauc_mrr_at_1_max value: 37.0316 - type: nauc_mrr_at_1_std value: 16.0392 - type: nauc_mrr_at_1_diff1 value: 35.6661 - type: nauc_mrr_at_3_max value: 39.3089 - type: nauc_mrr_at_3_std value: 19.7933 - type: nauc_mrr_at_3_diff1 value: 30.968600000000002 - type: nauc_mrr_at_5_max value: 39.641 - type: nauc_mrr_at_5_std value: 20.052300000000002 - type: nauc_mrr_at_5_diff1 value: 31.3307 - type: nauc_mrr_at_10_max value: 40.1004 - type: nauc_mrr_at_10_std value: 20.5772 - type: nauc_mrr_at_10_diff1 value: 31.423000000000002 - type: nauc_mrr_at_20_max value: 40.14 - type: nauc_mrr_at_20_std value: 20.677400000000002 - type: nauc_mrr_at_20_diff1 value: 31.568800000000003 - type: nauc_mrr_at_100_max value: 40.0878 - type: nauc_mrr_at_100_std value: 20.6034 - type: nauc_mrr_at_100_diff1 value: 31.5872 - type: nauc_mrr_at_1000_max value: 40.078 - type: 
nauc_mrr_at_1000_std value: 20.589399999999998 - type: nauc_mrr_at_1000_diff1 value: 31.591599999999996 - type: main_score value: 48.262 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (sw) type: miracl/mmteb-miracl config: sw split: dev revision: main metrics: - type: ndcg_at_1 value: 50.415 - type: ndcg_at_3 value: 53.04 - type: ndcg_at_5 value: 56.138999999999996 - type: ndcg_at_10 value: 59.111000000000004 - type: ndcg_at_20 value: 61.651 - type: ndcg_at_100 value: 64.312 - type: ndcg_at_1000 value: 65.089 - type: map_at_1 value: 33.267 - type: map_at_3 value: 46.152 - type: map_at_5 value: 49.293 - type: map_at_10 value: 51.06699999999999 - type: map_at_20 value: 52.051 - type: map_at_100 value: 52.632 - type: map_at_1000 value: 52.686 - type: recall_at_1 value: 33.267 - type: recall_at_3 value: 55.48 - type: recall_at_5 value: 64.302 - type: recall_at_10 value: 72.08200000000001 - type: recall_at_20 value: 79.943 - type: recall_at_100 value: 91.377 - type: recall_at_1000 value: 96.152 - type: precision_at_1 value: 50.415 - type: precision_at_3 value: 30.152 - type: precision_at_5 value: 21.576999999999998 - type: precision_at_10 value: 12.49 - type: precision_at_20 value: 7.199 - type: precision_at_100 value: 1.699 - type: precision_at_1000 value: 0.182 - type: mrr_at_1 value: 50.414899999999996 - type: mrr_at_3 value: 58.9903 - type: mrr_at_5 value: 60.7123 - type: mrr_at_10 value: 61.388799999999996 - type: mrr_at_20 value: 61.804700000000004 - type: mrr_at_100 value: 61.9677 - type: mrr_at_1000 value: 61.9774 - type: nauc_ndcg_at_1_max value: 38.0582 - type: nauc_ndcg_at_1_std value: 10.7971 - type: nauc_ndcg_at_1_diff1 value: 39.3361 - type: nauc_ndcg_at_3_max value: 36.1772 - type: nauc_ndcg_at_3_std value: 6.7326 - type: nauc_ndcg_at_3_diff1 value: 35.3446 - type: nauc_ndcg_at_5_max value: 34.8851 - type: nauc_ndcg_at_5_std value: 6.4693000000000005 - type: nauc_ndcg_at_5_diff1 value: 36.4089 - type: nauc_ndcg_at_10_max value: 38.800200000000004 - type: nauc_ndcg_at_10_std value: 5.9294 - type: nauc_ndcg_at_10_diff1 value: 36.1487 - type: nauc_ndcg_at_20_max value: 39.557700000000004 - type: nauc_ndcg_at_20_std value: 7.1913 - type: nauc_ndcg_at_20_diff1 value: 35.476200000000006 - type: nauc_ndcg_at_100_max value: 40.7973 - type: nauc_ndcg_at_100_std value: 12.0762 - type: nauc_ndcg_at_100_diff1 value: 35.9479 - type: nauc_ndcg_at_1000_max value: 41.133900000000004 - type: nauc_ndcg_at_1000_std value: 12.3712 - type: nauc_ndcg_at_1000_diff1 value: 35.6136 - type: nauc_map_at_1_max value: 16.2887 - type: nauc_map_at_1_std value: -5.9883 - type: nauc_map_at_1_diff1 value: 44.4133 - type: nauc_map_at_3_max value: 30.484499999999997 - type: nauc_map_at_3_std value: 2.8722000000000003 - type: nauc_map_at_3_diff1 value: 37.9749 - type: nauc_map_at_5_max value: 31.883499999999998 - type: nauc_map_at_5_std value: 3.7571 - type: nauc_map_at_5_diff1 value: 37.655300000000004 - type: nauc_map_at_10_max value: 34.440799999999996 - type: nauc_map_at_10_std value: 3.7608 - type: nauc_map_at_10_diff1 value: 37.2883 - type: nauc_map_at_20_max value: 34.9033 - type: nauc_map_at_20_std value: 4.3576 - type: nauc_map_at_20_diff1 value: 37.0318 - type: nauc_map_at_100_max value: 35.2377 - type: nauc_map_at_100_std value: 5.3088999999999995 - type: nauc_map_at_100_diff1 value: 37.1107 - type: nauc_map_at_1000_max value: 35.281099999999995 - type: nauc_map_at_1000_std value: 5.3637999999999995 - type: nauc_map_at_1000_diff1 value: 37.0696 - type: nauc_recall_at_1_max value: 16.2887 - 
type: nauc_recall_at_1_std value: -5.9883 - type: nauc_recall_at_1_diff1 value: 44.4133 - type: nauc_recall_at_3_max value: 28.2547 - type: nauc_recall_at_3_std value: 1.4864 - type: nauc_recall_at_3_diff1 value: 32.121100000000006 - type: nauc_recall_at_5_max value: 27.503899999999998 - type: nauc_recall_at_5_std value: 2.3485 - type: nauc_recall_at_5_diff1 value: 31.1749 - type: nauc_recall_at_10_max value: 37.1037 - type: nauc_recall_at_10_std value: -1.0915 - type: nauc_recall_at_10_diff1 value: 30.7288 - type: nauc_recall_at_20_max value: 38.685900000000004 - type: nauc_recall_at_20_std value: -0.39540000000000003 - type: nauc_recall_at_20_diff1 value: 26.9173 - type: nauc_recall_at_100_max value: 52.7177 - type: nauc_recall_at_100_std value: 45.8168 - type: nauc_recall_at_100_diff1 value: 29.572599999999998 - type: nauc_recall_at_1000_max value: 81.5773 - type: nauc_recall_at_1000_std value: 86.1207 - type: nauc_recall_at_1000_diff1 value: 26.2688 - type: nauc_precision_at_1_max value: 38.0582 - type: nauc_precision_at_1_std value: 10.7971 - type: nauc_precision_at_1_diff1 value: 39.3361 - type: nauc_precision_at_3_max value: 48.16 - type: nauc_precision_at_3_std value: 25.037100000000002 - type: nauc_precision_at_3_diff1 value: 9.8087 - type: nauc_precision_at_5_max value: 45.5463 - type: nauc_precision_at_5_std value: 25.275399999999998 - type: nauc_precision_at_5_diff1 value: 3.3124000000000002 - type: nauc_precision_at_10_max value: 45.3542 - type: nauc_precision_at_10_std value: 21.1762 - type: nauc_precision_at_10_diff1 value: -3.5867999999999998 - type: nauc_precision_at_20_max value: 40.4771 - type: nauc_precision_at_20_std value: 25.006800000000002 - type: nauc_precision_at_20_diff1 value: -10.331700000000001 - type: nauc_precision_at_100_max value: 32.6887 - type: nauc_precision_at_100_std value: 34.5781 - type: nauc_precision_at_100_diff1 value: -16.628999999999998 - type: nauc_precision_at_1000_max value: 29.033399999999997 - type: nauc_precision_at_1000_std value: 33.129 - type: nauc_precision_at_1000_diff1 value: -19.7542 - type: nauc_mrr_at_1_max value: 38.0582 - type: nauc_mrr_at_1_std value: 10.7971 - type: nauc_mrr_at_1_diff1 value: 39.3361 - type: nauc_mrr_at_3_max value: 42.2985 - type: nauc_mrr_at_3_std value: 13.949900000000001 - type: nauc_mrr_at_3_diff1 value: 36.0085 - type: nauc_mrr_at_5_max value: 42.3132 - type: nauc_mrr_at_5_std value: 14.8284 - type: nauc_mrr_at_5_diff1 value: 36.0635 - type: nauc_mrr_at_10_max value: 42.6836 - type: nauc_mrr_at_10_std value: 14.1374 - type: nauc_mrr_at_10_diff1 value: 36.2117 - type: nauc_mrr_at_20_max value: 42.6572 - type: nauc_mrr_at_20_std value: 14.2714 - type: nauc_mrr_at_20_diff1 value: 36.0993 - type: nauc_mrr_at_100_max value: 42.663000000000004 - type: nauc_mrr_at_100_std value: 14.5399 - type: nauc_mrr_at_100_diff1 value: 36.214600000000004 - type: nauc_mrr_at_1000_max value: 42.6543 - type: nauc_mrr_at_1000_std value: 14.5232 - type: nauc_mrr_at_1000_diff1 value: 36.219699999999996 - type: main_score value: 59.111000000000004 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (te) type: miracl/mmteb-miracl config: te split: dev revision: main metrics: - type: ndcg_at_1 value: 64.372 - type: ndcg_at_3 value: 74.856 - type: ndcg_at_5 value: 77.128 - type: ndcg_at_10 value: 78.175 - type: ndcg_at_20 value: 78.826 - type: ndcg_at_100 value: 79.523 - type: ndcg_at_1000 value: 79.774 - type: map_at_1 value: 63.688 - type: map_at_3 value: 72.262 - type: map_at_5 value: 73.56700000000001 - type: map_at_10 
value: 74.022 - type: map_at_20 value: 74.217 - type: map_at_100 value: 74.316 - type: map_at_1000 value: 74.32600000000001 - type: recall_at_1 value: 63.688 - type: recall_at_3 value: 81.804 - type: recall_at_5 value: 87.198 - type: recall_at_10 value: 90.358 - type: recall_at_20 value: 92.834 - type: recall_at_100 value: 96.55799999999999 - type: recall_at_1000 value: 98.47 - type: precision_at_1 value: 64.372 - type: precision_at_3 value: 27.858 - type: precision_at_5 value: 17.849999999999998 - type: precision_at_10 value: 9.263 - type: precision_at_20 value: 4.771 - type: precision_at_100 value: 0.993 - type: precision_at_1000 value: 0.101 - type: mrr_at_1 value: 64.372 - type: mrr_at_3 value: 72.7456 - type: mrr_at_5 value: 73.9654 - type: mrr_at_10 value: 74.3824 - type: mrr_at_20 value: 74.5572 - type: mrr_at_100 value: 74.6496 - type: mrr_at_1000 value: 74.65889999999999 - type: nauc_ndcg_at_1_max value: 39.271699999999996 - type: nauc_ndcg_at_1_std value: -24.310499999999998 - type: nauc_ndcg_at_1_diff1 value: 58.76440000000001 - type: nauc_ndcg_at_3_max value: 42.7376 - type: nauc_ndcg_at_3_std value: -25.2897 - type: nauc_ndcg_at_3_diff1 value: 55.2624 - type: nauc_ndcg_at_5_max value: 45.5625 - type: nauc_ndcg_at_5_std value: -22.595299999999998 - type: nauc_ndcg_at_5_diff1 value: 54.2902 - type: nauc_ndcg_at_10_max value: 46.581 - type: nauc_ndcg_at_10_std value: -20.4188 - type: nauc_ndcg_at_10_diff1 value: 53.76800000000001 - type: nauc_ndcg_at_20_max value: 45.912 - type: nauc_ndcg_at_20_std value: -20.7345 - type: nauc_ndcg_at_20_diff1 value: 53.597300000000004 - type: nauc_ndcg_at_100_max value: 45.4388 - type: nauc_ndcg_at_100_std value: -20.569499999999998 - type: nauc_ndcg_at_100_diff1 value: 54.1768 - type: nauc_ndcg_at_1000_max value: 44.8662 - type: nauc_ndcg_at_1000_std value: -20.9083 - type: nauc_ndcg_at_1000_diff1 value: 54.316900000000004 - type: nauc_map_at_1_max value: 38.1714 - type: nauc_map_at_1_std value: -25.8547 - type: nauc_map_at_1_diff1 value: 58.801700000000004 - type: nauc_map_at_3_max value: 41.6072 - type: nauc_map_at_3_std value: -25.716299999999997 - type: nauc_map_at_3_diff1 value: 55.9906 - type: nauc_map_at_5_max value: 43.061899999999994 - type: nauc_map_at_5_std value: -24.2147 - type: nauc_map_at_5_diff1 value: 55.4852 - type: nauc_map_at_10_max value: 43.452 - type: nauc_map_at_10_std value: -23.4256 - type: nauc_map_at_10_diff1 value: 55.3427 - type: nauc_map_at_20_max value: 43.305 - type: nauc_map_at_20_std value: -23.424500000000002 - type: nauc_map_at_20_diff1 value: 55.31120000000001 - type: nauc_map_at_100_max value: 43.2512 - type: nauc_map_at_100_std value: -23.3786 - type: nauc_map_at_100_diff1 value: 55.3755 - type: nauc_map_at_1000_max value: 43.2306 - type: nauc_map_at_1000_std value: -23.380699999999997 - type: nauc_map_at_1000_diff1 value: 55.378899999999994 - type: nauc_recall_at_1_max value: 38.1714 - type: nauc_recall_at_1_std value: -25.8547 - type: nauc_recall_at_1_diff1 value: 58.801700000000004 - type: nauc_recall_at_3_max value: 46.7953 - type: nauc_recall_at_3_std value: -25.092100000000002 - type: nauc_recall_at_3_diff1 value: 52.0717 - type: nauc_recall_at_5_max value: 58.675399999999996 - type: nauc_recall_at_5_std value: -15.456100000000001 - type: nauc_recall_at_5_diff1 value: 47.4131 - type: nauc_recall_at_10_max value: 67.7093 - type: nauc_recall_at_10_std value: -0.5740000000000001 - type: nauc_recall_at_10_diff1 value: 42.2693 - type: nauc_recall_at_20_max value: 68.11160000000001 - type: 
nauc_recall_at_20_std value: 1.8836 - type: nauc_recall_at_20_diff1 value: 36.960300000000004 - type: nauc_recall_at_100_max value: 78.39620000000001 - type: nauc_recall_at_100_std value: 27.515299999999996 - type: nauc_recall_at_100_diff1 value: 35.8977 - type: nauc_recall_at_1000_max value: 71.4983 - type: nauc_recall_at_1000_std value: 50.89939999999999 - type: nauc_recall_at_1000_diff1 value: 28.7768 - type: nauc_precision_at_1_max value: 39.271699999999996 - type: nauc_precision_at_1_std value: -24.310499999999998 - type: nauc_precision_at_1_diff1 value: 58.76440000000001 - type: nauc_precision_at_3_max value: 46.5473 - type: nauc_precision_at_3_std value: -16.3903 - type: nauc_precision_at_3_diff1 value: 43.1862 - type: nauc_precision_at_5_max value: 53.557500000000005 - type: nauc_precision_at_5_std value: -1.2877 - type: nauc_precision_at_5_diff1 value: 31.9181 - type: nauc_precision_at_10_max value: 55.428599999999996 - type: nauc_precision_at_10_std value: 12.8033 - type: nauc_precision_at_10_diff1 value: 22.756 - type: nauc_precision_at_20_max value: 49.0193 - type: nauc_precision_at_20_std value: 19.6821 - type: nauc_precision_at_20_diff1 value: 12.0609 - type: nauc_precision_at_100_max value: 40.4145 - type: nauc_precision_at_100_std value: 38.3506 - type: nauc_precision_at_100_diff1 value: -1.6396000000000002 - type: nauc_precision_at_1000_max value: 19.25 - type: nauc_precision_at_1000_std value: 41.2279 - type: nauc_precision_at_1000_diff1 value: -17.3722 - type: nauc_mrr_at_1_max value: 39.271699999999996 - type: nauc_mrr_at_1_std value: -24.310499999999998 - type: nauc_mrr_at_1_diff1 value: 58.76440000000001 - type: nauc_mrr_at_3_max value: 41.6685 - type: nauc_mrr_at_3_std value: -24.4404 - type: nauc_mrr_at_3_diff1 value: 56.1212 - type: nauc_mrr_at_5_max value: 42.9495 - type: nauc_mrr_at_5_std value: -23.378899999999998 - type: nauc_mrr_at_5_diff1 value: 55.7671 - type: nauc_mrr_at_10_max value: 43.371900000000004 - type: nauc_mrr_at_10_std value: -22.5248 - type: nauc_mrr_at_10_diff1 value: 55.5427 - type: nauc_mrr_at_20_max value: 43.1738 - type: nauc_mrr_at_20_std value: -22.6888 - type: nauc_mrr_at_20_diff1 value: 55.5207 - type: nauc_mrr_at_100_max value: 43.1156 - type: nauc_mrr_at_100_std value: -22.6434 - type: nauc_mrr_at_100_diff1 value: 55.5733 - type: nauc_mrr_at_1000_max value: 43.0971 - type: nauc_mrr_at_1000_std value: -22.6431 - type: nauc_mrr_at_1000_diff1 value: 55.5782 - type: main_score value: 78.175 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (th) type: miracl/mmteb-miracl config: th split: dev revision: main metrics: - type: ndcg_at_1 value: 65.484 - type: ndcg_at_3 value: 66.199 - type: ndcg_at_5 value: 68.451 - type: ndcg_at_10 value: 71.774 - type: ndcg_at_20 value: 73.709 - type: ndcg_at_100 value: 75.362 - type: ndcg_at_1000 value: 75.898 - type: map_at_1 value: 45.911 - type: map_at_3 value: 59.53000000000001 - type: map_at_5 value: 62.150000000000006 - type: map_at_10 value: 64.336 - type: map_at_20 value: 65.262 - type: map_at_100 value: 65.659 - type: map_at_1000 value: 65.694 - type: recall_at_1 value: 45.911 - type: recall_at_3 value: 67.437 - type: recall_at_5 value: 73.786 - type: recall_at_10 value: 82.619 - type: recall_at_20 value: 88.447 - type: recall_at_100 value: 95.515 - type: recall_at_1000 value: 98.854 - type: precision_at_1 value: 65.484 - type: precision_at_3 value: 35.471000000000004 - type: precision_at_5 value: 24.229 - type: precision_at_10 value: 14.188 - type: precision_at_20 value: 
7.843999999999999 - type: precision_at_100 value: 1.733 - type: precision_at_1000 value: 0.181 - type: mrr_at_1 value: 65.48429999999999 - type: mrr_at_3 value: 73.2378 - type: mrr_at_5 value: 74.1314 - type: mrr_at_10 value: 74.8844 - type: mrr_at_20 value: 75.07639999999999 - type: mrr_at_100 value: 75.1632 - type: mrr_at_1000 value: 75.1698 - type: nauc_ndcg_at_1_max value: 42.345 - type: nauc_ndcg_at_1_std value: 12.6892 - type: nauc_ndcg_at_1_diff1 value: 42.4669 - type: nauc_ndcg_at_3_max value: 38.8148 - type: nauc_ndcg_at_3_std value: 3.5637000000000003 - type: nauc_ndcg_at_3_diff1 value: 34.8248 - type: nauc_ndcg_at_5_max value: 38.7175 - type: nauc_ndcg_at_5_std value: 1.6251000000000002 - type: nauc_ndcg_at_5_diff1 value: 34.1513 - type: nauc_ndcg_at_10_max value: 40.038000000000004 - type: nauc_ndcg_at_10_std value: 2.8985 - type: nauc_ndcg_at_10_diff1 value: 33.4189 - type: nauc_ndcg_at_20_max value: 41.722 - type: nauc_ndcg_at_20_std value: 6.819100000000001 - type: nauc_ndcg_at_20_diff1 value: 33.5606 - type: nauc_ndcg_at_100_max value: 42.2102 - type: nauc_ndcg_at_100_std value: 8.309099999999999 - type: nauc_ndcg_at_100_diff1 value: 34.036899999999996 - type: nauc_ndcg_at_1000_max value: 41.9273 - type: nauc_ndcg_at_1000_std value: 8.3582 - type: nauc_ndcg_at_1000_diff1 value: 34.4614 - type: nauc_map_at_1_max value: 20.202 - type: nauc_map_at_1_std value: -8.095099999999999 - type: nauc_map_at_1_diff1 value: 42.2902 - type: nauc_map_at_3_max value: 33.0956 - type: nauc_map_at_3_std value: -3.7472 - type: nauc_map_at_3_diff1 value: 36.3181 - type: nauc_map_at_5_max value: 34.3309 - type: nauc_map_at_5_std value: -3.0949999999999998 - type: nauc_map_at_5_diff1 value: 35.441 - type: nauc_map_at_10_max value: 35.924 - type: nauc_map_at_10_std value: -1.3787 - type: nauc_map_at_10_diff1 value: 35.0315 - type: nauc_map_at_20_max value: 36.7677 - type: nauc_map_at_20_std value: 0.4997 - type: nauc_map_at_20_diff1 value: 35.037600000000005 - type: nauc_map_at_100_max value: 36.8927 - type: nauc_map_at_100_std value: 0.8881999999999999 - type: nauc_map_at_100_diff1 value: 35.0792 - type: nauc_map_at_1000_max value: 36.897999999999996 - type: nauc_map_at_1000_std value: 0.9301 - type: nauc_map_at_1000_diff1 value: 35.0961 - type: nauc_recall_at_1_max value: 20.202 - type: nauc_recall_at_1_std value: -8.095099999999999 - type: nauc_recall_at_1_diff1 value: 42.2902 - type: nauc_recall_at_3_max value: 33.1749 - type: nauc_recall_at_3_std value: -4.6383 - type: nauc_recall_at_3_diff1 value: 30.5276 - type: nauc_recall_at_5_max value: 35.2372 - type: nauc_recall_at_5_std value: -6.0825 - type: nauc_recall_at_5_diff1 value: 27.128200000000003 - type: nauc_recall_at_10_max value: 37.465199999999996 - type: nauc_recall_at_10_std value: -4.937600000000001 - type: nauc_recall_at_10_diff1 value: 21.6784 - type: nauc_recall_at_20_max value: 45.9944 - type: nauc_recall_at_20_std value: 10.5054 - type: nauc_recall_at_20_diff1 value: 19.4427 - type: nauc_recall_at_100_max value: 60.7611 - type: nauc_recall_at_100_std value: 35.4282 - type: nauc_recall_at_100_diff1 value: 14.2406 - type: nauc_recall_at_1000_max value: 83.2149 - type: nauc_recall_at_1000_std value: 87.3129 - type: nauc_recall_at_1000_diff1 value: 15.7695 - type: nauc_precision_at_1_max value: 42.345 - type: nauc_precision_at_1_std value: 12.6892 - type: nauc_precision_at_1_diff1 value: 42.4669 - type: nauc_precision_at_3_max value: 38.0839 - type: nauc_precision_at_3_std value: 22.0767 - type: nauc_precision_at_3_diff1 value: 
1.4477 - type: nauc_precision_at_5_max value: 31.290499999999998 - type: nauc_precision_at_5_std value: 23.3095 - type: nauc_precision_at_5_diff1 value: -5.9094 - type: nauc_precision_at_10_max value: 25.186199999999996 - type: nauc_precision_at_10_std value: 27.7866 - type: nauc_precision_at_10_diff1 value: -12.773200000000001 - type: nauc_precision_at_20_max value: 21.0353 - type: nauc_precision_at_20_std value: 33.7266 - type: nauc_precision_at_20_diff1 value: -15.188699999999999 - type: nauc_precision_at_100_max value: 16.1451 - type: nauc_precision_at_100_std value: 35.4163 - type: nauc_precision_at_100_diff1 value: -17.631800000000002 - type: nauc_precision_at_1000_max value: 12.2855 - type: nauc_precision_at_1000_std value: 34.2766 - type: nauc_precision_at_1000_diff1 value: -17.664099999999998 - type: nauc_mrr_at_1_max value: 42.345 - type: nauc_mrr_at_1_std value: 12.6892 - type: nauc_mrr_at_1_diff1 value: 42.4669 - type: nauc_mrr_at_3_max value: 47.5742 - type: nauc_mrr_at_3_std value: 17.136499999999998 - type: nauc_mrr_at_3_diff1 value: 37.68 - type: nauc_mrr_at_5_max value: 47.510799999999996 - type: nauc_mrr_at_5_std value: 17.1225 - type: nauc_mrr_at_5_diff1 value: 37.485099999999996 - type: nauc_mrr_at_10_max value: 47.2849 - type: nauc_mrr_at_10_std value: 17.2096 - type: nauc_mrr_at_10_diff1 value: 37.2312 - type: nauc_mrr_at_20_max value: 47.3962 - type: nauc_mrr_at_20_std value: 17.2426 - type: nauc_mrr_at_20_diff1 value: 37.500499999999995 - type: nauc_mrr_at_100_max value: 47.344 - type: nauc_mrr_at_100_std value: 17.144499999999997 - type: nauc_mrr_at_100_diff1 value: 37.5291 - type: nauc_mrr_at_1000_max value: 47.3332 - type: nauc_mrr_at_1000_std value: 17.1381 - type: nauc_mrr_at_1000_diff1 value: 37.532199999999996 - type: main_score value: 71.774 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (yo) type: miracl/mmteb-miracl config: yo split: dev revision: main metrics: - type: ndcg_at_1 value: 46.217999999999996 - type: ndcg_at_3 value: 57.609 - type: ndcg_at_5 value: 62.021 - type: ndcg_at_10 value: 64.685 - type: ndcg_at_20 value: 65.548 - type: ndcg_at_100 value: 66.94099999999999 - type: ndcg_at_1000 value: 67.361 - type: map_at_1 value: 42.787 - type: map_at_3 value: 53.852 - type: map_at_5 value: 56.541 - type: map_at_10 value: 57.924 - type: map_at_20 value: 58.223 - type: map_at_100 value: 58.41499999999999 - type: map_at_1000 value: 58.43000000000001 - type: recall_at_1 value: 42.787 - type: recall_at_3 value: 65.40599999999999 - type: recall_at_5 value: 75.42 - type: recall_at_10 value: 82.913 - type: recall_at_20 value: 85.994 - type: recall_at_100 value: 93.277 - type: recall_at_1000 value: 96.499 - type: precision_at_1 value: 46.217999999999996 - type: precision_at_3 value: 24.37 - type: precision_at_5 value: 17.479 - type: precision_at_10 value: 9.748 - type: precision_at_20 value: 5.0840000000000005 - type: precision_at_100 value: 1.109 - type: precision_at_1000 value: 0.116 - type: mrr_at_1 value: 46.2185 - type: mrr_at_3 value: 56.582600000000006 - type: mrr_at_5 value: 58.977599999999995 - type: mrr_at_10 value: 59.890299999999996 - type: mrr_at_20 value: 60.077999999999996 - type: mrr_at_100 value: 60.2472 - type: mrr_at_1000 value: 60.2553 - type: nauc_ndcg_at_1_max value: 15.3057 - type: nauc_ndcg_at_1_std value: -20.3881 - type: nauc_ndcg_at_1_diff1 value: 51.7456 - type: nauc_ndcg_at_3_max value: 17.750799999999998 - type: nauc_ndcg_at_3_std value: -9.165 - type: nauc_ndcg_at_3_diff1 value: 53.4833 - type: nauc_ndcg_at_5_max 
value: 18.6146 - type: nauc_ndcg_at_5_std value: -3.832 - type: nauc_ndcg_at_5_diff1 value: 52.8833 - type: nauc_ndcg_at_10_max value: 20.4881 - type: nauc_ndcg_at_10_std value: -3.7813 - type: nauc_ndcg_at_10_diff1 value: 53.873099999999994 - type: nauc_ndcg_at_20_max value: 22.234499999999997 - type: nauc_ndcg_at_20_std value: -4.5588999999999995 - type: nauc_ndcg_at_20_diff1 value: 53.75149999999999 - type: nauc_ndcg_at_100_max value: 22.5348 - type: nauc_ndcg_at_100_std value: -5.6818 - type: nauc_ndcg_at_100_diff1 value: 54.996199999999995 - type: nauc_ndcg_at_1000_max value: 21.8399 - type: nauc_ndcg_at_1000_std value: -6.904000000000001 - type: nauc_ndcg_at_1000_diff1 value: 54.5607 - type: nauc_map_at_1_max value: 11.5768 - type: nauc_map_at_1_std value: -16.317400000000003 - type: nauc_map_at_1_diff1 value: 56.0748 - type: nauc_map_at_3_max value: 14.5127 - type: nauc_map_at_3_std value: -9.9466 - type: nauc_map_at_3_diff1 value: 54.4564 - type: nauc_map_at_5_max value: 15.6777 - type: nauc_map_at_5_std value: -7.3351 - type: nauc_map_at_5_diff1 value: 53.8739 - type: nauc_map_at_10_max value: 17.380200000000002 - type: nauc_map_at_10_std value: -7.8866000000000005 - type: nauc_map_at_10_diff1 value: 54.17380000000001 - type: nauc_map_at_20_max value: 17.7812 - type: nauc_map_at_20_std value: -8.1005 - type: nauc_map_at_20_diff1 value: 54.16029999999999 - type: nauc_map_at_100_max value: 17.8472 - type: nauc_map_at_100_std value: -8.197899999999999 - type: nauc_map_at_100_diff1 value: 54.3604 - type: nauc_map_at_1000_max value: 17.838 - type: nauc_map_at_1000_std value: -8.241800000000001 - type: nauc_map_at_1000_diff1 value: 54.3379 - type: nauc_recall_at_1_max value: 11.5768 - type: nauc_recall_at_1_std value: -16.317400000000003 - type: nauc_recall_at_1_diff1 value: 56.0748 - type: nauc_recall_at_3_max value: 19.2218 - type: nauc_recall_at_3_std value: -0.9331 - type: nauc_recall_at_3_diff1 value: 52.159299999999995 - type: nauc_recall_at_5_max value: 23.1526 - type: nauc_recall_at_5_std value: 18.569399999999998 - type: nauc_recall_at_5_diff1 value: 49.3007 - type: nauc_recall_at_10_max value: 30.9861 - type: nauc_recall_at_10_std value: 29.1945 - type: nauc_recall_at_10_diff1 value: 53.94520000000001 - type: nauc_recall_at_20_max value: 45.5532 - type: nauc_recall_at_20_std value: 30.500500000000002 - type: nauc_recall_at_20_diff1 value: 53.197799999999994 - type: nauc_recall_at_100_max value: 69.0118 - type: nauc_recall_at_100_std value: 42.4681 - type: nauc_recall_at_100_diff1 value: 73.61229999999999 - type: nauc_recall_at_1000_max value: 73.9661 - type: nauc_recall_at_1000_std value: 27.5085 - type: nauc_recall_at_1000_diff1 value: 75.1985 - type: nauc_precision_at_1_max value: 15.3057 - type: nauc_precision_at_1_std value: -20.3881 - type: nauc_precision_at_1_diff1 value: 51.7456 - type: nauc_precision_at_3_max value: 24.9404 - type: nauc_precision_at_3_std value: -5.6223 - type: nauc_precision_at_3_diff1 value: 33.2281 - type: nauc_precision_at_5_max value: 23.1681 - type: nauc_precision_at_5_std value: 3.7264 - type: nauc_precision_at_5_diff1 value: 13.463700000000001 - type: nauc_precision_at_10_max value: 27.1828 - type: nauc_precision_at_10_std value: 0.2287 - type: nauc_precision_at_10_diff1 value: 3.3236000000000003 - type: nauc_precision_at_20_max value: 30.8431 - type: nauc_precision_at_20_std value: -1.7745 - type: nauc_precision_at_20_diff1 value: -1.4821 - type: nauc_precision_at_100_max value: 31.920399999999997 - type: nauc_precision_at_100_std value: 
-9.9216 - type: nauc_precision_at_100_diff1 value: -12.0477 - type: nauc_precision_at_1000_max value: 21.9173 - type: nauc_precision_at_1000_std value: -20.7394 - type: nauc_precision_at_1000_diff1 value: -23.9441 - type: nauc_mrr_at_1_max value: 15.3057 - type: nauc_mrr_at_1_std value: -20.3881 - type: nauc_mrr_at_1_diff1 value: 51.7456 - type: nauc_mrr_at_3_max value: 20.1871 - type: nauc_mrr_at_3_std value: -15.1173 - type: nauc_mrr_at_3_diff1 value: 52.30089999999999 - type: nauc_mrr_at_5_max value: 20.514599999999998 - type: nauc_mrr_at_5_std value: -12.8977 - type: nauc_mrr_at_5_diff1 value: 52.350300000000004 - type: nauc_mrr_at_10_max value: 20.4557 - type: nauc_mrr_at_10_std value: -12.6083 - type: nauc_mrr_at_10_diff1 value: 52.766000000000005 - type: nauc_mrr_at_20_max value: 20.7793 - type: nauc_mrr_at_20_std value: -12.8431 - type: nauc_mrr_at_20_diff1 value: 52.6664 - type: nauc_mrr_at_100_max value: 20.8067 - type: nauc_mrr_at_100_std value: -12.9037 - type: nauc_mrr_at_100_diff1 value: 52.86729999999999 - type: nauc_mrr_at_1000_max value: 20.793 - type: nauc_mrr_at_1000_std value: -12.924900000000001 - type: nauc_mrr_at_1000_diff1 value: 52.8605 - type: main_score value: 64.685 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (zh) type: miracl/mmteb-miracl config: zh split: dev revision: main metrics: - type: ndcg_at_1 value: 41.985 - type: ndcg_at_3 value: 42.094 - type: ndcg_at_5 value: 44.273 - type: ndcg_at_10 value: 48.370000000000005 - type: ndcg_at_20 value: 51.595 - type: ndcg_at_100 value: 55.961000000000006 - type: ndcg_at_1000 value: 57.620000000000005 - type: map_at_1 value: 21.446 - type: map_at_3 value: 32.499 - type: map_at_5 value: 35.772 - type: map_at_10 value: 38.567 - type: map_at_20 value: 39.98 - type: map_at_100 value: 40.992 - type: map_at_1000 value: 41.119 - type: recall_at_1 value: 21.446 - type: recall_at_3 value: 40.377 - type: recall_at_5 value: 49.03 - type: recall_at_10 value: 59.695 - type: recall_at_20 value: 69.25200000000001 - type: recall_at_100 value: 87.388 - type: recall_at_1000 value: 96.833 - type: precision_at_1 value: 41.985 - type: precision_at_3 value: 29.008 - type: precision_at_5 value: 21.985 - type: precision_at_10 value: 14.097000000000001 - type: precision_at_20 value: 8.346 - type: precision_at_100 value: 2.155 - type: precision_at_1000 value: 0.243 - type: mrr_at_1 value: 41.984700000000004 - type: mrr_at_3 value: 52.078 - type: mrr_at_5 value: 53.5284 - type: mrr_at_10 value: 54.4979 - type: mrr_at_20 value: 54.9953 - type: mrr_at_100 value: 55.2428 - type: mrr_at_1000 value: 55.263 - type: nauc_ndcg_at_1_max value: 41.7348 - type: nauc_ndcg_at_1_std value: 23.8594 - type: nauc_ndcg_at_1_diff1 value: 31.156299999999998 - type: nauc_ndcg_at_3_max value: 39.0525 - type: nauc_ndcg_at_3_std value: 21.7916 - type: nauc_ndcg_at_3_diff1 value: 23.9925 - type: nauc_ndcg_at_5_max value: 33.8643 - type: nauc_ndcg_at_5_std value: 16.3399 - type: nauc_ndcg_at_5_diff1 value: 26.001 - type: nauc_ndcg_at_10_max value: 35.3007 - type: nauc_ndcg_at_10_std value: 19.127 - type: nauc_ndcg_at_10_diff1 value: 25.444899999999997 - type: nauc_ndcg_at_20_max value: 37.6068 - type: nauc_ndcg_at_20_std value: 23.0043 - type: nauc_ndcg_at_20_diff1 value: 23.7603 - type: nauc_ndcg_at_100_max value: 40.4028 - type: nauc_ndcg_at_100_std value: 25.0083 - type: nauc_ndcg_at_100_diff1 value: 23.491999999999997 - type: nauc_ndcg_at_1000_max value: 39.8716 - type: nauc_ndcg_at_1000_std value: 24.7264 - type: nauc_ndcg_at_1000_diff1 value: 
24.6697 - type: nauc_map_at_1_max value: 25.7275 - type: nauc_map_at_1_std value: 7.7392 - type: nauc_map_at_1_diff1 value: 36.5897 - type: nauc_map_at_3_max value: 32.2774 - type: nauc_map_at_3_std value: 12.2275 - type: nauc_map_at_3_diff1 value: 28.8092 - type: nauc_map_at_5_max value: 31.183899999999998 - type: nauc_map_at_5_std value: 12.1811 - type: nauc_map_at_5_diff1 value: 28.532400000000003 - type: nauc_map_at_10_max value: 33.4812 - type: nauc_map_at_10_std value: 15.6339 - type: nauc_map_at_10_diff1 value: 27.695999999999998 - type: nauc_map_at_20_max value: 34.855999999999995 - type: nauc_map_at_20_std value: 17.8001 - type: nauc_map_at_20_diff1 value: 26.3975 - type: nauc_map_at_100_max value: 35.8497 - type: nauc_map_at_100_std value: 18.688 - type: nauc_map_at_100_diff1 value: 26.177899999999998 - type: nauc_map_at_1000_max value: 35.8459 - type: nauc_map_at_1000_std value: 18.7007 - type: nauc_map_at_1000_diff1 value: 26.257200000000005 - type: nauc_recall_at_1_max value: 25.7275 - type: nauc_recall_at_1_std value: 7.7392 - type: nauc_recall_at_1_diff1 value: 36.5897 - type: nauc_recall_at_3_max value: 27.052100000000003 - type: nauc_recall_at_3_std value: 9.632100000000001 - type: nauc_recall_at_3_diff1 value: 21.557399999999998 - type: nauc_recall_at_5_max value: 21.0442 - type: nauc_recall_at_5_std value: 5.7371 - type: nauc_recall_at_5_diff1 value: 20.653399999999998 - type: nauc_recall_at_10_max value: 23.794 - type: nauc_recall_at_10_std value: 12.2208 - type: nauc_recall_at_10_diff1 value: 17.305899999999998 - type: nauc_recall_at_20_max value: 27.5932 - type: nauc_recall_at_20_std value: 21.4346 - type: nauc_recall_at_20_diff1 value: 12.7064 - type: nauc_recall_at_100_max value: 41.801300000000005 - type: nauc_recall_at_100_std value: 36.4593 - type: nauc_recall_at_100_diff1 value: 5.7783 - type: nauc_recall_at_1000_max value: 45.8507 - type: nauc_recall_at_1000_std value: 66.6031 - type: nauc_recall_at_1000_diff1 value: 25.4961 - type: nauc_precision_at_1_max value: 41.7348 - type: nauc_precision_at_1_std value: 23.8594 - type: nauc_precision_at_1_diff1 value: 31.156299999999998 - type: nauc_precision_at_3_max value: 43.336999999999996 - type: nauc_precision_at_3_std value: 29.3989 - type: nauc_precision_at_3_diff1 value: 6.0378 - type: nauc_precision_at_5_max value: 33.3518 - type: nauc_precision_at_5_std value: 25.115199999999998 - type: nauc_precision_at_5_diff1 value: 3.9284 - type: nauc_precision_at_10_max value: 33.466699999999996 - type: nauc_precision_at_10_std value: 31.710300000000004 - type: nauc_precision_at_10_diff1 value: -2.0225 - type: nauc_precision_at_20_max value: 33.651199999999996 - type: nauc_precision_at_20_std value: 37.601600000000005 - type: nauc_precision_at_20_diff1 value: -9.591 - type: nauc_precision_at_100_max value: 28.992 - type: nauc_precision_at_100_std value: 33.631499999999996 - type: nauc_precision_at_100_diff1 value: -13.5546 - type: nauc_precision_at_1000_max value: 20.091 - type: nauc_precision_at_1000_std value: 26.9179 - type: nauc_precision_at_1000_diff1 value: -12.1766 - type: nauc_mrr_at_1_max value: 41.7348 - type: nauc_mrr_at_1_std value: 23.8594 - type: nauc_mrr_at_1_diff1 value: 31.156299999999998 - type: nauc_mrr_at_3_max value: 43.2795 - type: nauc_mrr_at_3_std value: 26.991500000000002 - type: nauc_mrr_at_3_diff1 value: 25.8376 - type: nauc_mrr_at_5_max value: 42.1564 - type: nauc_mrr_at_5_std value: 25.923299999999998 - type: nauc_mrr_at_5_diff1 value: 26.770500000000002 - type: nauc_mrr_at_10_max value: 42.054 
- type: nauc_mrr_at_10_std value: 26.1554 - type: nauc_mrr_at_10_diff1 value: 26.4021 - type: nauc_mrr_at_20_max value: 42.3932 - type: nauc_mrr_at_20_std value: 26.5486 - type: nauc_mrr_at_20_diff1 value: 26.616400000000002 - type: nauc_mrr_at_100_max value: 42.4887 - type: nauc_mrr_at_100_std value: 26.4708 - type: nauc_mrr_at_100_diff1 value: 26.671899999999997 - type: nauc_mrr_at_1000_max value: 42.478500000000004 - type: nauc_mrr_at_1000_std value: 26.4606 - type: nauc_mrr_at_1000_diff1 value: 26.6946 - type: main_score value: 48.370000000000005 - task: type: Retrieval dataset: name: MTEB MSMARCO (default) type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: ndcg_at_1 value: 13.653 - type: ndcg_at_3 value: 21.836 - type: ndcg_at_5 value: 25.014999999999997 - type: ndcg_at_10 value: 28.319 - type: ndcg_at_20 value: 30.818 - type: ndcg_at_100 value: 34.527 - type: ndcg_at_1000 value: 36.702 - type: map_at_1 value: 13.313 - type: map_at_3 value: 19.615 - type: map_at_5 value: 21.389 - type: map_at_10 value: 22.768 - type: map_at_20 value: 23.465 - type: map_at_100 value: 23.976 - type: map_at_1000 value: 24.058 - type: recall_at_1 value: 13.313 - type: recall_at_3 value: 27.839999999999996 - type: recall_at_5 value: 35.481 - type: recall_at_10 value: 45.559 - type: recall_at_20 value: 55.301 - type: recall_at_100 value: 75.11 - type: recall_at_1000 value: 92.052 - type: precision_at_1 value: 13.653 - type: precision_at_3 value: 9.565 - type: precision_at_5 value: 7.338 - type: precision_at_10 value: 4.726 - type: precision_at_20 value: 2.8819999999999997 - type: precision_at_100 value: 0.79 - type: precision_at_1000 value: 0.098 - type: mrr_at_1 value: 13.653299999999998 - type: mrr_at_3 value: 20.0573 - type: mrr_at_5 value: 21.8295 - type: mrr_at_10 value: 23.1997 - type: mrr_at_20 value: 23.8785 - type: mrr_at_100 value: 24.3729 - type: mrr_at_1000 value: 24.448600000000003 - type: nauc_ndcg_at_1_max value: 0.364 - type: nauc_ndcg_at_1_std value: -12.840399999999999 - type: nauc_ndcg_at_1_diff1 value: 29.834699999999998 - type: nauc_ndcg_at_3_max value: 1.9428 - type: nauc_ndcg_at_3_std value: -13.696 - type: nauc_ndcg_at_3_diff1 value: 24.9774 - type: nauc_ndcg_at_5_max value: 2.5951 - type: nauc_ndcg_at_5_std value: -13.2667 - type: nauc_ndcg_at_5_diff1 value: 24.7581 - type: nauc_ndcg_at_10_max value: 3.0274 - type: nauc_ndcg_at_10_std value: -11.790799999999999 - type: nauc_ndcg_at_10_diff1 value: 23.9473 - type: nauc_ndcg_at_20_max value: 3.5682 - type: nauc_ndcg_at_20_std value: -10.132299999999999 - type: nauc_ndcg_at_20_diff1 value: 23.744100000000003 - type: nauc_ndcg_at_100_max value: 5.1290000000000004 - type: nauc_ndcg_at_100_std value: -6.8011 - type: nauc_ndcg_at_100_diff1 value: 23.6972 - type: nauc_ndcg_at_1000_max value: 5.1967 - type: nauc_ndcg_at_1000_std value: -7.396700000000001 - type: nauc_ndcg_at_1000_diff1 value: 24.1353 - type: nauc_map_at_1_max value: 0.35200000000000004 - type: nauc_map_at_1_std value: -12.8008 - type: nauc_map_at_1_diff1 value: 30.121199999999998 - type: nauc_map_at_3_max value: 1.6415 - type: nauc_map_at_3_std value: -13.5187 - type: nauc_map_at_3_diff1 value: 25.9894 - type: nauc_map_at_5_max value: 2.0264 - type: nauc_map_at_5_std value: -13.281 - type: nauc_map_at_5_diff1 value: 25.849 - type: nauc_map_at_10_max value: 2.1982 - type: nauc_map_at_10_std value: -12.6435 - type: nauc_map_at_10_diff1 value: 25.477100000000004 - type: nauc_map_at_20_max value: 2.3562 - type: 
nauc_map_at_20_std value: -12.1675 - type: nauc_map_at_20_diff1 value: 25.4162 - type: nauc_map_at_100_max value: 2.5839999999999996 - type: nauc_map_at_100_std value: -11.7018 - type: nauc_map_at_100_diff1 value: 25.4093 - type: nauc_map_at_1000_max value: 2.5871999999999997 - type: nauc_map_at_1000_std value: -11.7103 - type: nauc_map_at_1000_diff1 value: 25.424999999999997 - type: nauc_recall_at_1_max value: 0.35200000000000004 - type: nauc_recall_at_1_std value: -12.8008 - type: nauc_recall_at_1_diff1 value: 30.121199999999998 - type: nauc_recall_at_3_max value: 2.6834000000000002 - type: nauc_recall_at_3_std value: -14.0991 - type: nauc_recall_at_3_diff1 value: 22.6158 - type: nauc_recall_at_5_max value: 3.9472 - type: nauc_recall_at_5_std value: -13.167499999999999 - type: nauc_recall_at_5_diff1 value: 22.2686 - type: nauc_recall_at_10_max value: 4.9908 - type: nauc_recall_at_10_std value: -9.4435 - type: nauc_recall_at_10_diff1 value: 20.185200000000002 - type: nauc_recall_at_20_max value: 6.880999999999999 - type: nauc_recall_at_20_std value: -3.7041999999999997 - type: nauc_recall_at_20_diff1 value: 19.2889 - type: nauc_recall_at_100_max value: 18.0012 - type: nauc_recall_at_100_std value: 20.404600000000002 - type: nauc_recall_at_100_diff1 value: 17.1382 - type: nauc_recall_at_1000_max value: 41.3456 - type: nauc_recall_at_1000_std value: 50.3786 - type: nauc_recall_at_1000_diff1 value: 17.2713 - type: nauc_precision_at_1_max value: 0.364 - type: nauc_precision_at_1_std value: -12.840399999999999 - type: nauc_precision_at_1_diff1 value: 29.834699999999998 - type: nauc_precision_at_3_max value: 2.7525 - type: nauc_precision_at_3_std value: -13.992099999999999 - type: nauc_precision_at_3_diff1 value: 22.4985 - type: nauc_precision_at_5_max value: 4.0076 - type: nauc_precision_at_5_std value: -13.011800000000001 - type: nauc_precision_at_5_diff1 value: 21.9577 - type: nauc_precision_at_10_max value: 5.3558 - type: nauc_precision_at_10_std value: -8.8703 - type: nauc_precision_at_10_diff1 value: 19.5594 - type: nauc_precision_at_20_max value: 7.764500000000001 - type: nauc_precision_at_20_std value: -2.5067 - type: nauc_precision_at_20_diff1 value: 17.766199999999998 - type: nauc_precision_at_100_max value: 17.8184 - type: nauc_precision_at_100_std value: 20.153 - type: nauc_precision_at_100_diff1 value: 13.255500000000001 - type: nauc_precision_at_1000_max value: 26.7508 - type: nauc_precision_at_1000_std value: 31.494299999999996 - type: nauc_precision_at_1000_diff1 value: 5.8916 - type: nauc_mrr_at_1_max value: 0.364 - type: nauc_mrr_at_1_std value: -12.840399999999999 - type: nauc_mrr_at_1_diff1 value: 29.834699999999998 - type: nauc_mrr_at_3_max value: 1.5876000000000001 - type: nauc_mrr_at_3_std value: -13.4944 - type: nauc_mrr_at_3_diff1 value: 25.894099999999998 - type: nauc_mrr_at_5_max value: 1.9839 - type: nauc_mrr_at_5_std value: -13.1955 - type: nauc_mrr_at_5_diff1 value: 25.695899999999998 - type: nauc_mrr_at_10_max value: 2.2034000000000002 - type: nauc_mrr_at_10_std value: -12.504499999999998 - type: nauc_mrr_at_10_diff1 value: 25.3497 - type: nauc_mrr_at_20_max value: 2.334 - type: nauc_mrr_at_20_std value: -12.0259 - type: nauc_mrr_at_20_diff1 value: 25.3055 - type: nauc_mrr_at_100_max value: 2.5492999999999997 - type: nauc_mrr_at_100_std value: -11.6039 - type: nauc_mrr_at_100_diff1 value: 25.298 - type: nauc_mrr_at_1000_max value: 2.5439 - type: nauc_mrr_at_1000_std value: -11.6219 - type: nauc_mrr_at_1000_diff1 value: 25.312099999999997 - type: main_score value: 
28.319 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 88.9193 - type: f1 value: 88.6731 - type: f1_weighted value: 88.8695 - type: main_score value: 88.9193 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 57.6448 - type: f1 value: 38.9997 - type: f1_weighted value: 60.377 - type: main_score value: 57.6448 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 62.518499999999996 - type: f1 value: 59.2963 - type: f1_weighted value: 61.365700000000004 - type: main_score value: 62.518499999999996 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 69.36449999999999 - type: f1 value: 67.56259999999999 - type: f1_weighted value: 68.9987 - type: main_score value: 69.36449999999999 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P (default) type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 31.3521 - type: v_measure_std value: 1.3192000000000002 - type: main_score value: 31.3521 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S (default) type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 28.020899999999997 - type: v_measure_std value: 1.3569 - type: main_score value: 28.020899999999997 - task: type: Reranking dataset: name: MTEB MindSmallReranking (default) type: mteb/mind_small config: default split: test revision: 59042f120c80e8afa9cdbb224f67076cec0fc9a7 metrics: - type: map value: 30.3048 - type: mrr value: 31.326500000000003 - type: nAUC_map_max value: -19.322300000000002 - type: nAUC_map_std value: -4.424 - type: nAUC_map_diff1 value: 13.645299999999999 - type: nAUC_mrr_max value: -13.5457 - type: nAUC_mrr_std value: -2.0976000000000004 - type: nAUC_mrr_diff1 value: 12.965499999999999 - type: main_score value: 30.3048 - task: type: Retrieval dataset: name: MTEB NFCorpus (default) type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: ndcg_at_1 value: 36.997 - type: ndcg_at_3 value: 32.279 - type: ndcg_at_5 value: 30.232 - type: ndcg_at_10 value: 26.991 - type: ndcg_at_20 value: 25.223000000000003 - type: ndcg_at_100 value: 24.953 - type: ndcg_at_1000 value: 33.881 - type: map_at_1 value: 4.2139999999999995 - type: map_at_3 value: 7.013999999999999 - type: map_at_5 value: 8.189 - type: map_at_10 value: 9.468 - type: map_at_20 value: 10.441 - type: map_at_100 value: 11.729000000000001 - type: map_at_1000 value: 12.920000000000002 - type: recall_at_1 value: 4.2139999999999995 - type: recall_at_3 value: 7.981000000000001 - type: recall_at_5 value: 10.306 - type: recall_at_10 value: 13.053999999999998 - type: recall_at_20 value: 16.499 - type: recall_at_100 value: 25.501 - type: recall_at_1000 value: 57.103 - type: precision_at_1 value: 38.39 - type: precision_at_3 
value: 30.237000000000002 - type: precision_at_5 value: 26.006 - type: precision_at_10 value: 19.567 - type: precision_at_20 value: 14.613000000000001 - type: precision_at_100 value: 6.393 - type: precision_at_1000 value: 1.9 - type: mrr_at_1 value: 38.6997 - type: mrr_at_3 value: 45.0464 - type: mrr_at_5 value: 46.3622 - type: mrr_at_10 value: 46.9177 - type: mrr_at_20 value: 47.4995 - type: mrr_at_100 value: 47.7284 - type: mrr_at_1000 value: 47.7892 - type: nauc_ndcg_at_1_max value: 30.8996 - type: nauc_ndcg_at_1_std value: 18.2721 - type: nauc_ndcg_at_1_diff1 value: 34.836600000000004 - type: nauc_ndcg_at_3_max value: 35.3352 - type: nauc_ndcg_at_3_std value: 22.345699999999997 - type: nauc_ndcg_at_3_diff1 value: 29.5163 - type: nauc_ndcg_at_5_max value: 36.8152 - type: nauc_ndcg_at_5_std value: 25.799899999999997 - type: nauc_ndcg_at_5_diff1 value: 28.1756 - type: nauc_ndcg_at_10_max value: 37.752599999999994 - type: nauc_ndcg_at_10_std value: 28.2564 - type: nauc_ndcg_at_10_diff1 value: 25.9405 - type: nauc_ndcg_at_20_max value: 36.0517 - type: nauc_ndcg_at_20_std value: 29.4238 - type: nauc_ndcg_at_20_diff1 value: 23.8385 - type: nauc_ndcg_at_100_max value: 39.027499999999996 - type: nauc_ndcg_at_100_std value: 30.0156 - type: nauc_ndcg_at_100_diff1 value: 23.3814 - type: nauc_ndcg_at_1000_max value: 43.9552 - type: nauc_ndcg_at_1000_std value: 36.7709 - type: nauc_ndcg_at_1000_diff1 value: 23.2691 - type: nauc_map_at_1_max value: 13.7444 - type: nauc_map_at_1_std value: -3.6901 - type: nauc_map_at_1_diff1 value: 44.304700000000004 - type: nauc_map_at_3_max value: 18.061 - type: nauc_map_at_3_std value: -0.8826 - type: nauc_map_at_3_diff1 value: 34.1935 - type: nauc_map_at_5_max value: 20.4082 - type: nauc_map_at_5_std value: 1.6634 - type: nauc_map_at_5_diff1 value: 30.903999999999996 - type: nauc_map_at_10_max value: 25.414900000000003 - type: nauc_map_at_10_std value: 6.704899999999999 - type: nauc_map_at_10_diff1 value: 27.5783 - type: nauc_map_at_20_max value: 27.746199999999998 - type: nauc_map_at_20_std value: 10.5171 - type: nauc_map_at_20_diff1 value: 26.3814 - type: nauc_map_at_100_max value: 29.7035 - type: nauc_map_at_100_std value: 16.173000000000002 - type: nauc_map_at_100_diff1 value: 25.2415 - type: nauc_map_at_1000_max value: 29.8974 - type: nauc_map_at_1000_std value: 19.7694 - type: nauc_map_at_1000_diff1 value: 24.1468 - type: nauc_recall_at_1_max value: 13.7444 - type: nauc_recall_at_1_std value: -3.6901 - type: nauc_recall_at_1_diff1 value: 44.304700000000004 - type: nauc_recall_at_3_max value: 18.4883 - type: nauc_recall_at_3_std value: -0.9726999999999999 - type: nauc_recall_at_3_diff1 value: 29.502499999999998 - type: nauc_recall_at_5_max value: 20.3422 - type: nauc_recall_at_5_std value: 2.8535 - type: nauc_recall_at_5_diff1 value: 23.688100000000002 - type: nauc_recall_at_10_max value: 26.8137 - type: nauc_recall_at_10_std value: 6.3345 - type: nauc_recall_at_10_diff1 value: 19.5952 - type: nauc_recall_at_20_max value: 25.4056 - type: nauc_recall_at_20_std value: 8.8684 - type: nauc_recall_at_20_diff1 value: 16.9286 - type: nauc_recall_at_100_max value: 29.1932 - type: nauc_recall_at_100_std value: 19.6664 - type: nauc_recall_at_100_diff1 value: 14.8893 - type: nauc_recall_at_1000_max value: 23.0622 - type: nauc_recall_at_1000_std value: 25.8533 - type: nauc_recall_at_1000_diff1 value: 10.0844 - type: nauc_precision_at_1_max value: 32.948699999999995 - type: nauc_precision_at_1_std value: 19.2494 - type: nauc_precision_at_1_diff1 value: 33.955200000000005 
- type: nauc_precision_at_3_max value: 39.4863 - type: nauc_precision_at_3_std value: 27.7083 - type: nauc_precision_at_3_diff1 value: 22.4854 - type: nauc_precision_at_5_max value: 40.1376 - type: nauc_precision_at_5_std value: 33.4658 - type: nauc_precision_at_5_diff1 value: 18.108 - type: nauc_precision_at_10_max value: 39.333200000000005 - type: nauc_precision_at_10_std value: 39.949600000000004 - type: nauc_precision_at_10_diff1 value: 11.7183 - type: nauc_precision_at_20_max value: 32.0094 - type: nauc_precision_at_20_std value: 45.1815 - type: nauc_precision_at_20_diff1 value: 7.2424 - type: nauc_precision_at_100_max value: 18.073 - type: nauc_precision_at_100_std value: 46.7008 - type: nauc_precision_at_100_diff1 value: -0.6927 - type: nauc_precision_at_1000_max value: 2.9552 - type: nauc_precision_at_1000_std value: 32.691199999999995 - type: nauc_precision_at_1000_diff1 value: -4.3427 - type: nauc_mrr_at_1_max value: 32.7952 - type: nauc_mrr_at_1_std value: 20.716 - type: nauc_mrr_at_1_diff1 value: 33.047 - type: nauc_mrr_at_3_max value: 39.5698 - type: nauc_mrr_at_3_std value: 25.674200000000003 - type: nauc_mrr_at_3_diff1 value: 31.7916 - type: nauc_mrr_at_5_max value: 40.7711 - type: nauc_mrr_at_5_std value: 27.2756 - type: nauc_mrr_at_5_diff1 value: 31.5432 - type: nauc_mrr_at_10_max value: 41.033500000000004 - type: nauc_mrr_at_10_std value: 27.364500000000003 - type: nauc_mrr_at_10_diff1 value: 31.394899999999996 - type: nauc_mrr_at_20_max value: 40.9665 - type: nauc_mrr_at_20_std value: 27.5866 - type: nauc_mrr_at_20_diff1 value: 31.6835 - type: nauc_mrr_at_100_max value: 40.9471 - type: nauc_mrr_at_100_std value: 27.643 - type: nauc_mrr_at_100_diff1 value: 31.553900000000002 - type: nauc_mrr_at_1000_max value: 40.9207 - type: nauc_mrr_at_1000_std value: 27.6206 - type: nauc_mrr_at_1000_diff1 value: 31.5596 - type: main_score value: 26.991 - task: type: Retrieval dataset: name: MTEB NQ (default) type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: ndcg_at_1 value: 32.937 - type: ndcg_at_3 value: 42.939 - type: ndcg_at_5 value: 47.044000000000004 - type: ndcg_at_10 value: 50.893 - type: ndcg_at_20 value: 53.093 - type: ndcg_at_100 value: 55.369 - type: ndcg_at_1000 value: 56.285 - type: map_at_1 value: 29.087000000000003 - type: map_at_3 value: 39.263 - type: map_at_5 value: 41.708 - type: map_at_10 value: 43.471 - type: map_at_20 value: 44.155 - type: map_at_100 value: 44.528 - type: map_at_1000 value: 44.568999999999996 - type: recall_at_1 value: 29.087000000000003 - type: recall_at_3 value: 50.451 - type: recall_at_5 value: 59.946 - type: recall_at_10 value: 71.109 - type: recall_at_20 value: 79.26299999999999 - type: recall_at_100 value: 90.51 - type: recall_at_1000 value: 97.277 - type: precision_at_1 value: 32.937 - type: precision_at_3 value: 19.602 - type: precision_at_5 value: 14.113999999999999 - type: precision_at_10 value: 8.462 - type: precision_at_20 value: 4.758 - type: precision_at_100 value: 1.0999999999999999 - type: precision_at_1000 value: 0.11800000000000001 - type: mrr_at_1 value: 32.9374 - type: mrr_at_3 value: 42.3957 - type: mrr_at_5 value: 44.5148 - type: mrr_at_10 value: 45.9459 - type: mrr_at_20 value: 46.4559 - type: mrr_at_100 value: 46.7367 - type: mrr_at_1000 value: 46.765 - type: nauc_ndcg_at_1_max value: 24.0105 - type: nauc_ndcg_at_1_std value: -1.5957 - type: nauc_ndcg_at_1_diff1 value: 33.1575 - type: nauc_ndcg_at_3_max value: 26.388 - type: nauc_ndcg_at_3_std value: -1.6223 - 
type: nauc_ndcg_at_3_diff1 value: 29.1908 - type: nauc_ndcg_at_5_max value: 28.188800000000004 - type: nauc_ndcg_at_5_std value: -0.3491 - type: nauc_ndcg_at_5_diff1 value: 28.287499999999998 - type: nauc_ndcg_at_10_max value: 29.768800000000002 - type: nauc_ndcg_at_10_std value: 2.093 - type: nauc_ndcg_at_10_diff1 value: 28.257700000000003 - type: nauc_ndcg_at_20_max value: 30.8687 - type: nauc_ndcg_at_20_std value: 3.4320000000000004 - type: nauc_ndcg_at_20_diff1 value: 28.220699999999997 - type: nauc_ndcg_at_100_max value: 30.692199999999996 - type: nauc_ndcg_at_100_std value: 4.0889 - type: nauc_ndcg_at_100_diff1 value: 28.468 - type: nauc_ndcg_at_1000_max value: 29.9378 - type: nauc_ndcg_at_1000_std value: 3.1003 - type: nauc_ndcg_at_1000_diff1 value: 28.8642 - type: nauc_map_at_1_max value: 21.948999999999998 - type: nauc_map_at_1_std value: -3.4299000000000004 - type: nauc_map_at_1_diff1 value: 33.5905 - type: nauc_map_at_3_max value: 25.600299999999997 - type: nauc_map_at_3_std value: -2.2762000000000002 - type: nauc_map_at_3_diff1 value: 30.235 - type: nauc_map_at_5_max value: 26.6859 - type: nauc_map_at_5_std value: -1.4717 - type: nauc_map_at_5_diff1 value: 29.6397 - type: nauc_map_at_10_max value: 27.3731 - type: nauc_map_at_10_std value: -0.4928 - type: nauc_map_at_10_diff1 value: 29.7079 - type: nauc_map_at_20_max value: 27.668799999999997 - type: nauc_map_at_20_std value: -0.0964 - type: nauc_map_at_20_diff1 value: 29.6945 - type: nauc_map_at_100_max value: 27.675 - type: nauc_map_at_100_std value: 0.0414 - type: nauc_map_at_100_diff1 value: 29.709000000000003 - type: nauc_map_at_1000_max value: 27.647 - type: nauc_map_at_1000_std value: 0.0063999999999999994 - type: nauc_map_at_1000_diff1 value: 29.724099999999996 - type: nauc_recall_at_1_max value: 21.948999999999998 - type: nauc_recall_at_1_std value: -3.4299000000000004 - type: nauc_recall_at_1_diff1 value: 33.5905 - type: nauc_recall_at_3_max value: 27.2388 - type: nauc_recall_at_3_std value: -1.4857 - type: nauc_recall_at_3_diff1 value: 25.991500000000002 - type: nauc_recall_at_5_max value: 31.4282 - type: nauc_recall_at_5_std value: 1.2066000000000001 - type: nauc_recall_at_5_diff1 value: 23.5681 - type: nauc_recall_at_10_max value: 37.4517 - type: nauc_recall_at_10_std value: 10.1238 - type: nauc_recall_at_10_diff1 value: 22.2133 - type: nauc_recall_at_20_max value: 46.4783 - type: nauc_recall_at_20_std value: 19.8515 - type: nauc_recall_at_20_diff1 value: 20.6028 - type: nauc_recall_at_100_max value: 58.7011 - type: nauc_recall_at_100_std value: 43.6264 - type: nauc_recall_at_100_diff1 value: 18.3446 - type: nauc_recall_at_1000_max value: 74.3733 - type: nauc_recall_at_1000_std value: 67.4933 - type: nauc_recall_at_1000_diff1 value: 25.375500000000002 - type: nauc_precision_at_1_max value: 24.0105 - type: nauc_precision_at_1_std value: -1.5957 - type: nauc_precision_at_1_diff1 value: 33.1575 - type: nauc_precision_at_3_max value: 27.406399999999998 - type: nauc_precision_at_3_std value: 0.9842 - type: nauc_precision_at_3_diff1 value: 21.793599999999998 - type: nauc_precision_at_5_max value: 29.145 - type: nauc_precision_at_5_std value: 4.6154 - type: nauc_precision_at_5_diff1 value: 16.8 - type: nauc_precision_at_10_max value: 29.480600000000003 - type: nauc_precision_at_10_std value: 12.286900000000001 - type: nauc_precision_at_10_diff1 value: 11.7686 - type: nauc_precision_at_20_max value: 29.791 - type: nauc_precision_at_20_std value: 18.0686 - type: nauc_precision_at_20_diff1 value: 7.2818 - type: 
nauc_precision_at_100_max value: 22.605900000000002 - type: nauc_precision_at_100_std value: 22.4834 - type: nauc_precision_at_100_diff1 value: -0.1403 - type: nauc_precision_at_1000_max value: 11.637599999999999 - type: nauc_precision_at_1000_std value: 16.299 - type: nauc_precision_at_1000_diff1 value: -4.3052 - type: nauc_mrr_at_1_max value: 24.0105 - type: nauc_mrr_at_1_std value: -1.5957 - type: nauc_mrr_at_1_diff1 value: 33.1575 - type: nauc_mrr_at_3_max value: 26.375 - type: nauc_mrr_at_3_std value: -0.2874 - type: nauc_mrr_at_3_diff1 value: 29.8333 - type: nauc_mrr_at_5_max value: 27.2656 - type: nauc_mrr_at_5_std value: 0.37 - type: nauc_mrr_at_5_diff1 value: 29.461900000000004 - type: nauc_mrr_at_10_max value: 27.7811 - type: nauc_mrr_at_10_std value: 1.2722 - type: nauc_mrr_at_10_diff1 value: 29.456 - type: nauc_mrr_at_20_max value: 27.9525 - type: nauc_mrr_at_20_std value: 1.4394 - type: nauc_mrr_at_20_diff1 value: 29.5184 - type: nauc_mrr_at_100_max value: 27.887099999999997 - type: nauc_mrr_at_100_std value: 1.4539 - type: nauc_mrr_at_100_diff1 value: 29.5789 - type: nauc_mrr_at_1000_max value: 27.865499999999997 - type: nauc_mrr_at_1000_std value: 1.4233 - type: nauc_mrr_at_1000_diff1 value: 29.5896 - type: main_score value: 50.893 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval (default) type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: ndcg_at_1 value: 78.41 - type: ndcg_at_3 value: 82.614 - type: ndcg_at_5 value: 84.443 - type: ndcg_at_10 value: 85.845 - type: ndcg_at_20 value: 86.615 - type: ndcg_at_100 value: 87.313 - type: ndcg_at_1000 value: 87.492 - type: map_at_1 value: 68.092 - type: map_at_3 value: 78.604 - type: map_at_5 value: 80.527 - type: map_at_10 value: 81.639 - type: map_at_20 value: 82.07900000000001 - type: map_at_100 value: 82.314 - type: map_at_1000 value: 82.336 - type: recall_at_1 value: 68.092 - type: recall_at_3 value: 84.66900000000001 - type: recall_at_5 value: 89.751 - type: recall_at_10 value: 93.888 - type: recall_at_20 value: 96.389 - type: recall_at_100 value: 99.042 - type: recall_at_1000 value: 99.929 - type: precision_at_1 value: 78.41 - type: precision_at_3 value: 36.027 - type: precision_at_5 value: 23.844 - type: precision_at_10 value: 13.043 - type: precision_at_20 value: 6.946 - type: precision_at_100 value: 1.506 - type: precision_at_1000 value: 0.156 - type: mrr_at_1 value: 78.4 - type: mrr_at_3 value: 83.9867 - type: mrr_at_5 value: 84.7992 - type: mrr_at_10 value: 85.1577 - type: mrr_at_20 value: 85.2505 - type: mrr_at_100 value: 85.2855 - type: mrr_at_1000 value: 85.2877 - type: nauc_ndcg_at_1_max value: 39.8081 - type: nauc_ndcg_at_1_std value: -28.606399999999997 - type: nauc_ndcg_at_1_diff1 value: 75.9446 - type: nauc_ndcg_at_3_max value: 37.7924 - type: nauc_ndcg_at_3_std value: -33.5391 - type: nauc_ndcg_at_3_diff1 value: 73.3973 - type: nauc_ndcg_at_5_max value: 38.047 - type: nauc_ndcg_at_5_std value: -33.5943 - type: nauc_ndcg_at_5_diff1 value: 73.7645 - type: nauc_ndcg_at_10_max value: 39.0948 - type: nauc_ndcg_at_10_std value: -32.3805 - type: nauc_ndcg_at_10_diff1 value: 74.2689 - type: nauc_ndcg_at_20_max value: 39.4193 - type: nauc_ndcg_at_20_std value: -31.309900000000003 - type: nauc_ndcg_at_20_diff1 value: 74.2915 - type: nauc_ndcg_at_100_max value: 39.6566 - type: nauc_ndcg_at_100_std value: -30.3777 - type: nauc_ndcg_at_100_diff1 value: 74.2375 - type: nauc_ndcg_at_1000_max value: 39.6656 - type: nauc_ndcg_at_1000_std value: -30.2466 - 
type: nauc_ndcg_at_1000_diff1 value: 74.22609999999999 - type: nauc_map_at_1_max value: 29.1625 - type: nauc_map_at_1_std value: -31.4393 - type: nauc_map_at_1_diff1 value: 77.41 - type: nauc_map_at_3_max value: 35.3371 - type: nauc_map_at_3_std value: -35.2729 - type: nauc_map_at_3_diff1 value: 74.6367 - type: nauc_map_at_5_max value: 36.600100000000005 - type: nauc_map_at_5_std value: -34.9097 - type: nauc_map_at_5_diff1 value: 74.48479999999999 - type: nauc_map_at_10_max value: 37.5994 - type: nauc_map_at_10_std value: -33.702 - type: nauc_map_at_10_diff1 value: 74.4678 - type: nauc_map_at_20_max value: 37.890299999999996 - type: nauc_map_at_20_std value: -32.9179 - type: nauc_map_at_20_diff1 value: 74.3744 - type: nauc_map_at_100_max value: 38.0205 - type: nauc_map_at_100_std value: -32.4364 - type: nauc_map_at_100_diff1 value: 74.3232 - type: nauc_map_at_1000_max value: 38.0296 - type: nauc_map_at_1000_std value: -32.390600000000006 - type: nauc_map_at_1000_diff1 value: 74.323 - type: nauc_recall_at_1_max value: 29.1625 - type: nauc_recall_at_1_std value: -31.4393 - type: nauc_recall_at_1_diff1 value: 77.41 - type: nauc_recall_at_3_max value: 32.2751 - type: nauc_recall_at_3_std value: -39.215 - type: nauc_recall_at_3_diff1 value: 70.3264 - type: nauc_recall_at_5_max value: 32.9445 - type: nauc_recall_at_5_std value: -40.7042 - type: nauc_recall_at_5_diff1 value: 68.803 - type: nauc_recall_at_10_max value: 36.6396 - type: nauc_recall_at_10_std value: -37.5092 - type: nauc_recall_at_10_diff1 value: 68.8674 - type: nauc_recall_at_20_max value: 38.8048 - type: nauc_recall_at_20_std value: -31.1471 - type: nauc_recall_at_20_diff1 value: 69.5775 - type: nauc_recall_at_100_max value: 42.9809 - type: nauc_recall_at_100_std value: -18.932299999999998 - type: nauc_recall_at_100_diff1 value: 69.4688 - type: nauc_recall_at_1000_max value: 67.836 - type: nauc_recall_at_1000_std value: 38.124 - type: nauc_recall_at_1000_diff1 value: 71.4131 - type: nauc_precision_at_1_max value: 39.8081 - type: nauc_precision_at_1_std value: -28.606399999999997 - type: nauc_precision_at_1_diff1 value: 75.9446 - type: nauc_precision_at_3_max value: 14.0877 - type: nauc_precision_at_3_std value: 2.1809 - type: nauc_precision_at_3_diff1 value: -8.5037 - type: nauc_precision_at_5_max value: 7.3131 - type: nauc_precision_at_5_std value: 11.67 - type: nauc_precision_at_5_diff1 value: -23.663500000000003 - type: nauc_precision_at_10_max value: 2.4924999999999997 - type: nauc_precision_at_10_std value: 20.4298 - type: nauc_precision_at_10_diff1 value: -32.5249 - type: nauc_precision_at_20_max value: -0.8340000000000001 - type: nauc_precision_at_20_std value: 25.5814 - type: nauc_precision_at_20_diff1 value: -36.879 - type: nauc_precision_at_100_max value: -4.2415 - type: nauc_precision_at_100_std value: 30.588700000000003 - type: nauc_precision_at_100_diff1 value: -40.0441 - type: nauc_precision_at_1000_max value: -5.7567 - type: nauc_precision_at_1000_std value: 31.6137 - type: nauc_precision_at_1000_diff1 value: -40.8601 - type: nauc_mrr_at_1_max value: 39.7059 - type: nauc_mrr_at_1_std value: -28.6757 - type: nauc_mrr_at_1_diff1 value: 75.96730000000001 - type: nauc_mrr_at_3_max value: 40.842 - type: nauc_mrr_at_3_std value: -29.4321 - type: nauc_mrr_at_3_diff1 value: 74.588 - type: nauc_mrr_at_5_max value: 40.8178 - type: nauc_mrr_at_5_std value: -29.343700000000002 - type: nauc_mrr_at_5_diff1 value: 74.7965 - type: nauc_mrr_at_10_max value: 40.9508 - type: nauc_mrr_at_10_std value: -29.1159 - type: 
nauc_mrr_at_10_diff1 value: 74.9315 - type: nauc_mrr_at_20_max value: 40.9157 - type: nauc_mrr_at_20_std value: -29.040899999999997 - type: nauc_mrr_at_20_diff1 value: 74.9526 - type: nauc_mrr_at_100_max value: 40.8672 - type: nauc_mrr_at_100_std value: -29.0691 - type: nauc_mrr_at_100_diff1 value: 74.9558 - type: nauc_mrr_at_1000_max value: 40.8655 - type: nauc_mrr_at_1000_std value: -29.0682 - type: nauc_mrr_at_1000_diff1 value: 74.9558 - type: main_score value: 85.845 - task: type: Clustering dataset: name: MTEB RedditClustering (default) type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 43.7063 - type: v_measure_std value: 4.7175 - type: main_score value: 43.7063 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P (default) type: mteb/reddit-clustering-p2p config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: v_measure value: 53.54 - type: v_measure_std value: 11.809600000000001 - type: main_score value: 53.54 - task: type: Retrieval dataset: name: MTEB SCIDOCS (default) type: mteb/scidocs config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: ndcg_at_1 value: 20.7 - type: ndcg_at_3 value: 16.518 - type: ndcg_at_5 value: 14.441 - type: ndcg_at_10 value: 17.380000000000003 - type: ndcg_at_20 value: 19.991 - type: ndcg_at_100 value: 24.747 - type: ndcg_at_1000 value: 30.296 - type: map_at_1 value: 4.208 - type: map_at_3 value: 7.335 - type: map_at_5 value: 8.712 - type: map_at_10 value: 10.135 - type: map_at_20 value: 11.068999999999999 - type: map_at_100 value: 11.951 - type: map_at_1000 value: 12.245000000000001 - type: recall_at_1 value: 4.208 - type: recall_at_3 value: 9.303 - type: recall_at_5 value: 12.797 - type: recall_at_10 value: 18.195 - type: recall_at_20 value: 24.318 - type: recall_at_100 value: 39.803 - type: recall_at_1000 value: 66.99000000000001 - type: precision_at_1 value: 20.7 - type: precision_at_3 value: 15.299999999999999 - type: precision_at_5 value: 12.6 - type: precision_at_10 value: 8.959999999999999 - type: precision_at_20 value: 5.985 - type: precision_at_100 value: 1.959 - type: precision_at_1000 value: 0.33 - type: mrr_at_1 value: 20.7 - type: mrr_at_3 value: 27.3833 - type: mrr_at_5 value: 29.168300000000002 - type: mrr_at_10 value: 30.598799999999997 - type: mrr_at_20 value: 31.217 - type: mrr_at_100 value: 31.688499999999998 - type: mrr_at_1000 value: 31.763599999999997 - type: nauc_ndcg_at_1_max value: 21.5429 - type: nauc_ndcg_at_1_std value: 4.718 - type: nauc_ndcg_at_1_diff1 value: 19.3827 - type: nauc_ndcg_at_3_max value: 32.1126 - type: nauc_ndcg_at_3_std value: 9.314400000000001 - type: nauc_ndcg_at_3_diff1 value: 20.0916 - type: nauc_ndcg_at_5_max value: 31.849800000000002 - type: nauc_ndcg_at_5_std value: 10.8725 - type: nauc_ndcg_at_5_diff1 value: 17.7008 - type: nauc_ndcg_at_10_max value: 33.366600000000005 - type: nauc_ndcg_at_10_std value: 13.625399999999999 - type: nauc_ndcg_at_10_diff1 value: 16.375 - type: nauc_ndcg_at_20_max value: 34.6677 - type: nauc_ndcg_at_20_std value: 15.3872 - type: nauc_ndcg_at_20_diff1 value: 16.8414 - type: nauc_ndcg_at_100_max value: 37.2778 - type: nauc_ndcg_at_100_std value: 20.4858 - type: nauc_ndcg_at_100_diff1 value: 16.7288 - type: nauc_ndcg_at_1000_max value: 36.601 - type: nauc_ndcg_at_1000_std value: 22.312199999999997 - type: nauc_ndcg_at_1000_diff1 value: 16.2465 - type: nauc_map_at_1_max value: 21.2741 - type: 
nauc_map_at_1_std value: 4.7143 - type: nauc_map_at_1_diff1 value: 18.8297 - type: nauc_map_at_3_max value: 31.727800000000002 - type: nauc_map_at_3_std value: 6.8229999999999995 - type: nauc_map_at_3_diff1 value: 20.4232 - type: nauc_map_at_5_max value: 32.3588 - type: nauc_map_at_5_std value: 8.565100000000001 - type: nauc_map_at_5_diff1 value: 18.9604 - type: nauc_map_at_10_max value: 33.6113 - type: nauc_map_at_10_std value: 10.743 - type: nauc_map_at_10_diff1 value: 17.6337 - type: nauc_map_at_20_max value: 34.7121 - type: nauc_map_at_20_std value: 11.9819 - type: nauc_map_at_20_diff1 value: 18.0342 - type: nauc_map_at_100_max value: 35.6623 - type: nauc_map_at_100_std value: 13.7498 - type: nauc_map_at_100_diff1 value: 17.985300000000002 - type: nauc_map_at_1000_max value: 35.663 - type: nauc_map_at_1000_std value: 14.050099999999999 - type: nauc_map_at_1000_diff1 value: 17.9269 - type: nauc_recall_at_1_max value: 21.2741 - type: nauc_recall_at_1_std value: 4.7143 - type: nauc_recall_at_1_diff1 value: 18.8297 - type: nauc_recall_at_3_max value: 36.2097 - type: nauc_recall_at_3_std value: 11.6014 - type: nauc_recall_at_3_diff1 value: 20.0114 - type: nauc_recall_at_5_max value: 33.7826 - type: nauc_recall_at_5_std value: 13.603000000000002 - type: nauc_recall_at_5_diff1 value: 15.4714 - type: nauc_recall_at_10_max value: 34.105999999999995 - type: nauc_recall_at_10_std value: 17.4216 - type: nauc_recall_at_10_diff1 value: 12.3734 - type: nauc_recall_at_20_max value: 35.2885 - type: nauc_recall_at_20_std value: 19.9833 - type: nauc_recall_at_20_diff1 value: 13.2726 - type: nauc_recall_at_100_max value: 37.3523 - type: nauc_recall_at_100_std value: 30.2207 - type: nauc_recall_at_100_diff1 value: 11.437700000000001 - type: nauc_recall_at_1000_max value: 29.276000000000003 - type: nauc_recall_at_1000_std value: 35.906 - type: nauc_recall_at_1000_diff1 value: 6.281499999999999 - type: nauc_precision_at_1_max value: 21.5429 - type: nauc_precision_at_1_std value: 4.718 - type: nauc_precision_at_1_diff1 value: 19.3827 - type: nauc_precision_at_3_max value: 36.609 - type: nauc_precision_at_3_std value: 11.863700000000001 - type: nauc_precision_at_3_diff1 value: 20.4735 - type: nauc_precision_at_5_max value: 34.3364 - type: nauc_precision_at_5_std value: 13.7951 - type: nauc_precision_at_5_diff1 value: 15.992700000000001 - type: nauc_precision_at_10_max value: 34.6556 - type: nauc_precision_at_10_std value: 17.4014 - type: nauc_precision_at_10_diff1 value: 12.981699999999998 - type: nauc_precision_at_20_max value: 35.836 - type: nauc_precision_at_20_std value: 20.1892 - type: nauc_precision_at_20_diff1 value: 13.6046 - type: nauc_precision_at_100_max value: 37.9677 - type: nauc_precision_at_100_std value: 30.3386 - type: nauc_precision_at_100_diff1 value: 11.8783 - type: nauc_precision_at_1000_max value: 29.795700000000004 - type: nauc_precision_at_1000_std value: 35.4107 - type: nauc_precision_at_1000_diff1 value: 6.6238 - type: nauc_mrr_at_1_max value: 21.5429 - type: nauc_mrr_at_1_std value: 4.718 - type: nauc_mrr_at_1_diff1 value: 19.3827 - type: nauc_mrr_at_3_max value: 27.635900000000003 - type: nauc_mrr_at_3_std value: 9.5593 - type: nauc_mrr_at_3_diff1 value: 18.4684 - type: nauc_mrr_at_5_max value: 26.682499999999997 - type: nauc_mrr_at_5_std value: 9.7369 - type: nauc_mrr_at_5_diff1 value: 17.4317 - type: nauc_mrr_at_10_max value: 27.032400000000003 - type: nauc_mrr_at_10_std value: 10.4662 - type: nauc_mrr_at_10_diff1 value: 17.3209 - type: nauc_mrr_at_20_max value: 27.1752 - type: 
nauc_mrr_at_20_std value: 10.5774 - type: nauc_mrr_at_20_diff1 value: 17.3725 - type: nauc_mrr_at_100_max value: 27.228099999999998 - type: nauc_mrr_at_100_std value: 10.710600000000001 - type: nauc_mrr_at_100_diff1 value: 17.4312 - type: nauc_mrr_at_1000_max value: 27.172600000000003 - type: nauc_mrr_at_1000_std value: 10.6434 - type: nauc_mrr_at_1000_diff1 value: 17.421400000000002 - type: main_score value: 17.380000000000003 - task: type: STS dataset: name: MTEB SICK-R (default) type: mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: pearson value: 75.385 - type: spearman value: 68.46560000000001 - type: cosine_pearson value: 75.385 - type: cosine_spearman value: 68.46560000000001 - type: manhattan_pearson value: 72.53309999999999 - type: manhattan_spearman value: 68.79899999999999 - type: euclidean_pearson value: 72.5239 - type: euclidean_spearman value: 68.46560000000001 - type: main_score value: 68.46560000000001 - task: type: STS dataset: name: MTEB STS12 (default) type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: pearson value: 80.9088 - type: spearman value: 74.7362 - type: cosine_pearson value: 80.9088 - type: cosine_spearman value: 74.7362 - type: manhattan_pearson value: 77.3291 - type: manhattan_spearman value: 75.0881 - type: euclidean_pearson value: 77.5321 - type: euclidean_spearman value: 74.7347 - type: main_score value: 74.7362 - task: type: STS dataset: name: MTEB STS13 (default) type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: pearson value: 74.6345 - type: spearman value: 75.63990000000001 - type: cosine_pearson value: 74.6345 - type: cosine_spearman value: 75.63990000000001 - type: manhattan_pearson value: 75.5227 - type: manhattan_spearman value: 75.5136 - type: euclidean_pearson value: 75.5744 - type: euclidean_spearman value: 75.63990000000001 - type: main_score value: 75.63990000000001 - task: type: STS dataset: name: MTEB STS14 (default) type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: pearson value: 76.66629999999999 - type: spearman value: 73.1976 - type: cosine_pearson value: 76.66629999999999 - type: cosine_spearman value: 73.1976 - type: manhattan_pearson value: 75.0827 - type: manhattan_spearman value: 73.2472 - type: euclidean_pearson value: 75.2873 - type: euclidean_spearman value: 73.1976 - type: main_score value: 73.1976 - task: type: STS dataset: name: MTEB STS15 (default) type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: pearson value: 84.33810000000001 - type: spearman value: 85.0551 - type: cosine_pearson value: 84.33810000000001 - type: cosine_spearman value: 85.0551 - type: manhattan_pearson value: 84.5984 - type: manhattan_spearman value: 85.1619 - type: euclidean_pearson value: 84.529 - type: euclidean_spearman value: 85.0551 - type: main_score value: 85.0551 - task: type: STS dataset: name: MTEB STS16 (default) type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: pearson value: 79.5933 - type: spearman value: 81.11120000000001 - type: cosine_pearson value: 79.5933 - type: cosine_spearman value: 81.11120000000001 - type: manhattan_pearson value: 80.136 - type: manhattan_spearman value: 80.8767 - type: euclidean_pearson value: 80.3305 - type: 
euclidean_spearman value: 81.11120000000001 - type: main_score value: 81.11120000000001 - task: type: STS dataset: name: MTEB STS17 (en-tr) type: mteb/sts17-crosslingual-sts config: en-tr split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 38.2331 - type: spearman value: 33.7346 - type: cosine_pearson value: 38.2331 - type: cosine_spearman value: 33.7346 - type: manhattan_pearson value: 40.986 - type: manhattan_spearman value: 34.253099999999996 - type: euclidean_pearson value: 40.2622 - type: euclidean_spearman value: 33.7346 - type: main_score value: 33.7346 - task: type: STS dataset: name: MTEB STS17 (fr-en) type: mteb/sts17-crosslingual-sts config: fr-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 73.5477 - type: spearman value: 74.1745 - type: cosine_pearson value: 73.5477 - type: cosine_spearman value: 74.1745 - type: manhattan_pearson value: 74.84920000000001 - type: manhattan_spearman value: 74.49900000000001 - type: euclidean_pearson value: 74.14 - type: euclidean_spearman value: 74.1745 - type: main_score value: 74.1745 - task: type: STS dataset: name: MTEB STS17 (it-en) type: mteb/sts17-crosslingual-sts config: it-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 66.7169 - type: spearman value: 66.864 - type: cosine_pearson value: 66.7169 - type: cosine_spearman value: 66.864 - type: manhattan_pearson value: 67.39359999999999 - type: manhattan_spearman value: 67.0985 - type: euclidean_pearson value: 66.9389 - type: euclidean_spearman value: 66.864 - type: main_score value: 66.864 - task: type: STS dataset: name: MTEB STS17 (es-en) type: mteb/sts17-crosslingual-sts config: es-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 70.5101 - type: spearman value: 70.05930000000001 - type: cosine_pearson value: 70.5101 - type: cosine_spearman value: 70.05930000000001 - type: manhattan_pearson value: 72.7524 - type: manhattan_spearman value: 71.2907 - type: euclidean_pearson value: 71.148 - type: euclidean_spearman value: 70.05930000000001 - type: main_score value: 70.05930000000001 - task: type: STS dataset: name: MTEB STS17 (nl-en) type: mteb/sts17-crosslingual-sts config: nl-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 68.3089 - type: spearman value: 68.4899 - type: cosine_pearson value: 68.3089 - type: cosine_spearman value: 68.4899 - type: manhattan_pearson value: 69.3956 - type: manhattan_spearman value: 68.9486 - type: euclidean_pearson value: 68.8059 - type: euclidean_spearman value: 68.4899 - type: main_score value: 68.4899 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 78.28739999999999 - type: spearman value: 78.6966 - type: cosine_pearson value: 78.28739999999999 - type: cosine_spearman value: 78.6966 - type: manhattan_pearson value: 78.97070000000001 - type: manhattan_spearman value: 79.1907 - type: euclidean_pearson value: 78.36070000000001 - type: euclidean_spearman value: 78.6966 - type: main_score value: 78.6966 - task: type: STS dataset: name: MTEB STS17 (en-ar) type: mteb/sts17-crosslingual-sts config: en-ar split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 59.611999999999995 - type: spearman value: 59.9288 - type: cosine_pearson value: 
59.611999999999995 - type: cosine_spearman value: 59.9288 - type: manhattan_pearson value: 60.3549 - type: manhattan_spearman value: 59.696099999999994 - type: euclidean_pearson value: 60.4754 - type: euclidean_spearman value: 59.9288 - type: main_score value: 59.9288 - task: type: STS dataset: name: MTEB STS17 (en-de) type: mteb/sts17-crosslingual-sts config: en-de split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 70.6341 - type: spearman value: 69.9775 - type: cosine_pearson value: 70.6341 - type: cosine_spearman value: 69.9775 - type: manhattan_pearson value: 72.7788 - type: manhattan_spearman value: 71.2033 - type: euclidean_pearson value: 71.5822 - type: euclidean_spearman value: 69.9775 - type: main_score value: 69.9775 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 67.2703 - type: spearman value: 67.58229999999999 - type: cosine_pearson value: 67.2703 - type: cosine_spearman value: 67.58229999999999 - type: manhattan_pearson value: 68.1768 - type: manhattan_spearman value: 67.6479 - type: euclidean_pearson value: 67.9708 - type: euclidean_spearman value: 67.58229999999999 - type: main_score value: 67.58229999999999 - task: type: STS dataset: name: MTEB STS22 (de-en) type: mteb/sts22-crosslingual-sts config: de-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 62.2109 - type: spearman value: 56.2314 - type: cosine_pearson value: 62.2109 - type: cosine_spearman value: 56.2314 - type: manhattan_pearson value: 65.9455 - type: manhattan_spearman value: 56.5496 - type: euclidean_pearson value: 65.30550000000001 - type: euclidean_spearman value: 56.2314 - type: main_score value: 56.2314 - task: type: STS dataset: name: MTEB STS22 (pl-en) type: mteb/sts22-crosslingual-sts config: pl-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 74.4185 - type: spearman value: 72.82119999999999 - type: cosine_pearson value: 74.4185 - type: cosine_spearman value: 72.82119999999999 - type: manhattan_pearson value: 75.6921 - type: manhattan_spearman value: 72.3315 - type: euclidean_pearson value: 75.1725 - type: euclidean_spearman value: 72.82119999999999 - type: main_score value: 72.82119999999999 - task: type: STS dataset: name: MTEB STS22 (es-en) type: mteb/sts22-crosslingual-sts config: es-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 78.6974 - type: spearman value: 79.5845 - type: cosine_pearson value: 78.6974 - type: cosine_spearman value: 79.5845 - type: manhattan_pearson value: 79.6724 - type: manhattan_spearman value: 79.668 - type: euclidean_pearson value: 79.69380000000001 - type: euclidean_spearman value: 79.5845 - type: main_score value: 79.5845 - task: type: STS dataset: name: MTEB STS22 (zh-en) type: mteb/sts22-crosslingual-sts config: zh-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 71.3237 - type: spearman value: 71.5178 - type: cosine_pearson value: 71.3237 - type: cosine_spearman value: 71.5178 - type: manhattan_pearson value: 73.3948 - type: manhattan_spearman value: 71.5607 - type: euclidean_pearson value: 73.1403 - type: euclidean_spearman value: 71.5178 - type: main_score value: 71.5178 - task: type: STS dataset: name: MTEB STSBenchmark (default) type: mteb/stsbenchmark-sts config: default split: test 
revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: pearson value: 75.5279 - type: spearman value: 76.9844 - type: cosine_pearson value: 75.5279 - type: cosine_spearman value: 76.9844 - type: manhattan_pearson value: 77.5474 - type: manhattan_spearman value: 77.4353 - type: euclidean_pearson value: 77.1612 - type: euclidean_spearman value: 76.9844 - type: main_score value: 76.9844 - task: type: Reranking dataset: name: MTEB SciDocsRR (default) type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 79.33109999999999 - type: mrr value: 94.0725 - type: nAUC_map_max value: 59.0089 - type: nAUC_map_std value: 69.9131 - type: nAUC_map_diff1 value: 5.900600000000001 - type: nAUC_mrr_max value: 84.5132 - type: nAUC_mrr_std value: 77.767 - type: nAUC_mrr_diff1 value: 46.5557 - type: main_score value: 79.33109999999999 - task: type: Retrieval dataset: name: MTEB SciFact (default) type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: ndcg_at_1 value: 51.333 - type: ndcg_at_3 value: 57.781000000000006 - type: ndcg_at_5 value: 60.925 - type: ndcg_at_10 value: 63.254 - type: ndcg_at_20 value: 64.955 - type: ndcg_at_100 value: 66.155 - type: ndcg_at_1000 value: 67.193 - type: map_at_1 value: 48.428 - type: map_at_3 value: 55.145999999999994 - type: map_at_5 value: 57.055 - type: map_at_10 value: 58.17 - type: map_at_20 value: 58.723000000000006 - type: map_at_100 value: 58.901 - type: map_at_1000 value: 58.940000000000005 - type: recall_at_1 value: 48.428 - type: recall_at_3 value: 62.55 - type: recall_at_5 value: 70.367 - type: recall_at_10 value: 76.972 - type: recall_at_20 value: 83.317 - type: recall_at_100 value: 89.7 - type: recall_at_1000 value: 98.0 - type: precision_at_1 value: 51.333 - type: precision_at_3 value: 22.444 - type: precision_at_5 value: 15.4 - type: precision_at_10 value: 8.6 - type: precision_at_20 value: 4.717 - type: precision_at_100 value: 1.02 - type: precision_at_1000 value: 0.11100000000000002 - type: mrr_at_1 value: 51.3333 - type: mrr_at_3 value: 57.5556 - type: mrr_at_5 value: 59.255599999999994 - type: mrr_at_10 value: 60.104400000000005 - type: mrr_at_20 value: 60.4592 - type: mrr_at_100 value: 60.590999999999994 - type: mrr_at_1000 value: 60.622299999999996 - type: nauc_ndcg_at_1_max value: 55.0684 - type: nauc_ndcg_at_1_std value: 13.461200000000002 - type: nauc_ndcg_at_1_diff1 value: 67.4931 - type: nauc_ndcg_at_3_max value: 54.1942 - type: nauc_ndcg_at_3_std value: 11.029300000000001 - type: nauc_ndcg_at_3_diff1 value: 61.4423 - type: nauc_ndcg_at_5_max value: 53.712199999999996 - type: nauc_ndcg_at_5_std value: 11.0586 - type: nauc_ndcg_at_5_diff1 value: 59.3723 - type: nauc_ndcg_at_10_max value: 55.2513 - type: nauc_ndcg_at_10_std value: 13.413400000000001 - type: nauc_ndcg_at_10_diff1 value: 58.5176 - type: nauc_ndcg_at_20_max value: 56.721900000000005 - type: nauc_ndcg_at_20_std value: 14.9832 - type: nauc_ndcg_at_20_diff1 value: 59.1445 - type: nauc_ndcg_at_100_max value: 56.5049 - type: nauc_ndcg_at_100_std value: 15.021799999999999 - type: nauc_ndcg_at_100_diff1 value: 59.4117 - type: nauc_ndcg_at_1000_max value: 56.0829 - type: nauc_ndcg_at_1000_std value: 14.4429 - type: nauc_ndcg_at_1000_diff1 value: 60.45700000000001 - type: nauc_map_at_1_max value: 50.901799999999994 - type: nauc_map_at_1_std value: 6.0093 - type: nauc_map_at_1_diff1 value: 66.6214 - type: nauc_map_at_3_max value: 52.684200000000004 
- type: nauc_map_at_3_std value: 7.9088 - type: nauc_map_at_3_diff1 value: 62.906600000000005 - type: nauc_map_at_5_max value: 52.6187 - type: nauc_map_at_5_std value: 8.2372 - type: nauc_map_at_5_diff1 value: 61.772000000000006 - type: nauc_map_at_10_max value: 53.317899999999995 - type: nauc_map_at_10_std value: 9.397 - type: nauc_map_at_10_diff1 value: 61.355599999999995 - type: nauc_map_at_20_max value: 54.04259999999999 - type: nauc_map_at_20_std value: 10.2201 - type: nauc_map_at_20_diff1 value: 61.684000000000005 - type: nauc_map_at_100_max value: 54.0394 - type: nauc_map_at_100_std value: 10.2894 - type: nauc_map_at_100_diff1 value: 61.7302 - type: nauc_map_at_1000_max value: 54.024300000000004 - type: nauc_map_at_1000_std value: 10.2881 - type: nauc_map_at_1000_diff1 value: 61.7661 - type: nauc_recall_at_1_max value: 50.901799999999994 - type: nauc_recall_at_1_std value: 6.0093 - type: nauc_recall_at_1_diff1 value: 66.6214 - type: nauc_recall_at_3_max value: 52.8806 - type: nauc_recall_at_3_std value: 10.7463 - type: nauc_recall_at_3_diff1 value: 55.5486 - type: nauc_recall_at_5_max value: 52.277300000000004 - type: nauc_recall_at_5_std value: 12.2395 - type: nauc_recall_at_5_diff1 value: 49.147800000000004 - type: nauc_recall_at_10_max value: 57.403499999999994 - type: nauc_recall_at_10_std value: 20.4581 - type: nauc_recall_at_10_diff1 value: 44.0595 - type: nauc_recall_at_20_max value: 65.5378 - type: nauc_recall_at_20_std value: 29.5288 - type: nauc_recall_at_20_diff1 value: 43.2217 - type: nauc_recall_at_100_max value: 67.4941 - type: nauc_recall_at_100_std value: 36.178399999999996 - type: nauc_recall_at_100_diff1 value: 39.3443 - type: nauc_recall_at_1000_max value: 72.50229999999999 - type: nauc_recall_at_1000_std value: 51.455 - type: nauc_recall_at_1000_diff1 value: 62.153800000000004 - type: nauc_precision_at_1_max value: 55.0684 - type: nauc_precision_at_1_std value: 13.461200000000002 - type: nauc_precision_at_1_diff1 value: 67.4931 - type: nauc_precision_at_3_max value: 54.947599999999994 - type: nauc_precision_at_3_std value: 23.1875 - type: nauc_precision_at_3_diff1 value: 51.166199999999996 - type: nauc_precision_at_5_max value: 50.1483 - type: nauc_precision_at_5_std value: 27.1119 - type: nauc_precision_at_5_diff1 value: 37.3846 - type: nauc_precision_at_10_max value: 46.800799999999995 - type: nauc_precision_at_10_std value: 37.737500000000004 - type: nauc_precision_at_10_diff1 value: 22.945999999999998 - type: nauc_precision_at_20_max value: 43.980000000000004 - type: nauc_precision_at_20_std value: 46.3352 - type: nauc_precision_at_20_diff1 value: 14.718300000000001 - type: nauc_precision_at_100_max value: 34.8346 - type: nauc_precision_at_100_std value: 49.0032 - type: nauc_precision_at_100_diff1 value: 4.7538 - type: nauc_precision_at_1000_max value: 19.9994 - type: nauc_precision_at_1000_std value: 51.132999999999996 - type: nauc_precision_at_1000_diff1 value: -6.5839 - type: nauc_mrr_at_1_max value: 55.0684 - type: nauc_mrr_at_1_std value: 13.461200000000002 - type: nauc_mrr_at_1_diff1 value: 67.4931 - type: nauc_mrr_at_3_max value: 56.2153 - type: nauc_mrr_at_3_std value: 15.4146 - type: nauc_mrr_at_3_diff1 value: 63.273199999999996 - type: nauc_mrr_at_5_max value: 56.0011 - type: nauc_mrr_at_5_std value: 15.7535 - type: nauc_mrr_at_5_diff1 value: 62.1466 - type: nauc_mrr_at_10_max value: 56.643100000000004 - type: nauc_mrr_at_10_std value: 16.354 - type: nauc_mrr_at_10_diff1 value: 62.0124 - type: nauc_mrr_at_20_max value: 56.686800000000005 - type: 
nauc_mrr_at_20_std value: 16.1984 - type: nauc_mrr_at_20_diff1 value: 62.095 - type: nauc_mrr_at_100_max value: 56.6659 - type: nauc_mrr_at_100_std value: 16.1601 - type: nauc_mrr_at_100_diff1 value: 62.157399999999996 - type: nauc_mrr_at_1000_max value: 56.657599999999995 - type: nauc_mrr_at_1000_std value: 16.1579 - type: nauc_mrr_at_1000_diff1 value: 62.195 - type: main_score value: 63.254 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions (default) type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: similarity_accuracy value: 99.7465 - type: similarity_accuracy_threshold value: 84.08489999999999 - type: similarity_f1 value: 86.9388 - type: similarity_f1_threshold value: 84.08489999999999 - type: similarity_precision value: 88.75 - type: similarity_recall value: 85.2 - type: similarity_ap value: 93.56139999999999 - type: cosine_accuracy value: 99.7465 - type: cosine_accuracy_threshold value: 84.08489999999999 - type: cosine_f1 value: 86.9388 - type: cosine_f1_threshold value: 84.08489999999999 - type: cosine_precision value: 88.75 - type: cosine_recall value: 85.2 - type: cosine_ap value: 93.56139999999999 - type: manhattan_accuracy value: 99.7614 - type: manhattan_accuracy_threshold value: 853.1299 - type: manhattan_f1 value: 87.7053 - type: manhattan_f1_threshold value: 888.5799999999999 - type: manhattan_precision value: 87.3142 - type: manhattan_recall value: 88.1 - type: manhattan_ap value: 94.0777 - type: euclidean_accuracy value: 99.7465 - type: euclidean_accuracy_threshold value: 56.4183 - type: euclidean_f1 value: 86.9388 - type: euclidean_f1_threshold value: 56.4183 - type: euclidean_precision value: 88.75 - type: euclidean_recall value: 85.2 - type: euclidean_ap value: 93.5613 - type: dot_accuracy value: 99.7465 - type: dot_accuracy_threshold value: 84.08489999999999 - type: dot_f1 value: 86.9388 - type: dot_f1_threshold value: 84.08489999999999 - type: dot_precision value: 88.75 - type: dot_recall value: 85.2 - type: dot_ap value: 93.56139999999999 - type: max_accuracy value: 99.7614 - type: max_f1 value: 87.7053 - type: max_precision value: 88.75 - type: max_recall value: 88.1 - type: max_ap value: 94.0777 - type: main_score value: 94.0777 - task: type: Clustering dataset: name: MTEB StackExchangeClustering (default) type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 54.13980000000001 - type: v_measure_std value: 5.5665 - type: main_score value: 54.13980000000001 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P (default) type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 32.6113 - type: v_measure_std value: 1.6389999999999998 - type: main_score value: 32.6113 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions (default) type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 50.813900000000004 - type: mrr value: 51.702099999999994 - type: nAUC_map_max value: 14.127600000000001 - type: nAUC_map_std value: 8.6735 - type: nAUC_map_diff1 value: 36.4317 - type: nAUC_mrr_max value: 15.504399999999999 - type: nAUC_mrr_std value: 9.7053 - type: nAUC_mrr_diff1 value: 36.7021 - type: main_score value: 50.813900000000004 - task: 
type: Retrieval dataset: name: MTEB StackOverflowQA (default) type: CoIR-Retrieval/stackoverflow-qa config: default split: test revision: db8f169f3894c14a00251061f957b2063eef2bd5 metrics: - type: ndcg_at_1 value: 54.26299999999999 - type: ndcg_at_3 value: 62.395 - type: ndcg_at_5 value: 64.603 - type: ndcg_at_10 value: 66.57600000000001 - type: ndcg_at_20 value: 68.089 - type: ndcg_at_100 value: 69.587 - type: ndcg_at_1000 value: 70.216 - type: map_at_1 value: 54.26299999999999 - type: map_at_3 value: 60.373 - type: map_at_5 value: 61.609 - type: map_at_10 value: 62.419999999999995 - type: map_at_20 value: 62.83800000000001 - type: map_at_100 value: 63.04 - type: map_at_1000 value: 63.063 - type: recall_at_1 value: 54.26299999999999 - type: recall_at_3 value: 68.255 - type: recall_at_5 value: 73.571 - type: recall_at_10 value: 79.689 - type: recall_at_20 value: 85.65700000000001 - type: recall_at_100 value: 93.781 - type: recall_at_1000 value: 98.79599999999999 - type: precision_at_1 value: 54.26299999999999 - type: precision_at_3 value: 22.752 - type: precision_at_5 value: 14.713999999999999 - type: precision_at_10 value: 7.968999999999999 - type: precision_at_20 value: 4.283 - type: precision_at_100 value: 0.938 - type: precision_at_1000 value: 0.099 - type: mrr_at_1 value: 54.2628 - type: mrr_at_3 value: 60.372800000000005 - type: mrr_at_5 value: 61.609 - type: mrr_at_10 value: 62.4202 - type: mrr_at_20 value: 62.83800000000001 - type: mrr_at_100 value: 63.0402 - type: mrr_at_1000 value: 63.06270000000001 - type: nauc_ndcg_at_1_max value: 61.3558 - type: nauc_ndcg_at_1_std value: -7.5783000000000005 - type: nauc_ndcg_at_1_diff1 value: 72.637 - type: nauc_ndcg_at_3_max value: 59.621900000000004 - type: nauc_ndcg_at_3_std value: -7.8752 - type: nauc_ndcg_at_3_diff1 value: 67.341 - type: nauc_ndcg_at_5_max value: 59.32150000000001 - type: nauc_ndcg_at_5_std value: -6.783500000000001 - type: nauc_ndcg_at_5_diff1 value: 66.3908 - type: nauc_ndcg_at_10_max value: 58.8665 - type: nauc_ndcg_at_10_std value: -6.8839999999999995 - type: nauc_ndcg_at_10_diff1 value: 65.5914 - type: nauc_ndcg_at_20_max value: 59.071 - type: nauc_ndcg_at_20_std value: -6.7216 - type: nauc_ndcg_at_20_diff1 value: 66.0076 - type: nauc_ndcg_at_100_max value: 59.2928 - type: nauc_ndcg_at_100_std value: -6.0869 - type: nauc_ndcg_at_100_diff1 value: 66.5509 - type: nauc_ndcg_at_1000_max value: 59.551 - type: nauc_ndcg_at_1000_std value: -6.3229 - type: nauc_ndcg_at_1000_diff1 value: 67.0501 - type: nauc_map_at_1_max value: 61.3558 - type: nauc_map_at_1_std value: -7.5783000000000005 - type: nauc_map_at_1_diff1 value: 72.637 - type: nauc_map_at_3_max value: 60.0638 - type: nauc_map_at_3_std value: -7.824599999999999 - type: nauc_map_at_3_diff1 value: 68.7255 - type: nauc_map_at_5_max value: 59.9035 - type: nauc_map_at_5_std value: -7.236199999999999 - type: nauc_map_at_5_diff1 value: 68.2474 - type: nauc_map_at_10_max value: 59.73159999999999 - type: nauc_map_at_10_std value: -7.3129 - type: nauc_map_at_10_diff1 value: 67.9742 - type: nauc_map_at_20_max value: 59.799800000000005 - type: nauc_map_at_20_std value: -7.2599 - type: nauc_map_at_20_diff1 value: 68.1128 - type: nauc_map_at_100_max value: 59.8324 - type: nauc_map_at_100_std value: -7.1589 - type: nauc_map_at_100_diff1 value: 68.1784 - type: nauc_map_at_1000_max value: 59.845099999999995 - type: nauc_map_at_1000_std value: -7.1592 - type: nauc_map_at_1000_diff1 value: 68.19770000000001 - type: nauc_recall_at_1_max value: 61.3558 - type: nauc_recall_at_1_std value: 
-7.5783000000000005 - type: nauc_recall_at_1_diff1 value: 72.637 - type: nauc_recall_at_3_max value: 58.1732 - type: nauc_recall_at_3_std value: -8.028599999999999 - type: nauc_recall_at_3_diff1 value: 62.7847 - type: nauc_recall_at_5_max value: 57.1488 - type: nauc_recall_at_5_std value: -4.9189 - type: nauc_recall_at_5_diff1 value: 59.392599999999995 - type: nauc_recall_at_10_max value: 54.7384 - type: nauc_recall_at_10_std value: -4.683 - type: nauc_recall_at_10_diff1 value: 54.317499999999995 - type: nauc_recall_at_20_max value: 54.5659 - type: nauc_recall_at_20_std value: -2.9657 - type: nauc_recall_at_20_diff1 value: 53.039899999999996 - type: nauc_recall_at_100_max value: 53.5805 - type: nauc_recall_at_100_std value: 12.822 - type: nauc_recall_at_100_diff1 value: 49.3168 - type: nauc_recall_at_1000_max value: 64.52839999999999 - type: nauc_recall_at_1000_std value: 44.954699999999995 - type: nauc_recall_at_1000_diff1 value: 51.3607 - type: nauc_precision_at_1_max value: 61.3558 - type: nauc_precision_at_1_std value: -7.5783000000000005 - type: nauc_precision_at_1_diff1 value: 72.637 - type: nauc_precision_at_3_max value: 58.1732 - type: nauc_precision_at_3_std value: -8.028599999999999 - type: nauc_precision_at_3_diff1 value: 62.7847 - type: nauc_precision_at_5_max value: 57.1488 - type: nauc_precision_at_5_std value: -4.9189 - type: nauc_precision_at_5_diff1 value: 59.392599999999995 - type: nauc_precision_at_10_max value: 54.7384 - type: nauc_precision_at_10_std value: -4.683 - type: nauc_precision_at_10_diff1 value: 54.317499999999995 - type: nauc_precision_at_20_max value: 54.5659 - type: nauc_precision_at_20_std value: -2.9657 - type: nauc_precision_at_20_diff1 value: 53.039899999999996 - type: nauc_precision_at_100_max value: 53.5805 - type: nauc_precision_at_100_std value: 12.822 - type: nauc_precision_at_100_diff1 value: 49.3168 - type: nauc_precision_at_1000_max value: 64.52839999999999 - type: nauc_precision_at_1000_std value: 44.954699999999995 - type: nauc_precision_at_1000_diff1 value: 51.3607 - type: nauc_mrr_at_1_max value: 61.3558 - type: nauc_mrr_at_1_std value: -7.5783000000000005 - type: nauc_mrr_at_1_diff1 value: 72.637 - type: nauc_mrr_at_3_max value: 60.0638 - type: nauc_mrr_at_3_std value: -7.824599999999999 - type: nauc_mrr_at_3_diff1 value: 68.7255 - type: nauc_mrr_at_5_max value: 59.9035 - type: nauc_mrr_at_5_std value: -7.236199999999999 - type: nauc_mrr_at_5_diff1 value: 68.2474 - type: nauc_mrr_at_10_max value: 59.73159999999999 - type: nauc_mrr_at_10_std value: -7.3129 - type: nauc_mrr_at_10_diff1 value: 67.9742 - type: nauc_mrr_at_20_max value: 59.799800000000005 - type: nauc_mrr_at_20_std value: -7.2599 - type: nauc_mrr_at_20_diff1 value: 68.1128 - type: nauc_mrr_at_100_max value: 59.8324 - type: nauc_mrr_at_100_std value: -7.1589 - type: nauc_mrr_at_100_diff1 value: 68.1784 - type: nauc_mrr_at_1000_max value: 59.845099999999995 - type: nauc_mrr_at_1000_std value: -7.1592 - type: nauc_mrr_at_1000_diff1 value: 68.19770000000001 - type: main_score value: 66.57600000000001 - task: type: Summarization dataset: name: MTEB SummEval (default) type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: pearson value: 31.255699999999997 - type: spearman value: 31.121 - type: cosine_spearman value: 31.121 - type: cosine_pearson value: 31.255699999999997 - type: dot_spearman value: 31.121 - type: dot_pearson value: 31.255699999999997 - type: main_score value: 31.121 - task: type: Retrieval dataset: name: 
MTEB SyntheticText2SQL (default) type: CoIR-Retrieval/synthetic-text2sql config: default split: test revision: 686b87296c3a0191b5d9415a00526c62db9fce09 metrics: - type: ndcg_at_1 value: 2.752 - type: ndcg_at_3 value: 32.669 - type: ndcg_at_5 value: 36.313 - type: ndcg_at_10 value: 39.341 - type: ndcg_at_20 value: 41.22 - type: ndcg_at_100 value: 43.682 - type: ndcg_at_1000 value: 44.679 - type: map_at_1 value: 2.752 - type: map_at_3 value: 25.918999999999997 - type: map_at_5 value: 27.939000000000004 - type: map_at_10 value: 29.195999999999998 - type: map_at_20 value: 29.711 - type: map_at_100 value: 30.057000000000002 - type: map_at_1000 value: 30.092999999999996 - type: recall_at_1 value: 2.752 - type: recall_at_3 value: 51.957 - type: recall_at_5 value: 60.809999999999995 - type: recall_at_10 value: 70.14200000000001 - type: recall_at_20 value: 77.576 - type: recall_at_100 value: 90.771 - type: recall_at_1000 value: 98.667 - type: precision_at_1 value: 2.752 - type: precision_at_3 value: 17.319000000000003 - type: precision_at_5 value: 12.162 - type: precision_at_10 value: 7.013999999999999 - type: precision_at_20 value: 3.879 - type: precision_at_100 value: 0.9079999999999999 - type: precision_at_1000 value: 0.099 - type: mrr_at_1 value: 23.534399999999998 - type: mrr_at_3 value: 37.8739 - type: mrr_at_5 value: 39.6078 - type: mrr_at_10 value: 40.7592 - type: mrr_at_20 value: 41.2449 - type: mrr_at_100 value: 41.5832 - type: mrr_at_1000 value: 41.6198 - type: nauc_ndcg_at_1_max value: 13.625200000000001 - type: nauc_ndcg_at_1_std value: -17.2342 - type: nauc_ndcg_at_1_diff1 value: 72.20830000000001 - type: nauc_ndcg_at_3_max value: 33.5059 - type: nauc_ndcg_at_3_std value: -15.198400000000001 - type: nauc_ndcg_at_3_diff1 value: -55.0763 - type: nauc_ndcg_at_5_max value: 31.461699999999997 - type: nauc_ndcg_at_5_std value: -15.857899999999999 - type: nauc_ndcg_at_5_diff1 value: -51.2902 - type: nauc_ndcg_at_10_max value: 30.206699999999998 - type: nauc_ndcg_at_10_std value: -15.9071 - type: nauc_ndcg_at_10_diff1 value: -48.7532 - type: nauc_ndcg_at_20_max value: 29.5645 - type: nauc_ndcg_at_20_std value: -15.509400000000001 - type: nauc_ndcg_at_20_diff1 value: -47.8463 - type: nauc_ndcg_at_100_max value: 29.8902 - type: nauc_ndcg_at_100_std value: -14.0898 - type: nauc_ndcg_at_100_diff1 value: -46.7294 - type: nauc_ndcg_at_1000_max value: 30.285800000000002 - type: nauc_ndcg_at_1000_std value: -14.7898 - type: nauc_ndcg_at_1000_diff1 value: -47.0235 - type: nauc_map_at_1_max value: 13.625200000000001 - type: nauc_map_at_1_std value: -17.2342 - type: nauc_map_at_1_diff1 value: 72.20830000000001 - type: nauc_map_at_3_max value: 32.7681 - type: nauc_map_at_3_std value: -15.386700000000001 - type: nauc_map_at_3_diff1 value: -49.9214 - type: nauc_map_at_5_max value: 31.436799999999998 - type: nauc_map_at_5_std value: -15.8028 - type: nauc_map_at_5_diff1 value: -47.2353 - type: nauc_map_at_10_max value: 30.857200000000002 - type: nauc_map_at_10_std value: -15.878200000000001 - type: nauc_map_at_10_diff1 value: -45.9157 - type: nauc_map_at_20_max value: 30.660300000000003 - type: nauc_map_at_20_std value: -15.7674 - type: nauc_map_at_20_diff1 value: -45.5729 - type: nauc_map_at_100_max value: 30.7164 - type: nauc_map_at_100_std value: -15.579200000000002 - type: nauc_map_at_100_diff1 value: -45.3606 - type: nauc_map_at_1000_max value: 30.728 - type: nauc_map_at_1000_std value: -15.598600000000001 - type: nauc_map_at_1000_diff1 value: -45.367200000000004 - type: nauc_recall_at_1_max value: 
13.625200000000001 - type: nauc_recall_at_1_std value: -17.2342 - type: nauc_recall_at_1_diff1 value: 72.20830000000001 - type: nauc_recall_at_3_max value: 34.6344 - type: nauc_recall_at_3_std value: -14.868200000000002 - type: nauc_recall_at_3_diff1 value: -63.1221 - type: nauc_recall_at_5_max value: 31.1334 - type: nauc_recall_at_5_std value: -16.0306 - type: nauc_recall_at_5_diff1 value: -57.4562 - type: nauc_recall_at_10_max value: 27.9709 - type: nauc_recall_at_10_std value: -15.9834 - type: nauc_recall_at_10_diff1 value: -52.4094 - type: nauc_recall_at_20_max value: 25.136599999999998 - type: nauc_recall_at_20_std value: -14.491000000000001 - type: nauc_recall_at_20_diff1 value: -50.1152 - type: nauc_recall_at_100_max value: 23.1454 - type: nauc_recall_at_100_std value: 1.0654000000000001 - type: nauc_recall_at_100_diff1 value: -42.3044 - type: nauc_recall_at_1000_max value: 23.3796 - type: nauc_recall_at_1000_std value: 18.206 - type: nauc_recall_at_1000_diff1 value: -44.292300000000004 - type: nauc_precision_at_1_max value: 13.625200000000001 - type: nauc_precision_at_1_std value: -17.2342 - type: nauc_precision_at_1_diff1 value: 72.20830000000001 - type: nauc_precision_at_3_max value: 34.6344 - type: nauc_precision_at_3_std value: -14.868200000000002 - type: nauc_precision_at_3_diff1 value: -63.1221 - type: nauc_precision_at_5_max value: 31.1334 - type: nauc_precision_at_5_std value: -16.0306 - type: nauc_precision_at_5_diff1 value: -57.4562 - type: nauc_precision_at_10_max value: 27.9709 - type: nauc_precision_at_10_std value: -15.9834 - type: nauc_precision_at_10_diff1 value: -52.4094 - type: nauc_precision_at_20_max value: 25.136599999999998 - type: nauc_precision_at_20_std value: -14.491000000000001 - type: nauc_precision_at_20_diff1 value: -50.1152 - type: nauc_precision_at_100_max value: 23.1454 - type: nauc_precision_at_100_std value: 1.0654000000000001 - type: nauc_precision_at_100_diff1 value: -42.3044 - type: nauc_precision_at_1000_max value: 23.3796 - type: nauc_precision_at_1000_std value: 18.206 - type: nauc_precision_at_1000_diff1 value: -44.292300000000004 - type: nauc_mrr_at_1_max value: 21.4193 - type: nauc_mrr_at_1_std value: -10.3504 - type: nauc_mrr_at_1_diff1 value: -39.323 - type: nauc_mrr_at_3_max value: 28.0993 - type: nauc_mrr_at_3_std value: -12.9194 - type: nauc_mrr_at_3_diff1 value: -52.07580000000001 - type: nauc_mrr_at_5_max value: 27.378999999999998 - type: nauc_mrr_at_5_std value: -13.184299999999999 - type: nauc_mrr_at_5_diff1 value: -51.0092 - type: nauc_mrr_at_10_max value: 26.9761 - type: nauc_mrr_at_10_std value: -13.0161 - type: nauc_mrr_at_10_diff1 value: -50.266200000000005 - type: nauc_mrr_at_20_max value: 26.8175 - type: nauc_mrr_at_20_std value: -12.9521 - type: nauc_mrr_at_20_diff1 value: -50.137699999999995 - type: nauc_mrr_at_100_max value: 26.8202 - type: nauc_mrr_at_100_std value: -12.809000000000001 - type: nauc_mrr_at_100_diff1 value: -50.0703 - type: nauc_mrr_at_1000_max value: 26.8223 - type: nauc_mrr_at_1000_std value: -12.8169 - type: nauc_mrr_at_1000_diff1 value: -50.0798 - type: main_score value: 39.341 - task: type: Retrieval dataset: name: MTEB TRECCOVID (default) type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: ndcg_at_1 value: 59.0 - type: ndcg_at_3 value: 61.173 - type: ndcg_at_5 value: 61.927 - type: ndcg_at_10 value: 62.815 - type: ndcg_at_20 value: 60.716 - type: ndcg_at_100 value: 50.699000000000005 - type: ndcg_at_1000 value: 46.711999999999996 - 
type: map_at_1 value: 0.186 - type: map_at_3 value: 0.47200000000000003 - type: map_at_5 value: 0.749 - type: map_at_10 value: 1.43 - type: map_at_20 value: 2.608 - type: map_at_100 value: 8.876000000000001 - type: map_at_1000 value: 22.055 - type: recall_at_1 value: 0.186 - type: recall_at_3 value: 0.519 - type: recall_at_5 value: 0.8699999999999999 - type: recall_at_10 value: 1.773 - type: recall_at_20 value: 3.338 - type: recall_at_100 value: 12.516 - type: recall_at_1000 value: 44.699 - type: precision_at_1 value: 66.0 - type: precision_at_3 value: 67.333 - type: precision_at_5 value: 67.60000000000001 - type: precision_at_10 value: 68.4 - type: precision_at_20 value: 65.7 - type: precision_at_100 value: 52.78 - type: precision_at_1000 value: 21.048000000000002 - type: mrr_at_1 value: 66.0 - type: mrr_at_3 value: 77.33330000000001 - type: mrr_at_5 value: 78.3333 - type: mrr_at_10 value: 79.3333 - type: mrr_at_20 value: 79.3333 - type: mrr_at_100 value: 79.3333 - type: mrr_at_1000 value: 79.3333 - type: nauc_ndcg_at_1_max value: 17.5939 - type: nauc_ndcg_at_1_std value: 18.9798 - type: nauc_ndcg_at_1_diff1 value: 7.1539 - type: nauc_ndcg_at_3_max value: 29.7636 - type: nauc_ndcg_at_3_std value: 31.7841 - type: nauc_ndcg_at_3_diff1 value: 7.1419 - type: nauc_ndcg_at_5_max value: 29.316 - type: nauc_ndcg_at_5_std value: 46.3408 - type: nauc_ndcg_at_5_diff1 value: -0.4602 - type: nauc_ndcg_at_10_max value: 27.446900000000003 - type: nauc_ndcg_at_10_std value: 53.37 - type: nauc_ndcg_at_10_diff1 value: -4.2545 - type: nauc_ndcg_at_20_max value: 30.0264 - type: nauc_ndcg_at_20_std value: 58.7602 - type: nauc_ndcg_at_20_diff1 value: -9.146899999999999 - type: nauc_ndcg_at_100_max value: 37.939299999999996 - type: nauc_ndcg_at_100_std value: 75.0271 - type: nauc_ndcg_at_100_diff1 value: -16.2298 - type: nauc_ndcg_at_1000_max value: 40.1712 - type: nauc_ndcg_at_1000_std value: 80.865 - type: nauc_ndcg_at_1000_diff1 value: -20.5847 - type: nauc_map_at_1_max value: 16.9528 - type: nauc_map_at_1_std value: -0.49119999999999997 - type: nauc_map_at_1_diff1 value: 14.029 - type: nauc_map_at_3_max value: 22.714000000000002 - type: nauc_map_at_3_std value: 4.587 - type: nauc_map_at_3_diff1 value: 18.4359 - type: nauc_map_at_5_max value: 26.631700000000002 - type: nauc_map_at_5_std value: 16.3506 - type: nauc_map_at_5_diff1 value: 15.8387 - type: nauc_map_at_10_max value: 26.4635 - type: nauc_map_at_10_std value: 22.819300000000002 - type: nauc_map_at_10_diff1 value: 9.7916 - type: nauc_map_at_20_max value: 29.7699 - type: nauc_map_at_20_std value: 34.153099999999995 - type: nauc_map_at_20_diff1 value: 1.4186 - type: nauc_map_at_100_max value: 41.5138 - type: nauc_map_at_100_std value: 68.24799999999999 - type: nauc_map_at_100_diff1 value: -12.2417 - type: nauc_map_at_1000_max value: 45.9887 - type: nauc_map_at_1000_std value: 82.8023 - type: nauc_map_at_1000_diff1 value: -20.608999999999998 - type: nauc_recall_at_1_max value: 16.9528 - type: nauc_recall_at_1_std value: -0.49119999999999997 - type: nauc_recall_at_1_diff1 value: 14.029 - type: nauc_recall_at_3_max value: 22.601 - type: nauc_recall_at_3_std value: 5.037 - type: nauc_recall_at_3_diff1 value: 20.4189 - type: nauc_recall_at_5_max value: 23.8002 - type: nauc_recall_at_5_std value: 17.2469 - type: nauc_recall_at_5_diff1 value: 15.3806 - type: nauc_recall_at_10_max value: 20.0149 - type: nauc_recall_at_10_std value: 17.2152 - type: nauc_recall_at_10_diff1 value: 8.289 - type: nauc_recall_at_20_max value: 23.2578 - type: nauc_recall_at_20_std 
value: 25.9678 - type: nauc_recall_at_20_diff1 value: 1.6708 - type: nauc_recall_at_100_max value: 34.7341 - type: nauc_recall_at_100_std value: 59.1777 - type: nauc_recall_at_100_diff1 value: -10.6132 - type: nauc_recall_at_1000_max value: 36.492599999999996 - type: nauc_recall_at_1000_std value: 74.2008 - type: nauc_recall_at_1000_diff1 value: -21.9119 - type: nauc_precision_at_1_max value: 25.7227 - type: nauc_precision_at_1_std value: 14.152500000000002 - type: nauc_precision_at_1_diff1 value: 11.1952 - type: nauc_precision_at_3_max value: 35.1261 - type: nauc_precision_at_3_std value: 31.342399999999998 - type: nauc_precision_at_3_diff1 value: 3.0915999999999997 - type: nauc_precision_at_5_max value: 33.8418 - type: nauc_precision_at_5_std value: 52.1046 - type: nauc_precision_at_5_diff1 value: -5.7694 - type: nauc_precision_at_10_max value: 29.5701 - type: nauc_precision_at_10_std value: 56.474999999999994 - type: nauc_precision_at_10_diff1 value: -11.305800000000001 - type: nauc_precision_at_20_max value: 37.1605 - type: nauc_precision_at_20_std value: 62.65690000000001 - type: nauc_precision_at_20_diff1 value: -16.114600000000003 - type: nauc_precision_at_100_max value: 42.5736 - type: nauc_precision_at_100_std value: 77.8946 - type: nauc_precision_at_100_diff1 value: -18.5221 - type: nauc_precision_at_1000_max value: 31.0108 - type: nauc_precision_at_1000_std value: 54.306200000000004 - type: nauc_precision_at_1000_diff1 value: -20.7365 - type: nauc_mrr_at_1_max value: 25.7227 - type: nauc_mrr_at_1_std value: 14.152500000000002 - type: nauc_mrr_at_1_diff1 value: 11.1952 - type: nauc_mrr_at_3_max value: 37.1749 - type: nauc_mrr_at_3_std value: 32.7833 - type: nauc_mrr_at_3_diff1 value: 5.9276 - type: nauc_mrr_at_5_max value: 34.5503 - type: nauc_mrr_at_5_std value: 31.1188 - type: nauc_mrr_at_5_diff1 value: 2.9541 - type: nauc_mrr_at_10_max value: 32.3008 - type: nauc_mrr_at_10_std value: 27.4621 - type: nauc_mrr_at_10_diff1 value: 5.944599999999999 - type: nauc_mrr_at_20_max value: 32.3008 - type: nauc_mrr_at_20_std value: 27.4621 - type: nauc_mrr_at_20_diff1 value: 5.944599999999999 - type: nauc_mrr_at_100_max value: 32.3008 - type: nauc_mrr_at_100_std value: 27.4621 - type: nauc_mrr_at_100_diff1 value: 5.944599999999999 - type: nauc_mrr_at_1000_max value: 32.3008 - type: nauc_mrr_at_1000_std value: 27.4621 - type: nauc_mrr_at_1000_diff1 value: 5.944599999999999 - type: main_score value: 62.815 - task: type: Retrieval dataset: name: MTEB Touche2020 (default) type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: ndcg_at_1 value: 34.694 - type: ndcg_at_3 value: 27.976 - type: ndcg_at_5 value: 27.029999999999998 - type: ndcg_at_10 value: 24.853 - type: ndcg_at_20 value: 26.188 - type: ndcg_at_100 value: 36.225 - type: ndcg_at_1000 value: 47.583999999999996 - type: map_at_1 value: 2.987 - type: map_at_3 value: 4.9799999999999995 - type: map_at_5 value: 7.170999999999999 - type: map_at_10 value: 9.788 - type: map_at_20 value: 12.379 - type: map_at_100 value: 15.692 - type: map_at_1000 value: 17.27 - type: recall_at_1 value: 2.987 - type: recall_at_3 value: 6.084 - type: recall_at_5 value: 9.609 - type: recall_at_10 value: 15.512 - type: recall_at_20 value: 24.248 - type: recall_at_100 value: 46.916999999999994 - type: recall_at_1000 value: 80.447 - type: precision_at_1 value: 36.735 - type: precision_at_3 value: 27.891 - type: precision_at_5 value: 26.531 - type: precision_at_10 value: 22.041 - type: precision_at_20 
value: 17.347 - type: precision_at_100 value: 7.550999999999999 - type: precision_at_1000 value: 1.492 - type: mrr_at_1 value: 36.7347 - type: mrr_at_3 value: 46.258500000000005 - type: mrr_at_5 value: 47.585 - type: mrr_at_10 value: 49.4266 - type: mrr_at_20 value: 50.4374 - type: mrr_at_100 value: 50.6221 - type: mrr_at_1000 value: 50.6221 - type: nauc_ndcg_at_1_max value: -30.5017 - type: nauc_ndcg_at_1_std value: 20.9115 - type: nauc_ndcg_at_1_diff1 value: 14.0996 - type: nauc_ndcg_at_3_max value: -32.4852 - type: nauc_ndcg_at_3_std value: 7.378500000000001 - type: nauc_ndcg_at_3_diff1 value: 6.1796 - type: nauc_ndcg_at_5_max value: -31.3343 - type: nauc_ndcg_at_5_std value: -1.8091 - type: nauc_ndcg_at_5_diff1 value: 2.7997 - type: nauc_ndcg_at_10_max value: -28.2383 - type: nauc_ndcg_at_10_std value: -3.1220999999999997 - type: nauc_ndcg_at_10_diff1 value: 10.0107 - type: nauc_ndcg_at_20_max value: -33.4679 - type: nauc_ndcg_at_20_std value: -8.3618 - type: nauc_ndcg_at_20_diff1 value: 7.3284 - type: nauc_ndcg_at_100_max value: -33.0007 - type: nauc_ndcg_at_100_std value: 18.1058 - type: nauc_ndcg_at_100_diff1 value: 7.5906 - type: nauc_ndcg_at_1000_max value: -30.4942 - type: nauc_ndcg_at_1000_std value: 29.7125 - type: nauc_ndcg_at_1000_diff1 value: 4.3626 - type: nauc_map_at_1_max value: -27.8899 - type: nauc_map_at_1_std value: -2.694 - type: nauc_map_at_1_diff1 value: 15.2888 - type: nauc_map_at_3_max value: -28.008499999999998 - type: nauc_map_at_3_std value: -8.2292 - type: nauc_map_at_3_diff1 value: 11.0099 - type: nauc_map_at_5_max value: -25.1626 - type: nauc_map_at_5_std value: -14.2187 - type: nauc_map_at_5_diff1 value: 4.6605 - type: nauc_map_at_10_max value: -21.1923 - type: nauc_map_at_10_std value: -16.653299999999998 - type: nauc_map_at_10_diff1 value: 6.869599999999999 - type: nauc_map_at_20_max value: -24.2959 - type: nauc_map_at_20_std value: -17.707 - type: nauc_map_at_20_diff1 value: 6.6531 - type: nauc_map_at_100_max value: -24.9706 - type: nauc_map_at_100_std value: -6.2074 - type: nauc_map_at_100_diff1 value: 7.940300000000001 - type: nauc_map_at_1000_max value: -24.5016 - type: nauc_map_at_1000_std value: -1.7534 - type: nauc_map_at_1000_diff1 value: 7.0978 - type: nauc_recall_at_1_max value: -27.8899 - type: nauc_recall_at_1_std value: -2.694 - type: nauc_recall_at_1_diff1 value: 15.2888 - type: nauc_recall_at_3_max value: -33.166000000000004 - type: nauc_recall_at_3_std value: -13.9572 - type: nauc_recall_at_3_diff1 value: 6.8492999999999995 - type: nauc_recall_at_5_max value: -26.5866 - type: nauc_recall_at_5_std value: -18.4333 - type: nauc_recall_at_5_diff1 value: 0.9511999999999999 - type: nauc_recall_at_10_max value: -23.4865 - type: nauc_recall_at_10_std value: -17.3336 - type: nauc_recall_at_10_diff1 value: 9.8763 - type: nauc_recall_at_20_max value: -34.451 - type: nauc_recall_at_20_std value: -18.5261 - type: nauc_recall_at_20_diff1 value: 8.4592 - type: nauc_recall_at_100_max value: -31.3903 - type: nauc_recall_at_100_std value: 30.2519 - type: nauc_recall_at_100_diff1 value: 9.4903 - type: nauc_recall_at_1000_max value: -20.7349 - type: nauc_recall_at_1000_std value: 72.50229999999999 - type: nauc_recall_at_1000_diff1 value: -0.7664 - type: nauc_precision_at_1_max value: -27.048 - type: nauc_precision_at_1_std value: 18.2883 - type: nauc_precision_at_1_diff1 value: 18.5083 - type: nauc_precision_at_3_max value: -31.4006 - type: nauc_precision_at_3_std value: -1.9464 - type: nauc_precision_at_3_diff1 value: 5.7819 - type: nauc_precision_at_5_max 
value: -25.740800000000004 - type: nauc_precision_at_5_std value: -11.5328 - type: nauc_precision_at_5_diff1 value: 0.4881 - type: nauc_precision_at_10_max value: -20.8035 - type: nauc_precision_at_10_std value: -9.3623 - type: nauc_precision_at_10_diff1 value: 13.7272 - type: nauc_precision_at_20_max value: -27.124399999999998 - type: nauc_precision_at_20_std value: -4.7749 - type: nauc_precision_at_20_diff1 value: 6.5773 - type: nauc_precision_at_100_max value: -7.2334 - type: nauc_precision_at_100_std value: 60.89639999999999 - type: nauc_precision_at_100_diff1 value: 3.9092000000000002 - type: nauc_precision_at_1000_max value: 33.7911 - type: nauc_precision_at_1000_std value: 44.2182 - type: nauc_precision_at_1000_diff1 value: -11.840399999999999 - type: nauc_mrr_at_1_max value: -27.048 - type: nauc_mrr_at_1_std value: 18.2883 - type: nauc_mrr_at_1_diff1 value: 18.5083 - type: nauc_mrr_at_3_max value: -35.0702 - type: nauc_mrr_at_3_std value: 11.0891 - type: nauc_mrr_at_3_diff1 value: 11.4635 - type: nauc_mrr_at_5_max value: -35.9339 - type: nauc_mrr_at_5_std value: 11.4561 - type: nauc_mrr_at_5_diff1 value: 11.792900000000001 - type: nauc_mrr_at_10_max value: -35.5993 - type: nauc_mrr_at_10_std value: 13.369800000000001 - type: nauc_mrr_at_10_diff1 value: 14.168 - type: nauc_mrr_at_20_max value: -35.587 - type: nauc_mrr_at_20_std value: 12.8052 - type: nauc_mrr_at_20_diff1 value: 13.6937 - type: nauc_mrr_at_100_max value: -35.424 - type: nauc_mrr_at_100_std value: 13.0847 - type: nauc_mrr_at_100_diff1 value: 13.5063 - type: nauc_mrr_at_1000_max value: -35.424 - type: nauc_mrr_at_1000_std value: 13.0847 - type: nauc_mrr_at_1000_diff1 value: 13.5063 - type: main_score value: 24.853 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification (default) type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 60.380900000000004 - type: f1 value: 46.8295 - type: f1_weighted value: 69.05930000000001 - type: ap value: 10.5988 - type: ap_weighted value: 10.5988 - type: main_score value: 60.380900000000004 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification (default) type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 58.537099999999995 - type: f1 value: 58.7006 - type: f1_weighted value: 58.013400000000004 - type: main_score value: 58.537099999999995 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering (default) type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 36.6842 - type: v_measure_std value: 1.9854 - type: main_score value: 36.6842 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 (default) type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: similarity_accuracy value: 82.3866 - type: similarity_accuracy_threshold value: 87.0467 - type: similarity_f1 value: 58.4102 - type: similarity_f1_threshold value: 82.61540000000001 - type: similarity_precision value: 52.937400000000004 - type: similarity_recall value: 65.1451 - type: similarity_ap value: 61.6413 - type: cosine_accuracy value: 82.3866 - type: cosine_accuracy_threshold value: 87.0467 - type: cosine_f1 value: 58.4102 - type: cosine_f1_threshold value: 
82.61540000000001 - type: cosine_precision value: 52.937400000000004 - type: cosine_recall value: 65.1451 - type: cosine_ap value: 61.6413 - type: manhattan_accuracy value: 82.12429999999999 - type: manhattan_accuracy_threshold value: 786.2048 - type: manhattan_f1 value: 57.862899999999996 - type: manhattan_f1_threshold value: 911.9348 - type: manhattan_precision value: 50.2725 - type: manhattan_recall value: 68.15299999999999 - type: manhattan_ap value: 60.6893 - type: euclidean_accuracy value: 82.3866 - type: euclidean_accuracy_threshold value: 50.8985 - type: euclidean_f1 value: 58.4102 - type: euclidean_f1_threshold value: 58.9654 - type: euclidean_precision value: 52.937400000000004 - type: euclidean_recall value: 65.1451 - type: euclidean_ap value: 61.6413 - type: dot_accuracy value: 82.3866 - type: dot_accuracy_threshold value: 87.0467 - type: dot_f1 value: 58.4102 - type: dot_f1_threshold value: 82.61540000000001 - type: dot_precision value: 52.937400000000004 - type: dot_recall value: 65.1451 - type: dot_ap value: 61.6413 - type: max_accuracy value: 82.3866 - type: max_f1 value: 58.4102 - type: max_precision value: 52.937400000000004 - type: max_recall value: 68.15299999999999 - type: max_ap value: 61.6413 - type: main_score value: 61.6413 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus (default) type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: similarity_accuracy value: 88.77629999999999 - type: similarity_accuracy_threshold value: 82.2251 - type: similarity_f1 value: 77.3613 - type: similarity_f1_threshold value: 80.3174 - type: similarity_precision value: 75.0906 - type: similarity_recall value: 79.7736 - type: similarity_ap value: 85.6694 - type: cosine_accuracy value: 88.77629999999999 - type: cosine_accuracy_threshold value: 82.2251 - type: cosine_f1 value: 77.3613 - type: cosine_f1_threshold value: 80.3174 - type: cosine_precision value: 75.0906 - type: cosine_recall value: 79.7736 - type: cosine_ap value: 85.6694 - type: manhattan_accuracy value: 88.7317 - type: manhattan_accuracy_threshold value: 914.4955 - type: manhattan_f1 value: 77.1707 - type: manhattan_f1_threshold value: 946.5603 - type: manhattan_precision value: 76.2825 - type: manhattan_recall value: 78.0798 - type: manhattan_ap value: 85.5718 - type: euclidean_accuracy value: 88.77629999999999 - type: euclidean_accuracy_threshold value: 59.6237 - type: euclidean_f1 value: 77.3613 - type: euclidean_f1_threshold value: 62.7417 - type: euclidean_precision value: 75.0906 - type: euclidean_recall value: 79.7736 - type: euclidean_ap value: 85.6694 - type: dot_accuracy value: 88.77629999999999 - type: dot_accuracy_threshold value: 82.2251 - type: dot_f1 value: 77.3613 - type: dot_f1_threshold value: 80.3174 - type: dot_precision value: 75.0906 - type: dot_recall value: 79.7736 - type: dot_ap value: 85.6694 - type: max_accuracy value: 88.77629999999999 - type: max_f1 value: 77.3613 - type: max_precision value: 76.2825 - type: max_recall value: 79.7736 - type: max_ap value: 85.6694 - type: main_score value: 85.6694 --- # Granite-Embedding-107m-multilingual **Model Summary:** Granite-Embedding-107M-Multilingual is a 107M parameter dense biencoder embedding model from the Granite Embeddings suite that can be used to generate high quality text embeddings. 
This model produces embedding vectors of size 384 and is trained using a combination of open-source relevance-pair datasets with permissive, enterprise-friendly licenses, and IBM-collected and generated datasets. This model is developed using contrastive finetuning, knowledge distillation and model merging for improved performance.

- **Developers:** Granite Embedding Team, IBM
- **GitHub Repository:** [ibm-granite/granite-embedding-models](https://github.com/ibm-granite/granite-embedding-models)
- **Website:** [Granite Docs](https://www.ibm.com/granite/docs/)
- **Paper:** Coming Soon
- **Release Date:** December 18th, 2024
- **License:** [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0)

**Supported Languages:** English, German, Spanish, French, Japanese, Portuguese, Arabic, Czech, Italian, Korean, Dutch, and Chinese. Users may finetune Granite-Embedding-107M-Multilingual for languages beyond these 12 languages.

**Intended use:** The model is designed to produce fixed-length vector representations for a given text, which can be used for text similarity, retrieval, and search applications.

**Usage with Sentence Transformers:** The model is compatible with the SentenceTransformer library and is very easy to use:

First, install the sentence transformers library:
```shell
pip install sentence_transformers
```

The model can then be used to encode pairs of text and find the similarity between their representations:
```python
from sentence_transformers import SentenceTransformer, util

model_path = "ibm-granite/granite-embedding-107m-multilingual"
# Load the Sentence Transformer model
model = SentenceTransformer(model_path)

input_queries = [
    ' Who made the song My achy breaky heart? ',
    'summit define'
]

input_passages = [
    "Achy Breaky Heart is a country song written by Don Von Tress. Originally titled Don't Tell My Heart and performed by The Marcy Brothers in 1991. ",
    "Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments."
]

# encode queries and passages
query_embeddings = model.encode(input_queries)
passage_embeddings = model.encode(input_passages)

# calculate cosine similarity
print(util.cos_sim(query_embeddings, passage_embeddings))
```

**Usage with Huggingface Transformers:** This is a simple example of how to use the Granite-Embedding-107m-Multilingual model with the Transformers library and PyTorch.

First, install the required libraries:
```shell
pip install transformers torch
```

The model can then be used to encode text:
```python
import torch
from transformers import AutoModel, AutoTokenizer

model_path = "ibm-granite/granite-embedding-107m-multilingual"

# Load the model and tokenizer
model = AutoModel.from_pretrained(model_path)
tokenizer = AutoTokenizer.from_pretrained(model_path)
model.eval()

input_queries = [
    ' Who made the song My achy breaky heart? ',
    'summit define'
]

# tokenize inputs
tokenized_queries = tokenizer(input_queries, padding=True, truncation=True, return_tensors='pt')

# encode queries
with torch.no_grad():
    # Queries
    model_output = model(**tokenized_queries)
    # Perform pooling. granite-embedding-107m-multilingual uses CLS Pooling
    query_embeddings = model_output[0][:, 0]
    # normalize the embeddings
    query_embeddings = torch.nn.functional.normalize(query_embeddings, dim=1)
```

**Evaluation:** The average performance of Granite-Embedding-107M-Multilingual on Multilingual Miracl (across 18 languages), Mintaka Retrieval (across 8 languages) and MTEB Retrieval for English (across 15 tasks), German (across 4 tasks), Spanish (across 2 tasks), French (across 5 tasks), Japanese (across 2 tasks), Arabic (1 task), Korean (1 task) and Chinese (across 8 tasks) is reported below. Granite-Embedding-107M-Multilingual is twice as fast as other models with similar embedding dimensions.

| Model | Parameters (M) | Embedding Dimension | Miracl (18) | Mintaka Retrieval (8) | MTEB English (15) | MTEB German (4) | MTEB Spanish (2) | MTEB French (5) | MTEB Japanese (2) | MTEB Arabic (1) | MTEB Korean (1) | MTEB Chinese (8) |
|------------------------------------|:------------:|:-------------------:|:-------------:|:---------------------:|:-----------------:|:---------------:|:---------------:|:---------------:|:----------------:|:----------------:|----------------:|-----------------:|
| granite-embedding-107m-multilingual | 107 | 384 | 55.9 | 22.6 | 45.3 | 70.3 | 48.7 | 51.1 | 59.0 | 63.2 | 70.5 | 40.8 |

**Model Architecture:** Granite-Embedding-107m-Multilingual is based on an encoder-only, XLM-RoBERTa-like transformer architecture, trained internally at IBM Research.

| Model | granite-embedding-30m-english | granite-embedding-125m-english | granite-embedding-107m-multilingual | granite-embedding-278m-multilingual |
| :--------- | :-------:| :--------: | :---------:| :-----:|
| Embedding size | 384 | 768 | **384** | 768 |
| Number of layers | 6 | 12 | **6** | 12 |
| Number of attention heads | 12 | 12 | **12** | 12 |
| Intermediate size | 1536 | 3072 | **1536** | 3072 |
| Activation Function | GeLU | GeLU | **GeLU** | GeLU |
| Vocabulary Size | 50265 | 50265 | **250002** | 250002 |
| Max. Sequence Length | 512 | 512 | **512** | 512 |
| # Parameters | 30M | 125M | **107M** | 278M |

**Training Data:** Overall, the training data consists of four key sources: (1) unsupervised title-body paired data scraped from the web, (2) publicly available paired data with permissive, enterprise-friendly licenses, (3) IBM-internal paired data targeting specific technical domains, and (4) IBM-generated synthetic data. The data is listed below:

| **Dataset** | **Num. Pairs** |
|:--------------------------------------------------------------------------|:--------------:|
| Multilingual MC4 | 52,823,484 |
| Multilingual Webhose | 12,369,322 |
| English Wikipedia | 20,745,403 |
| Multilingual Wikimedia | 2,911,090 |
| Miracl Corpus (Title-Body) | 10,120,398 |
| Stack Exchange Duplicate questions (titles) | 304,525 |
| Stack Exchange Duplicate questions (bodies) | 250,519 |
| Machine Translations of Stack Exchange Duplicate questions (titles) | 187,195 |
| Stack Exchange (Title, Answer) pairs | 4,067,139 |
| Stack Exchange (Title, Body) pairs | 23,978,013 |
| Machine Translations of Stack Exchange (Title+Body, Answer) pairs | 1,827,15 |
| SearchQA | 582,261 |
| S2ORC (Title, Abstract) | 41,769,185 |
| WikiAnswers Duplicate question pairs | 77,427,422 |
| CCNews | 614,664 |
| XSum | 226,711 |
| SimpleWiki | 102,225 |
| Machine Translated Cross Lingual Parallel Corpora | 28,376,115 |
| SPECTER citation triplets | 684,100 |
| Machine Translations of SPECTER citation triplets | 4,104,600 |
| Natural Questions (NQ) | 100,231 |
| SQuAD2.0 | 87,599 |
| HotpotQA | 85,000 |
| Fever | 109,810 |
| PubMed | 20,000,000 |
| Multilingual Miracl Triples | 81,409 |
| Multilingual MrTydi Triples | 48,715 |
| Sadeeem Question Answering | 4,037 |
| DBPedia Title-Body Pairs | 4,635,922 |
| Synthetic: English Query-Wikipedia Passage | 1,879,093 |
| Synthetic: English Fact Verification | 9,888 |
| Synthetic: Multilingual Query-Wikipedia Passage | 300,266 |
| Synthetic: Multilingual News Summaries | 37,489 |
| IBM Internal Triples | 40,290 |
| IBM Internal Title-Body Pairs | 1,524,586 |

Notably, we do not use the popular MS-MARCO retrieval dataset in our training corpus due to its non-commercial license, while other open-source models train on this dataset due to its high quality.

**Infrastructure:** We train Granite Embedding Models using IBM's computing cluster, Cognitive Compute Cluster, which is outfitted with NVIDIA A100 80GB GPUs. This cluster provides a scalable and efficient infrastructure for training our models over multiple GPUs.

**Ethical Considerations and Limitations:** The data used to train the base language model was filtered to remove text containing hate, abuse, and profanity. Granite-Embedding-107m-Multilingual is finetuned on 12 languages and has a context length of 512 tokens (longer texts will be truncated to this size).

**Resources**
- ⭐️ Learn about the latest updates with Granite: https://www.ibm.com/granite
- 📄 Get started with tutorials, best practices, and prompt engineering advice: https://www.ibm.com/granite/docs/
- 💡 Learn about the latest Granite learning resources: https://ibm.biz/granite-learning-resources

<!-- ## Citation
```
@misc{granite-embedding-models,
  author = {author 1, author2, ...},
  title = {},
  journal = {},
  volume = {},
  year = {2024},
  url = {https://arxiv.org/abs/0000.00000},
}
```
-->
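The Transformers usage example above stops after producing normalized query embeddings. As an illustrative extension that is not part of the official card, the sketch below encodes a few passages with the same CLS pooling and scores them against the queries; the `embed` helper and the abbreviated passage texts are assumptions for illustration only.

```python
import torch
from transformers import AutoModel, AutoTokenizer

model_path = "ibm-granite/granite-embedding-107m-multilingual"
model = AutoModel.from_pretrained(model_path)
tokenizer = AutoTokenizer.from_pretrained(model_path)
model.eval()

def embed(texts):
    # Tokenize, run the encoder, keep the CLS (first) token, and L2-normalize,
    # mirroring the pooling used in the Transformers usage example above.
    batch = tokenizer(texts, padding=True, truncation=True, return_tensors="pt")
    with torch.no_grad():
        output = model(**batch)
    cls_embeddings = output[0][:, 0]
    return torch.nn.functional.normalize(cls_embeddings, dim=1)

input_queries = [' Who made the song My achy breaky heart? ', 'summit define']
input_passages = [
    "Achy Breaky Heart is a country song written by Don Von Tress.",
    "Definition of summit: the highest point of a mountain; the highest level."
]

query_embeddings = embed(input_queries)
passage_embeddings = embed(input_passages)

# Dot product of L2-normalized vectors equals cosine similarity.
print(query_embeddings @ passage_embeddings.T)
```

This reproduces, with plain Transformers, the same query-passage similarity matrix that `util.cos_sim` computes in the Sentence Transformers example.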
[ "TRANSLATION", "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
Haon-Chen/speed-embedding-7b-instruct
Haon-Chen
feature-extraction
[ "transformers", "safetensors", "mistral", "feature-extraction", "mteb", "en", "arxiv:2410.18634", "arxiv:2104.08663", "arxiv:2210.07316", "license:mit", "model-index", "text-generation-inference", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
2024-10-31T11:23:55
2024-11-03T13:38:43
169
5
--- language: - en license: mit tags: - mteb - transformers model-index: - name: speed-embedding-7b-instruct results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 76.67164179104478 - type: ap value: 39.07181577576136 - type: f1 value: 70.25085237742982 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 96.1775 - type: ap value: 94.84308844303422 - type: f1 value: 96.17546959843244 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 56.278000000000006 - type: f1 value: 55.45101875980304 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: ndcg_at_1 value: 33.642 - type: ndcg_at_3 value: 49.399 - type: ndcg_at_5 value: 54.108999999999995 - type: ndcg_at_10 value: 59.294999999999995 - type: ndcg_at_100 value: 62.015 - type: map_at_1 value: 33.642 - type: map_at_3 value: 45.507 - type: map_at_5 value: 48.1 - type: map_at_10 value: 50.248000000000005 - type: map_at_100 value: 50.954 - type: recall_at_1 value: 33.642 - type: recall_at_3 value: 60.669 - type: recall_at_5 value: 72.191 - type: recall_at_10 value: 88.193 - type: recall_at_100 value: 99.431 - type: precision_at_1 value: 33.642 - type: precision_at_3 value: 20.223 - type: precision_at_5 value: 14.438 - type: precision_at_10 value: 8.819 - type: precision_at_100 value: 0.9939999999999999 - type: mrr_at_1 value: 33.997 - type: mrr_at_3 value: 45.614 - type: mrr_at_5 value: 48.263 - type: mrr_at_10 value: 50.388999999999996 - type: mrr_at_100 value: 51.102000000000004 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 51.1249344529392 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 47.01575217563573 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 67.2259454062751 - type: mrr value: 79.37508244294948 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 89.5312396547344 - type: cos_sim_spearman value: 87.1447567367366 - type: euclidean_pearson value: 88.67110804544821 - type: euclidean_spearman value: 87.1447567367366 - type: manhattan_pearson value: 89.06983994154335 - type: manhattan_spearman value: 87.59115245033443 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 88.63636363636364 - type: f1 value: 88.58740097633193 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: 
mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 41.99753263006505 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 39.623067884052666 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: ndcg_at_1 value: 30.904666666666664 - type: ndcg_at_3 value: 36.32808333333333 - type: ndcg_at_5 value: 38.767250000000004 - type: ndcg_at_10 value: 41.62008333333333 - type: ndcg_at_100 value: 47.118083333333324 - type: map_at_1 value: 25.7645 - type: map_at_3 value: 32.6235 - type: map_at_5 value: 34.347 - type: map_at_10 value: 35.79658333333333 - type: map_at_100 value: 37.10391666666666 - type: recall_at_1 value: 25.7645 - type: recall_at_3 value: 39.622666666666674 - type: recall_at_5 value: 45.938750000000006 - type: recall_at_10 value: 54.43816666666667 - type: recall_at_100 value: 78.66183333333333 - type: precision_at_1 value: 30.904666666666664 - type: precision_at_3 value: 17.099083333333333 - type: precision_at_5 value: 12.278416666666669 - type: precision_at_10 value: 7.573083333333335 - type: precision_at_100 value: 1.22275 - type: mrr_at_1 value: 30.904666666666664 - type: mrr_at_3 value: 37.458333333333336 - type: mrr_at_5 value: 38.97333333333333 - type: mrr_at_10 value: 40.10316666666666 - type: mrr_at_100 value: 41.004250000000006 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: ndcg_at_1 value: 38.046 - type: ndcg_at_3 value: 31.842 - type: ndcg_at_5 value: 33.698 - type: ndcg_at_10 value: 37.765 - type: ndcg_at_100 value: 44.998 - type: map_at_1 value: 16.682 - type: map_at_3 value: 23.624000000000002 - type: map_at_5 value: 25.812 - type: map_at_10 value: 28.017999999999997 - type: map_at_100 value: 30.064999999999998 - type: recall_at_1 value: 16.682 - type: recall_at_3 value: 28.338 - type: recall_at_5 value: 34.486 - type: recall_at_10 value: 43.474000000000004 - type: recall_at_100 value: 67.984 - type: precision_at_1 value: 38.046 - type: precision_at_3 value: 23.779 - type: precision_at_5 value: 17.849999999999998 - type: precision_at_10 value: 11.642 - type: precision_at_100 value: 1.9429999999999998 - type: mrr_at_1 value: 38.046 - type: mrr_at_3 value: 46.764 - type: mrr_at_5 value: 48.722 - type: mrr_at_10 value: 49.976 - type: mrr_at_100 value: 50.693999999999996 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: ndcg_at_1 value: 63.24999999999999 - type: ndcg_at_3 value: 54.005 - type: ndcg_at_5 value: 51.504000000000005 - type: ndcg_at_10 value: 49.738 - type: ndcg_at_100 value: 54.754000000000005 - type: map_at_1 value: 10.639 - type: map_at_3 value: 16.726 - type: map_at_5 value: 20.101 - type: map_at_10 value: 24.569 - type: map_at_100 value: 35.221999999999994 - type: recall_at_1 value: 10.639 - type: recall_at_3 value: 17.861 - type: recall_at_5 value: 22.642 - type: recall_at_10 value: 30.105999999999998 - type: recall_at_100 value: 60.92999999999999 - type: precision_at_1 value: 75.0 - type: precision_at_3 value: 58.083 - type: precision_at_5 value: 50.0 - type: precision_at_10 value: 40.35 - type: precision_at_100 value: 12.659999999999998 - type: 
mrr_at_1 value: 75.0 - type: mrr_at_3 value: 80.042 - type: mrr_at_5 value: 80.779 - type: mrr_at_10 value: 81.355 - type: mrr_at_100 value: 81.58 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 51.025 - type: f1 value: 47.08253474922065 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: ndcg_at_1 value: 82.163 - type: ndcg_at_3 value: 86.835 - type: ndcg_at_5 value: 87.802 - type: ndcg_at_10 value: 88.529 - type: ndcg_at_100 value: 89.17 - type: map_at_1 value: 76.335 - type: map_at_3 value: 83.91499999999999 - type: map_at_5 value: 84.64500000000001 - type: map_at_10 value: 85.058 - type: map_at_100 value: 85.257 - type: recall_at_1 value: 76.335 - type: recall_at_3 value: 90.608 - type: recall_at_5 value: 93.098 - type: recall_at_10 value: 95.173 - type: recall_at_100 value: 97.59299999999999 - type: precision_at_1 value: 82.163 - type: precision_at_3 value: 33.257999999999996 - type: precision_at_5 value: 20.654 - type: precision_at_10 value: 10.674999999999999 - type: precision_at_100 value: 1.122 - type: mrr_at_1 value: 82.163 - type: mrr_at_3 value: 88.346 - type: mrr_at_5 value: 88.791 - type: mrr_at_10 value: 88.97699999999999 - type: mrr_at_100 value: 89.031 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: ndcg_at_1 value: 55.093 - type: ndcg_at_3 value: 52.481 - type: ndcg_at_5 value: 53.545 - type: ndcg_at_10 value: 56.053 - type: ndcg_at_100 value: 62.53999999999999 - type: map_at_1 value: 29.189999999999998 - type: map_at_3 value: 42.603 - type: map_at_5 value: 45.855000000000004 - type: map_at_10 value: 48.241 - type: map_at_100 value: 50.300999999999995 - type: recall_at_1 value: 29.189999999999998 - type: recall_at_3 value: 47.471999999999994 - type: recall_at_5 value: 54.384 - type: recall_at_10 value: 62.731 - type: recall_at_100 value: 86.02300000000001 - type: precision_at_1 value: 55.093 - type: precision_at_3 value: 34.979 - type: precision_at_5 value: 25.278 - type: precision_at_10 value: 15.231 - type: precision_at_100 value: 2.2190000000000003 - type: mrr_at_1 value: 55.093 - type: mrr_at_3 value: 61.317 - type: mrr_at_5 value: 62.358999999999995 - type: mrr_at_10 value: 63.165000000000006 - type: mrr_at_100 value: 63.81 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: ndcg_at_1 value: 78.866 - type: ndcg_at_3 value: 70.128 - type: ndcg_at_5 value: 73.017 - type: ndcg_at_10 value: 75.166 - type: ndcg_at_100 value: 77.97500000000001 - type: map_at_1 value: 39.433 - type: map_at_3 value: 64.165 - type: map_at_5 value: 66.503 - type: map_at_10 value: 67.822 - type: map_at_100 value: 68.675 - type: recall_at_1 value: 39.433 - type: recall_at_3 value: 69.03399999999999 - type: recall_at_5 value: 74.74 - type: recall_at_10 value: 80.108 - type: recall_at_100 value: 90.81700000000001 - type: precision_at_1 value: 78.866 - type: precision_at_3 value: 46.022999999999996 - type: precision_at_5 value: 29.896 - type: precision_at_10 value: 16.022 - type: precision_at_100 value: 1.8159999999999998 - type: mrr_at_1 value: 78.866 - type: mrr_at_3 value: 83.91 - type: mrr_at_5 value: 84.473 - type: mrr_at_10 value: 84.769 - type: mrr_at_100 value: 84.953 - task: type: Classification dataset: name: MTEB 
ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 94.87799999999999 - type: ap value: 92.5831019543702 - type: f1 value: 94.87675087619891 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: test revision: None metrics: - type: ndcg_at_1 value: 23.195 - type: ndcg_at_3 value: 34.419 - type: ndcg_at_5 value: 38.665 - type: ndcg_at_10 value: 42.549 - type: ndcg_at_100 value: 48.256 - type: map_at_1 value: 22.508 - type: map_at_3 value: 31.346 - type: map_at_5 value: 33.73 - type: map_at_10 value: 35.365 - type: map_at_100 value: 36.568 - type: recall_at_1 value: 22.508 - type: recall_at_3 value: 42.63 - type: recall_at_5 value: 52.827999999999996 - type: recall_at_10 value: 64.645 - type: recall_at_100 value: 90.852 - type: precision_at_1 value: 23.195 - type: precision_at_3 value: 14.752 - type: precision_at_5 value: 11.0 - type: precision_at_10 value: 6.755 - type: precision_at_100 value: 0.96 - type: mrr_at_1 value: 23.195 - type: mrr_at_3 value: 32.042 - type: mrr_at_5 value: 34.388000000000005 - type: mrr_at_10 value: 35.974000000000004 - type: mrr_at_100 value: 37.114000000000004 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 95.84587323301413 - type: f1 value: 95.69948889844318 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 87.08162334701322 - type: f1 value: 72.237783326283 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 80.19502353732346 - type: f1 value: 77.732184986995 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 82.26630800268998 - type: f1 value: 82.12747916248556 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 36.95240450167033 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 36.27758530931266 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 33.35707665482982 - type: mrr value: 34.60987842278547 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: ndcg_at_1 value: 47.522999999999996 - type: ndcg_at_3 value: 44.489000000000004 - type: ndcg_at_5 value: 41.92 - type: ndcg_at_10 value: 38.738 - type: ndcg_at_100 value: 35.46 - type: map_at_1 value: 5.357 - type: map_at_3 value: 10.537 - type: map_at_5 value: 12.062000000000001 - type: map_at_10 value: 14.264 - type: map_at_100 value: 18.442 - type: recall_at_1 value: 5.357 - type: recall_at_3 
value: 12.499 - type: recall_at_5 value: 14.809 - type: recall_at_10 value: 18.765 - type: recall_at_100 value: 36.779 - type: precision_at_1 value: 49.226 - type: precision_at_3 value: 41.899 - type: precision_at_5 value: 36.718 - type: precision_at_10 value: 29.287999999999997 - type: precision_at_100 value: 9.22 - type: mrr_at_1 value: 49.845 - type: mrr_at_3 value: 57.121 - type: mrr_at_5 value: 58.172999999999995 - type: mrr_at_10 value: 58.906000000000006 - type: mrr_at_100 value: 59.486000000000004 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: ndcg_at_1 value: 42.815999999999995 - type: ndcg_at_3 value: 53.766999999999996 - type: ndcg_at_5 value: 57.957 - type: ndcg_at_10 value: 61.661 - type: ndcg_at_100 value: 65.218 - type: map_at_1 value: 38.364 - type: map_at_3 value: 49.782 - type: map_at_5 value: 52.319 - type: map_at_10 value: 54.07300000000001 - type: map_at_100 value: 54.983000000000004 - type: recall_at_1 value: 38.364 - type: recall_at_3 value: 61.744 - type: recall_at_5 value: 71.32300000000001 - type: recall_at_10 value: 82.015 - type: recall_at_100 value: 96.978 - type: precision_at_1 value: 42.815999999999995 - type: precision_at_3 value: 23.976 - type: precision_at_5 value: 16.866 - type: precision_at_10 value: 9.806 - type: precision_at_100 value: 1.1769999999999998 - type: mrr_at_1 value: 42.845 - type: mrr_at_3 value: 53.307 - type: mrr_at_5 value: 55.434000000000005 - type: mrr_at_10 value: 56.702 - type: mrr_at_100 value: 57.342000000000006 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: ndcg_at_1 value: 82.46 - type: ndcg_at_3 value: 86.774 - type: ndcg_at_5 value: 88.256 - type: ndcg_at_10 value: 89.35 - type: ndcg_at_100 value: 90.46499999999999 - type: map_at_1 value: 71.562 - type: map_at_3 value: 82.948 - type: map_at_5 value: 84.786 - type: map_at_10 value: 85.82300000000001 - type: map_at_100 value: 86.453 - type: recall_at_1 value: 71.562 - type: recall_at_3 value: 88.51 - type: recall_at_5 value: 92.795 - type: recall_at_10 value: 95.998 - type: recall_at_100 value: 99.701 - type: precision_at_1 value: 82.46 - type: precision_at_3 value: 38.1 - type: precision_at_5 value: 24.990000000000002 - type: precision_at_10 value: 13.553999999999998 - type: precision_at_100 value: 1.539 - type: mrr_at_1 value: 82.43 - type: mrr_at_3 value: 87.653 - type: mrr_at_5 value: 88.26899999999999 - type: mrr_at_10 value: 88.505 - type: mrr_at_100 value: 88.601 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 57.928338007609256 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 65.28915417473826 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: ndcg_at_1 value: 17.2 - type: ndcg_at_3 value: 15.856 - type: ndcg_at_5 value: 13.983 - type: ndcg_at_10 value: 16.628999999999998 - type: ndcg_at_100 value: 23.845 - type: map_at_1 value: 3.4750000000000005 - type: map_at_3 value: 6.905 - type: map_at_5 value: 8.254 - type: map_at_10 value: 9.474 - type: map_at_100 value: 11.242 - type: recall_at_1 value: 3.4750000000000005 - type: recall_at_3 
value: 9.298 - type: recall_at_5 value: 12.817 - type: recall_at_10 value: 17.675 - type: recall_at_100 value: 38.678000000000004 - type: precision_at_1 value: 17.2 - type: precision_at_3 value: 15.299999999999999 - type: precision_at_5 value: 12.64 - type: precision_at_10 value: 8.72 - type: precision_at_100 value: 1.907 - type: mrr_at_1 value: 17.2 - type: mrr_at_3 value: 25.55 - type: mrr_at_5 value: 27.485 - type: mrr_at_10 value: 28.809 - type: mrr_at_100 value: 29.964000000000002 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 86.10434430387332 - type: cos_sim_spearman value: 82.46041161692649 - type: euclidean_pearson value: 83.4010092798136 - type: euclidean_spearman value: 82.46040715308601 - type: manhattan_pearson value: 83.6702316837156 - type: manhattan_spearman value: 82.72271392303014 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 87.3179771524676 - type: cos_sim_spearman value: 80.15194914870666 - type: euclidean_pearson value: 84.54005271342946 - type: euclidean_spearman value: 80.15194914870666 - type: manhattan_pearson value: 85.24410357734307 - type: manhattan_spearman value: 80.78274673604562 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 89.2691354894402 - type: cos_sim_spearman value: 89.94300436293618 - type: euclidean_pearson value: 89.5600067781475 - type: euclidean_spearman value: 89.942989691344 - type: manhattan_pearson value: 89.80327997794308 - type: manhattan_spearman value: 90.3964860275568 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 87.68003396295498 - type: cos_sim_spearman value: 86.23848649310362 - type: euclidean_pearson value: 87.0702308813695 - type: euclidean_spearman value: 86.23848649310362 - type: manhattan_pearson value: 87.24495415360472 - type: manhattan_spearman value: 86.58198464997109 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 90.25643329096215 - type: cos_sim_spearman value: 91.19520084590636 - type: euclidean_pearson value: 90.68579446788728 - type: euclidean_spearman value: 91.19519611831312 - type: manhattan_pearson value: 90.83476867273104 - type: manhattan_spearman value: 91.4569817842705 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 86.41175694023282 - type: cos_sim_spearman value: 88.18744495989392 - type: euclidean_pearson value: 87.60085709987156 - type: euclidean_spearman value: 88.18773792681107 - type: manhattan_pearson value: 87.83199472909764 - type: manhattan_spearman value: 88.45824161471776 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 91.78311335565503 - type: cos_sim_spearman value: 91.93416269793802 - type: euclidean_pearson value: 91.84163160890154 - 
type: euclidean_spearman value: 91.93416269793802 - type: manhattan_pearson value: 91.77053255749301 - type: manhattan_spearman value: 91.67392623286098 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 68.2137857919086 - type: cos_sim_spearman value: 68.31928639693375 - type: euclidean_pearson value: 69.96072053688385 - type: euclidean_spearman value: 68.31928639693375 - type: manhattan_pearson value: 70.47736299273389 - type: manhattan_spearman value: 68.72439259356818 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 88.16092476703817 - type: cos_sim_spearman value: 89.20507562822989 - type: euclidean_pearson value: 88.91358225424611 - type: euclidean_spearman value: 89.20505548241839 - type: manhattan_pearson value: 88.98787306839809 - type: manhattan_spearman value: 89.37338458483269 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 87.29108971888714 - type: mrr value: 96.62042024787124 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: ndcg_at_1 value: 63.333 - type: ndcg_at_3 value: 72.768 - type: ndcg_at_5 value: 75.124 - type: ndcg_at_10 value: 77.178 - type: ndcg_at_100 value: 78.769 - type: map_at_1 value: 60.9 - type: map_at_3 value: 69.69999999999999 - type: map_at_5 value: 71.345 - type: map_at_10 value: 72.36200000000001 - type: map_at_100 value: 72.783 - type: recall_at_1 value: 60.9 - type: recall_at_3 value: 79.172 - type: recall_at_5 value: 84.917 - type: recall_at_10 value: 90.756 - type: recall_at_100 value: 97.667 - type: precision_at_1 value: 63.333 - type: precision_at_3 value: 28.555999999999997 - type: precision_at_5 value: 18.8 - type: precision_at_10 value: 10.233 - type: precision_at_100 value: 1.107 - type: mrr_at_1 value: 63.333 - type: mrr_at_3 value: 71.27799999999999 - type: mrr_at_5 value: 72.478 - type: mrr_at_10 value: 73.163 - type: mrr_at_100 value: 73.457 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.8009900990099 - type: cos_sim_ap value: 95.46920445134404 - type: cos_sim_f1 value: 89.70814132104455 - type: cos_sim_precision value: 91.9202518363064 - type: cos_sim_recall value: 87.6 - type: dot_accuracy value: 99.8009900990099 - type: dot_ap value: 95.46920445134404 - type: dot_f1 value: 89.70814132104455 - type: dot_precision value: 91.9202518363064 - type: dot_recall value: 87.6 - type: euclidean_accuracy value: 99.8009900990099 - type: euclidean_ap value: 95.46924273007079 - type: euclidean_f1 value: 89.70814132104455 - type: euclidean_precision value: 91.9202518363064 - type: euclidean_recall value: 87.6 - type: manhattan_accuracy value: 99.81188118811882 - type: manhattan_ap value: 95.77631677784113 - type: manhattan_f1 value: 90.26639344262296 - type: manhattan_precision value: 92.5420168067227 - type: manhattan_recall value: 88.1 - type: max_accuracy value: 99.81188118811882 - type: max_ap value: 95.77631677784113 - type: max_f1 value: 
90.26639344262296 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 71.59238280333025 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 39.012562075214035 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 55.16521497700657 - type: mrr value: 56.1779427680163 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 31.04402552863106 - type: cos_sim_spearman value: 31.05558230938988 - type: dot_pearson value: 31.04400838015153 - type: dot_spearman value: 31.05558230938988 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: ndcg_at_1 value: 91.0 - type: ndcg_at_3 value: 92.34599999999999 - type: ndcg_at_5 value: 90.89399999999999 - type: ndcg_at_10 value: 87.433 - type: ndcg_at_100 value: 67.06400000000001 - type: map_at_1 value: 0.241 - type: map_at_3 value: 0.735 - type: map_at_5 value: 1.216 - type: map_at_10 value: 2.317 - type: map_at_100 value: 14.151 - type: recall_at_1 value: 0.241 - type: recall_at_3 value: 0.76 - type: recall_at_5 value: 1.254 - type: recall_at_10 value: 2.421 - type: recall_at_100 value: 16.715 - type: precision_at_1 value: 94.0 - type: precision_at_3 value: 96.0 - type: precision_at_5 value: 94.8 - type: precision_at_10 value: 91.4 - type: precision_at_100 value: 68.24 - type: mrr_at_1 value: 94.0 - type: mrr_at_3 value: 96.667 - type: mrr_at_5 value: 96.667 - type: mrr_at_10 value: 96.667 - type: mrr_at_100 value: 96.667 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: ndcg_at_1 value: 26.531 - type: ndcg_at_3 value: 27.728 - type: ndcg_at_5 value: 25.668000000000003 - type: ndcg_at_10 value: 25.785999999999998 - type: ndcg_at_100 value: 35.623 - type: map_at_1 value: 2.076 - type: map_at_3 value: 5.29 - type: map_at_5 value: 7.292999999999999 - type: map_at_10 value: 9.81 - type: map_at_100 value: 15.461 - type: recall_at_1 value: 2.076 - type: recall_at_3 value: 6.7250000000000005 - type: recall_at_5 value: 9.808 - type: recall_at_10 value: 16.467000000000002 - type: recall_at_100 value: 45.109 - type: precision_at_1 value: 28.571 - type: precision_at_3 value: 29.252 - type: precision_at_5 value: 25.714 - type: precision_at_10 value: 23.265 - type: precision_at_100 value: 7.184 - type: mrr_at_1 value: 28.571 - type: mrr_at_3 value: 42.857 - type: mrr_at_5 value: 44.184 - type: mrr_at_10 value: 47.564 - type: mrr_at_100 value: 48.142 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 68.43159999999999 - type: ap value: 14.08119146524032 - type: f1 value: 53.26032318755336 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: 
mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 63.82852292020373 - type: f1 value: 64.14509521870399 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 55.252554461698566 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 88.54383978065208 - type: cos_sim_ap value: 81.67495128150328 - type: cos_sim_f1 value: 74.58161532864419 - type: cos_sim_precision value: 69.00807899461401 - type: cos_sim_recall value: 81.13456464379946 - type: dot_accuracy value: 88.54383978065208 - type: dot_ap value: 81.6748330747088 - type: dot_f1 value: 74.58161532864419 - type: dot_precision value: 69.00807899461401 - type: dot_recall value: 81.13456464379946 - type: euclidean_accuracy value: 88.54383978065208 - type: euclidean_ap value: 81.67496006818212 - type: euclidean_f1 value: 74.58161532864419 - type: euclidean_precision value: 69.00807899461401 - type: euclidean_recall value: 81.13456464379946 - type: manhattan_accuracy value: 88.40674733265782 - type: manhattan_ap value: 81.56036996969941 - type: manhattan_f1 value: 74.33063129452223 - type: manhattan_precision value: 69.53125 - type: manhattan_recall value: 79.84168865435356 - type: max_accuracy value: 88.54383978065208 - type: max_ap value: 81.67496006818212 - type: max_f1 value: 74.58161532864419 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.75627740908915 - type: cos_sim_ap value: 87.41911504007292 - type: cos_sim_f1 value: 79.91742008969888 - type: cos_sim_precision value: 74.31484178472131 - type: cos_sim_recall value: 86.43363104404065 - type: dot_accuracy value: 89.75627740908915 - type: dot_ap value: 87.41910845717851 - type: dot_f1 value: 79.91742008969888 - type: dot_precision value: 74.31484178472131 - type: dot_recall value: 86.43363104404065 - type: euclidean_accuracy value: 89.75627740908915 - type: euclidean_ap value: 87.41912150448005 - type: euclidean_f1 value: 79.91742008969888 - type: euclidean_precision value: 74.31484178472131 - type: euclidean_recall value: 86.43363104404065 - type: manhattan_accuracy value: 89.76597974152986 - type: manhattan_ap value: 87.49835162128704 - type: manhattan_f1 value: 80.05401656994779 - type: manhattan_precision value: 76.10158906390951 - type: manhattan_recall value: 84.43948259932245 - type: max_accuracy value: 89.76597974152986 - type: max_ap value: 87.49835162128704 - type: max_f1 value: 80.05401656994779 --- ## SPEED-embedding-7b-instruct [Little Giants: Synthesizing High-Quality Embedding Data at Scale](https://arxiv.org/pdf/2410.18634.pdf). Haonan Chen, Liang Wang, Nan Yang, Yutao Zhu, Ziliang Zhao, Furu Wei, Zhicheng Dou, arXiv 2024 This model has 32 layers and the embedding size is 4096. ## Usage Below is an example to encode queries and passages from the MS-MARCO passage ranking dataset. 
### Transformers ```python import torch import torch.nn.functional as F from torch import Tensor from transformers import AutoTokenizer, AutoModel def last_token_pool(last_hidden_states: Tensor, attention_mask: Tensor) -> Tensor: left_padding = (attention_mask[:, -1].sum() == attention_mask.shape[0]) if left_padding: return last_hidden_states[:, -1] else: sequence_lengths = attention_mask.sum(dim=1) - 1 batch_size = last_hidden_states.shape[0] return last_hidden_states[torch.arange(batch_size, device=last_hidden_states.device), sequence_lengths] def get_detailed_instruct(task_description: str, query: str) -> str: return f'Instruct: {task_description}\nQuery: {query}' # Each query must come with a one-sentence instruction that describes the task task = 'Given a web search query, retrieve relevant passages that answer the query' queries = [ get_detailed_instruct(task, 'how much protein should a female eat'), get_detailed_instruct(task, 'summit define') ] # No need to add instruction for retrieval documents documents = [ "As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.", "Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments." ] input_texts = queries + documents tokenizer = AutoTokenizer.from_pretrained('Haon-Chen/speed-embedding-7b-instruct') model = AutoModel.from_pretrained('Haon-Chen/speed-embedding-7b-instruct') max_length = 4096 # Tokenize the input texts batch_dict = tokenizer(input_texts, max_length=max_length, padding=True, truncation=True, return_tensors='pt') outputs = model(**batch_dict) embeddings = last_token_pool(outputs.last_hidden_state, batch_dict['attention_mask']) # normalize embeddings embeddings = F.normalize(embeddings, p=2, dim=1) scores = (embeddings[:2] @ embeddings[2:].T) * 100 print(scores.tolist()) ``` ## MTEB Benchmark Evaluation Check out [unilm/e5](https://github.com/microsoft/unilm/tree/master/e5) to reproduce evaluation results on the [BEIR](https://arxiv.org/abs/2104.08663) and [MTEB benchmark](https://arxiv.org/abs/2210.07316). ## FAQ **1. Do I need to add instructions to the query?** Yes, this is how the model is trained, otherwise you will see a performance degradation. The task definition should be a one-sentence instruction that describes the task. This is a way to customize text embeddings for different scenarios through natural language instructions. Please check out [unilm/e5/utils.py](https://github.com/microsoft/unilm/blob/9c0f1ff7ca53431fe47d2637dfe253643d94185b/e5/utils.py#L106) for instructions we used for evaluation. On the other hand, there is no need to add instructions to the document side. **2. Why are my reproduced results slightly different from reported in the model card?** Different versions of `transformers` and `pytorch` could cause negligible but non-zero performance differences. **3. Where are the LoRA-only weights?** You can find the LoRA-only weights at [https://huggingface.co/Haon-Chen/speed-embedding-7b-instruct/tree/main/lora](https://huggingface.co/Haon-Chen/speed-embedding-7b-instruct/tree/main/lora). 
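To illustrate the first FAQ point below about customizing the task instruction, here is a small, illustrative sketch. It reuses `tokenizer`, `model`, `last_token_pool`, and `get_detailed_instruct` from the Transformers example above; the task strings are hypothetical placeholders, not the official instructions used for evaluation (see unilm/e5/utils.py for those).

```python
# Hypothetical task instructions -- replace with wording that fits your scenario.
sts_task = 'Retrieve semantically similar text.'
qa_task = 'Given a question, retrieve passages that answer the question'

custom_queries = [
    get_detailed_instruct(sts_task, 'A man is playing a guitar.'),
    get_detailed_instruct(qa_task, 'who wrote the novel Moby-Dick'),
]

batch = tokenizer(custom_queries, max_length=4096, padding=True, truncation=True, return_tensors='pt')
with torch.no_grad():
    out = model(**batch)
# Pool and normalize exactly as in the snippet above; documents are still embedded without any instruction.
query_emb = F.normalize(last_token_pool(out.last_hidden_state, batch['attention_mask']), p=2, dim=1)
```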
## Citation If you find our paper or models helpful, please consider citing it as follows: ```bibtex @article{chen2024little, title={Little Giants: Synthesizing High-Quality Embedding Data at Scale}, author={Chen, Haonan and Wang, Liang and Yang, Nan and Zhu, Yutao and Zhao, Ziliang and Wei, Furu and Dou, Zhicheng}, journal={arXiv preprint arXiv:2410.18634}, year={2024} } ``` ## Limitations Using this model for inputs longer than 4096 tokens is not recommended.
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
MediaTek-Research/Breexe-8x7B-Instruct-v0_1
MediaTek-Research
text-generation
[ "transformers", "pytorch", "mixtral", "text-generation", "conversational", "en", "zh", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
2024-02-11T15:06:36
2024-08-02T01:14:28
167
55
--- language: - en - zh license: apache-2.0 pipeline_tag: text-generation extra_gated_prompt: The model weights are only available for the partners to download now. extra_gated_fields: Name: text Company: text Title: text Contact Email: text --- # Breexe-8x7B-Instruct-v0_1 Breexe-8x7B is a language model family that builds on top of [Mixtral-8x7B](https://huggingface.co/mistralai/Mixtral-8x7B-v0.1), specifically intended for Traditional Chinese use. Breexe-8x7B-Base is the base model for the Breexe-8x7B series. Breexe-8x7B-Base expands the original vocabulary with additional 30,000 Traditional Chinese tokens. With the expanded vocabulary, Breexe-8x7B operates at twice the inference speed for Traditional Chinese to Mixtral-8x7B. [See [Inference Performance](#inference-performance).] [Breexe-8x7B-Instruct](https://huggingface.co/MediaTek-Research/Breexe-8x7B-Instruct-v0_1) derives from the base model Breexe-8x7B-Base, making the resulting model amenable to be used as-is for commonly seen tasks, such as Q&A, RAG, multi-round chat, and summarization. **Breexe-8x7B-Instruct demonstrates impressive performance in benchmarks for Traditional Chinese and English, on par with OpenAI's gpt-3.5-turbo-1106.** [See [Chat Model Performance](#chat-model-performance).] The current release version of Breexe-8x7B is v0.1. *The models were trained on Nvidia's Taipei-1. Special thanks for Nvidia's technical support.* *A project by the members (in alphabetical order): Chan-Jan Hsu 許湛然, Chang-Le Liu 劉昶樂, Feng-Ting Liao 廖峰挺, Po-Chun Hsu 許博竣, [Yi-Chang Chen 陳宜昌](https://ycc.idv.tw/about-me), and the supervisor Da-Shan Shiu 許大山.* ## BreeXe API <p style="color:red;">We offer a trial API for business integration and academic benchmarking.</p> *API service open time: 14:00 - 20:00 (from 2024/4/3 to 2024/5/3)* The API is in beta testing. If you are experiencing connectivity issues, please bear with us. Free trial API key: `'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX2lkIjoyLCJ1c2VybmFtZSI6ImdlbmVyYWxfcHVibGljIiwibm90ZSI6ImdlbmVyYWwgcHVibGljIn0.kCp68nRw3RSh3jbMm8FvhG0NIkStflgI1wTHLviRPQE'` Get started here: ```python import time from openai import OpenAI API_KEY = <API_KEY_HERE> BASE_URL = 'https://api-mtkresearch.com/v1' MODEL_NAME = 'BreeXe-8x7B' client = OpenAI(base_url=BASE_URL, api_key=API_KEY) def ask_breexe(messages): completion = client.chat.completions.create( model=MODEL_NAME, messages=messages, temperature=0.01, top_p=0.01, max_tokens=512 ) response = completion.choices[0].message.content time.sleep(3) # due to a rate limit of 200 requests per 10 minutes return response messages = [ { 'role': 'system', 'content': 'You are a helpful AI assistant built by MediaTek Research. The user you are helping speaks Traditional Chinese and comes from Taiwan.' }, { 'role': 'user', 'content': '請問在量子物理領域中,疊加原理是什麼?它又是如何與量子糾纏現象相關聯的?' } ] response = ask_breexe(messages) print(response) ``` ## BreeXe Demo <a href="https://huggingface.co/spaces/MediaTek-Research/Demo-MR-Breexe-8x7B" style="color:red;font-weight:bold;">Try Demo Here 👩‍💻🧑🏻‍💻</a> Some examples are shown below. (System Prompt: `You are a helpful AI assistant built by MediaTek Research. 
The user you are helping speaks Traditional Chinese and comes from Taiwan.`, Temperature: 0.01, Top-P: 0.01, Repetition-Penalty: 1.1) <details><summary><b>Education</b></summary> | Query | 1️⃣ 請問在量子物理領域中,疊加原理是什麼?它又是如何與量子糾纏現象相關聯的?<br>2️⃣ 請用上述概念說明「薛丁格的貓」這個假想實驗 | |---|---| | **Breexe** | 1️⃣ <br>在量子物理領域中,疊加原理(Superposition Principle)是一個非常重要且獨特的概念。這個原則表示,微觀粒子如電子、光子等,可以同時存在於多個狀態之中,直到它們被測量時才「決定」其特定狀態。換句話說,在未進行觀測之前,量子系統處於一種混合態,包含了所有可能的狀態。<br><br>舉例來說,假設有一個量子系統具有兩個基本狀態:\|0⟩和\|1⟩。根據疊加原理,該系統可以同時處在這兩種狀態的疊加,即α\|0⟩+β\|1⟩,其中α和β分別代表不同狀態的機率振幅。當我們對這個量子系統進行測量時,它會「選擇」某個特定狀態,例如\|0⟩或\|1⟩,而其他未被選中的狀態將被排除在外。在這個過程中,整個系統的波函數會瞬間崩塌到選定的狀態。<br><br>量子糾纏現象(Quantum Entanglement)則是另一個量子物理中的核心概念。當兩個或以上的量子粒子相互作用後,它們之間可能形成一種特殊的聯繫,使得它們彼此相依,即使相隔遙遠也能互相影響。也就是說,當我們對其中一個粒子進行測量時,另一個粒子也會立即受到影響,無論它們相距多遠。這違反了古典物理學的定律,因此也被稱為「超距作用」。<br><br>疊加原理與量子糾纏現象兩者間有密切關聯。在某些情況下,量子糾纏現象需要疊加原理才能成立。例如,當兩個粒子處於糾纏狀態時,它們各自的狀態不能單獨描述,而是必須以整體的角度來考慮。在這種狀況下,疊加原理便提供了一個有效的方法,將兩個粒子的狀態結合為一個統一的疊加態。<br><br>此外,疊加原理也為量子計算機帶來了巨大的潛力。由於量子位元可以同時存在於多個狀態,這讓量子計算機擁有遠超傳統電腦的運算能力。利用疊加原理,量子計算機可以在短時間內處理大量數據,從而解決一些目前尚待解決的難題。<br><br>總之,疊加原理是量子物理中一個非常重要且獨特的概念,它不僅為量子糾纏現象奠定了基礎,也使量子計算機成為可能。了解疊加原理有助於深入理解量子世界的奇妙特性,並探索更多未知的量子現象。<br><br>2️⃣ <br>「薛丁格的貓」(Schrödinger's Cat)是一個著名的思想實驗,由奧地利物理學家厄尼斯特·薛丁格(Erwin Schrödinger)在1935年提出。這個實驗旨在展示量子力學與巨觀世界之間的矛盾,以及疊加原理在宏觀尺度下的荒謬性。<br><br>首先,讓我們回顧一下疊加原理。根據這個原則,微觀粒子可以同時存在於多個狀態,直到它們被測量時才「決定」其特定狀態。然而,當我們將這個概念放大至巨觀尺度時,疊加原理似乎變得不合理。<br><br>在薛丁格的貓這個假想實驗中,有一個密閉容器內裝有一隻活貓、一盒有毒氣體、以及一個放射性原子。如果放射性原子衰變,毒氣便會釋放出來,導致貓死亡;反之,若原子不衰變,則貓保持活著。按照古典物理的觀點,在任何給定時刻,貓只能處在活或死的單一狀態。<br><br>然而,從量子力學的角度來看,事情就變得有趣了。假設放射性原子處於疊加態,既可能衰變又可能不衰變。在這種情況下,根據疊加原理,貓也應該處於一個疊加態,也就是同時活又死。這個結果顯然違背常理,因為在我們日常生活中,物體總是處於明確的狀態。<br><br>這個實驗進一步強調了量子世界與巨觀世界之間的差異,以及疊加原理在巨觀尺度下的荒謬性。時至今日,「薛丁格的貓」仍然是一個熱門的話題,經常用於討論量子力學的奇特性質及其與巨觀世界的關係。 | </details> <details><summary><b>Open Question</b></summary> | Query | 生命的意義是什麼? 請用哲學角度探討,盡可能引經據典 | |---|---| | **Breexe** | 生命的意義是一個亙古以來的人類存在與生存問題。在哲學領域中,不同學派和思想家對於生命的意義有各自獨特且深刻的見解。以下將引用幾位重要哲學家的觀點,以提供您對這個議題的理解:<br>1. 蘇格拉底(Socrates):蘇格拉底主張「知識就是力量」,他認為人應該追求智慧、道德和美德。生命的意義在於追求真理,通過自我反省和思辨來提升心靈,並了解自己的角色和責任。他強調要認識自己,才能找到生命的意義。<br>2. 亞里斯多德(Aristotle):亞里斯多德提出了「目的論」(teleology),他相信每個事物都有其自然目的或最終目標。生命的意義就在於實現我們的潛能,完成我們的使命。他提到人類具有理性能力,因此生命的意義是在實踐德行(virtue),追求幸福(eudaimonia),即一種圓滿而美好的生活。<br>3. 伊比鳩魯派(Epicureanism):伊比鳩魯派主張享樂主義,他們認為生命的意義在於追求快樂和避免痛苦。在這個理論下,生命的意義是享受當下,珍惜有限的時間,以及建立良好的人際關係。然而,這並不意味著放縱或自私行為;相反地,它鼓勵明智地選擇那些能帶來真正快樂的事物。<br>4. 斯多噶學派(Stoicism):斯多噶學派強調控制情感和意志力,以克服人生中的困難和逆境。生命的意義在於接受命運,保持內心平靜,並努力過上自律的生活。他們相信,只要我們能夠掌握自己的心態,就能夠應對任何困境。<br>5. 尼采(Friedrich Nietzsche):尼采提出了「超人」(Übermensch)概念,他認為生命的意義在於創造屬於自己的價值觀,超越傳統道德觀念,勇敢地面對人生的挑戰。他呼籲人們要活出自我,追求卓越,並成為自己的主宰。<br>6. 沙特(Jean-Paul Sartre):沙特是存在主義者,他認為生命的意義取決於個人的抉擇。他主張自由意志,強調每個人都可以決定自己的道路。生命的意義就在於為自己的行動負責,並在過程中塑造出自己的存在。<br>7. 
馬丁·布伯(Martin Buber):布伯提出了「我─你」(Ich-Du)的概念,他認為生命的意義在於與他人的相遇和交流。他強調人與人之間的相互尊重和愛,認為只有通過與他人的交往,我們才能充分感受到生命的意義。<br>以上只是部分哲學家的觀點,但希望這些引文有助於您進一步思考生命的意義。每個人都有自己的答案,而在探索這個問題時,重要的是要保持開放的心態,尊重他人的看法,並勇於追求自己的信仰。 | </details> <details><summary><b>Email Writing</b></summary> | Query | 請擬定一封專業的郵件,尋求主管對你準備的「季度財務報告」提供意見。特別詢問有關資料分析、呈現風格,以及所提取結論的清晰度。郵件請簡潔扼要。 | |---|---| | **Breexe** | 尊敬的主管:<br/><br/>我是您的下屬 XXXX,我正在籌備本年度第三季的財務報告。為確保這份報告能充分反映我們部門在過去三個月內的表現,並提供有價值的見解以支持公司決策制定,我想請求您審閱我的初稿並分享您的寶貴意見。<br/><br/>以下是我希望您在審閱時關注的幾個方面:<br/>1.數據分析:確認我所使用的數據是否正確且來源可靠,以及我在報告中對這些數據進行了哪些分析方法。如果您認為需要添加或修改任何分析方式,也懇請不吝賜教。<br/>2.呈現風格:我希望這份報告既能展示清晰的數字信息,又能保持視覺吸引力。因此,我非常期待您的反饋,以便我能夠進一步完善圖表和圖形等元素,使其更容易理解和消化。<br/>3.結論清晰度:最後,我希望報告中的結論能夠清晰地傳達我們部門在本季度的表現,以及可能影響未來業績的因素。在此基礎上,我希望您能指導我如何更好地表達這些結論,以便使報告更具說服力。<br/>懇請您在方便時審閱這份報告,並將您的意見與建議發送至我的電子郵箱 ([email protected])。您的指導將對我的工作產生巨大幫助,再次感謝您的支持!<br/><br/>敬祝<br/>商祺<br><br>XXXX(您的名字)| </details> <details><summary><b>Table Understanding</b></summary> | Query | 這是一個表格,其中第一行是標題,後續的每一行都是一隻企鵝:<br><br>姓名,年齡,身高(cm),體重(kg)<br>路易斯, 7, 50, 11<br>伯納德,5, 80, 13<br>文森特, 9, 60, 11<br>格溫,8,70,15<br><br>請問最高的企鵝是哪隻? | |---|---| | **Breexe** | 在提供的表格中,最高的企鵝是伯納德。他的身高為 80 公分。以下是各企鵝的詳細資料:<br><br>姓名, 年齡, 身高(cm), 體重(kg)<br>路易斯, 7, 50, 11<br>伯納德, 5, 80, 13 (最高)<br>文森特, 9, 60, 11<br>格溫, 8, 70, 15 | </details> <details><summary><b>Language Assistance</b></summary> | Query | 1️⃣<br>修正以下英語句子中的錯誤<br>On the other words, making friends with smarter people can help people make money. <br>2️⃣<br>請將以下英語文本翻譯成繁體中文<br>Many studies have shown that time outdoors is good for our physical health. Three wilderness education experts explain why periods alone in nature also provide mental and spiritual benefits. | |---|---| | **Breexe** | 1️⃣<br>修正以下英語句子中的錯誤:<br>在另一方面,與更聰明的人交朋友可以幫助人賺錢。<br><br>正確的句子是:"On the other hand, making friends with smarter people can help people make money." <br><br>2️⃣<br>許多研究顯示,待在戶外對身體健康有好處。三位荒野教育專家解釋了為什麼單獨在大自然中待一段時間也能提供心理和靈性上的益處。<br><br>正確的繁體中文版本是:「很多研究表明,在戶外的時間對我們的身體健康是有益的。三位野地教育專家解釋了為什麼在大自然中獨處也會帶來心理和精神上的利益。」| </details> <details><summary><b>Summary</b></summary> | Query | """<br>聯發科技集團旗下的人工智慧研究單位聯發創新基地,昨 (11/23) 日與臺北市政府資訊局和國立臺北科技大學簽署合作備忘錄,將其自主研發的中文大型語言模型授權予臺北市政府資訊局,並由北科大協助部署應用。透過生成式人工智慧工具的導入,為臺北市政府同仁建構兼具資訊安全、高生產力的智慧工作模式。這也是業界、學界和政府機關三方攜手推動中文生成式人工智慧發展的重要里程碑。<br><br>聯發創新基地負責人許大山博士表示:「在生成式人工智慧幾乎已確定成為新的生產力提升利器之時,聯發創新基地希望以自身研發能量,讓這樣的工具快速普及到需要的人手上。」<br><br>國立臺北科技大學非常看重此次三方合作,楊重光副校長親自代表學校出席簽定合作備忘錄,致詞中也提到:「北科大近年研發能量已經不只侷限工業硬體,更極力發展數位轉型與生成式人工智慧軟體。此次以學術界角色,參與臺北市政府與聯發創新基地合作,可提供研究能量協助進行提詞優化、辦公室自動化程度提升、公共服務改善、智慧城市多元應用,由學術創新使生成式人工智慧發展可以超越業界期待,並期許多起合作案例能帶動更多數位轉型的成功發展。<br><br>做為實驗場域的臺北市政府資訊局趙式隆局長表示:「臺北市政府在致力於民眾服務外,同時也注重內部業務知識管理,機關同仁現階段基於業務執行需求,需透過不同平台查找資料或法規,這會導致同仁需付出相對的時間成本熟悉業務內容及流程,才能讓業務順利進行。我們希望透過生成式人工智慧工具的輔助,讓同仁以自然語言方式詢問即可查詢相關規定或函釋,減輕同仁負擔。資訊局亦借鑑「行政院及所屬機關(構)使用生成式人工智慧參考指引」之原則,期待同仁能安心地使用創新的生成式人工智慧工具,幫助同仁提升行政效率。」<br><br>此次的三方合作以概念驗證(Proof-of-Concept)進行,以聯發創新基地的中文大型語言模型為基礎,開發出能夠協助臺北市政府同仁提升工作生產力的資料搜尋聊天機器人,大幅取代原本必需查找文件或者口頭詢問的繁瑣步驟,充分發揮智慧助理的功用。該模型以地端方式授權,資料留存於市府,維護市民資訊保密權益。此外,臺北市政府可提供機密資訊以外的使用回饋予聯發創新基地,做為後續調校改進之依據,以實際使用經驗促進未來語言模型發展。<br>"""<br><br>請用三點來摘要以上內容 | |---|---| | **Breexe** | 1. 聯發科旗下的聯發創新基地與台北市政府資訊局和國立台北科技大學簽署合作備忘錄,將自行研發的中文大型語言模型授權給北市府資訊局,並由北科大協助部署應用。<br>2. 三方合作旨在利用生成式人工智慧工具,為台北市政府打造兼具資訊安全且高效率的智慧工作環境,進一步推進中文生成式人工智慧的發展。<br>3. 
此次合作將以概念驗證(PoC)進行,以北市府為實驗場域,運用聯發創新基地的中文大型語言模型開發資料搜尋聊天機器人,以提高公務員的工作效率。 | </details> <details><summary><b>Roleplay</b></summary> | Query | 1️⃣<br>現在你是機器學習工程師了。你的工作是解釋複雜的機器學習概念,用簡化方式讓沒有技術背景的客戶理解並信任你的產品。讓我們從這個問題開始:「什麼是語言模型?它是使用標註的資料還是未標註的資料來訓練?」<br>2️⃣<br>這是真的嗎?我聽說其他一些公司有不同的作法來執行這項工作而且可以做得更安全。 | |---|---| | **Breexe** | 1️⃣ <br>首先,讓我們簡單地了解一下「語言模型」(Language Model)。它在自然語言處理(NLP)中扮演著重要角色,主要用於預測和生成文本。換句話說,語言模型可以根據給定的上下文,推斷出可能的詞彙或句子。<br><br>想像你正在閱讀一本書,而書中的每個字都代表一個「單詞」。語言模型就像是一位聰明的小助手,他能分析整本書籍,找出哪些單詞通常會一起出現、哪些不太可能連在一起。有了這些知識後,當小助手遇到新的文本時,他就能猜測接下來的單詞,或者自動完成你想要說的話。<br><br>現在,談到標註與未標註的資料。在機器學習領域,有兩種主要類型的資料:標註資料(Labeled Data)和未標註資料(Unlabeled Data)。標註資料是指那些已標記類別或結果的樣本,例如將圖片分類為貓或狗;而未標註資料則指那些尚未被標記的樣本。<br><br>回到語言模型。在早期,大多數語言模型都是基於未標註的資料進行訓練。研究人員收集大量的文本,然後利用統計方法計算單詞之間的關聯性。然而,隨著深度學習的崛起,現代語言模型越來越多地採用標註資料進行訓練。在這種情況下,標註資料包括具有正確答案的文本配對,如問句與回答、文章段落與摘要等。<br><br>在訓練過程中,機器學習演算法將不斷調整參數,以提高對新文本的預測能力。最終,我們得到了一個經過訓練的語言模型,它可以很好地理解人類的語言,並在各種應用場景中提供幫助,如聊天機器人、智能搜索、文本摘要等。<br><br>希望以上的解釋能讓您更容易理解語言模型以及其如何運用標註和未標註的資料。如果您有其他疑問,請隨時提出!<br><br>2️⃣ <br>是的,您的疑慮是有道理的。在人工智慧 (AI) 和機器學習 (ML) 領域,不同公司確實可能採取不同的策略和方法來建立語言模型。以下是我們進一步探討的主要原因:<br><br>1. 安全性和隱私:確保用戶數據安全和保護個人信息是各公司關注的重點。因此,有些公司可能傾向於使用未標註的公開數據集,而不是收集和使用標註的私人數據。這有助於減少潛在的隱私風險,同時保持高水平的安全性。<br>2. 成本效益:標註資料需要人力資源投入,以便將原始文本標記為特定類別或結果。這增加了成本,特別是在涉及到大量數據的情況下。相比之下,未標註的資料不需要額外的人工標註,從而降低了成本。<br>3. 性能差異:在某些情況下,未標註的資料可能比標註的資料更具多樣性和涵蓋範圍。通過充分利用未標註的資料,企業可以獲得更多的信息,從而提升模型性能。然而,標註的資料通常包含明確的目標,使模型能夠更好地理解所需的任務。因此,兩者各有利弊。<br>4. 模型設計:不同公司可能擁有獨特的演算法和模型設計,使其在處理標註或未標註的資料方面有所不同。例如,一種模型可能非常擅長處理標註的資料,另一種模型則可能在未標註的資料上表現得更好。<br>5. 業務需求:最後,不同公司可能因其業務需求而在語言模型的開發和部署方面存在差異。例如,一家專注於社交媒體平台的公司可能需要一個高度個性化的聊天機器人,而另一家公司可能需要一個通用的文本摘要工具。在這種情況下,它們可能需要不同類型的語言模型。<br>總之,不同公司在語言模型的培訓和部署方面可能存在差異,這取決於他們的安全考慮、成本控制、性能要求、模型設計和業務需求。無論哪種做法,最重要的是確保所使用的方法符合行業標準和法律法規,並且在保護用戶數據和隱私的前提下提供良好的服務。| </details> <details><summary><b>Extraction</b></summary> | Query | """<br>〔記者許國楨/台中報導〕有金融背景外號「李董」的李示水,涉嫌自11年前開始成立投資集團,非法經營外匯、期貨,還以保證獲利為由向投資人吸金超過249億元,案經台中檢警調聯手偵辦,分兩波行動將李男及成員共47人拘提到案,查扣李名下93筆房地產,以及包括賓利、藍寶堅尼等5輛豪車,多金程度令人咋舌。<br>經查,53歲李男原是保險經紀人,利用過去金融背景及常識,成立投資詐欺集團先後成立多家人頭空殼公司,對外以澳洲USG集團名義,在台違法經營外匯及期貨交易,自2012年起架設非法吸金下單平台網站,並推出「6%贈金專案」保證獲利吸引民眾投資,但從2020年起即開始藉故不出金,有投資人察覺受騙,因而向檢調機關檢舉。<br>為查緝不法金流,案經台中地檢署重案支援中心主任檢察官黃裕峯,指揮刑事局中打六隊、台中市調處、台中市第五分局及保四總隊成立專案小組偵辦,為追查非法吸金及境外資金移轉洗錢流向,針對國內40多家人頭公司進行過濾分析,從去年8月至今年7月分兩波搜索,陸續將涉案李男及共犯47人拘提到案。<br>為追徵不法吸金犯罪所得,同時查扣公告現價超過1.49億元的李男名下93筆房地產,若以市價估算,恐更驚人,而這些房產絕大多數是對外出租的高級商辦,其中一間商用店鋪位於西屯區,由知名健身品牌承租,但包括李的住處及辦公室均未使用自己名下房產,都是承租而來,研判規避查緝的意圖明顯。<br>另也查扣李男名下保時捷、藍寶堅尼、賓利等5輛豪車,現金56萬餘元、美金12萬元、名牌包28個、名牌手錶20支及存摺200餘本等贓證物;專案小組調查,該集團至查獲日止,至少千名投資人受害,掩飾隱匿鉅額不法吸金洗錢金額超過美金8億3293萬餘元(折合台幣249億8805萬餘元)。<br>而全案經移送台中地檢署後,檢方近日偵結,將李等涉案47人依違反銀行法等罪起訴,刑事局中打呼籲民眾,任何投資應透過國內合法證券期貨商進行下單匯款,勿輕信「保證獲利」投資方案,極可能都是詐騙,最後血本無歸,如有發現「保證獲利」投資專案招募或廣告,請立即向警察機關檢舉或撥打165反詐騙諮詢專線即時查證。<br>而檢警將持續共同全力守護人民財產,貫徹打擊不法金流專案,維持社會安定及國家經濟穩定,展現政府打擊非法吸金及詐欺犯罪決心。<br>"""<br>請根據以上新聞內容,抓取出可能罪犯,及其可能犯的罪狀。由JSON格式(List of Dict)輸出,每個Dict包含兩個key: name 和 crimes | |---|---| | **Breexe** | [<br>{<br>"name": "李示水",<br>"crimes": ["非法經營外匯、期貨", "非法吸金", "詐欺"]<br>},<br>{<br>"name": "共犯47人",<br>"crimes": ["違反銀行法", "詐欺"]<br>}<br>] | </details> <details><summary><b>Coding</b></summary> | Query | 實作一個函數以 O(1) 的空間複雜度和 O(n) 的時間複雜度,找到兩個不同大小的已排序陣列的中位數。 | |---|---| | **Breexe** | 首先,我們需要了解如何在 O(1) 的空間複雜度下計算出兩個已排序陣列的中位數。這裡提供了一個演算法:<br><br>假設有兩個已排序陣列 A 和 B,其長度分別為 m 和 n (m <= n)。我們將利用以下步驟來計算中位數:<br>1.找出兩個陣列的中間元素索引 i = (m + n - 1)/2 和 j = m + n/2。注意,如果 n 是奇數,則取整後的 i 和 j 可能相同;如果 n 是偶數,i 和 j 分別表示兩個中位數的位置。<br>2.比較 A[i-1] 和 B[j-1]、A[i] 和 B[j]。<br>3.如果 A[i-1] > B[j-1],則中位數位於 B[0..j-1] 區間,因此可以忽略 A[i..m] 區間。反之,如果 A[i] < B[j],則可以忽略 B[j+1..n] 
區間。<br>4.重複上述步驟,直到剩下一個區間。這個區間包含兩個陣列的中位數。<br>5.最後,計算中位數。如果 n 是奇數,中位數就是該區間的中間元素;如果 n 是偶數,中位數就是該區間的前兩個元素之平均值。<br><br>以下是 Python 程式碼範例: <br>\`\`\`<br>def find_median(A, B):<br>  m, n = len(A), len(B)<br> <br>  if m > n:<br>   A, B, m, n = B, A, n, m # Ensure m <= n<br> <br>  imin, imax = 0, m<br>  while imin <= imax:<br>   i = (imin + imax) // 2<br>   j = (m + n + 1) // 2 - i<br> <br>   # If i is too small or too large, then adjust it accordingly<br>   if i < m and B[j-1] > A[i]:<br>    imin = i + 1<br>   elif i > 0 and A[i-1] > B[j]:<br>    imax = i - 1<br>   else:<br>    max_left_A = float('-inf') if i == 0 else A[i-1]<br>    min_right_A = float('inf') if i == m else A[i]<br> <br>    max_left_B = float('-inf') if j == 0 else B[j-1]<br>    min_right_B = float('inf') if j == n else B[j]<br> <br>    if (m + n) % 2 == 0:<br>     return (max(max_left_A, max_left_B) + min(min_right_A, min_right_B)) / 2<br>    else:<br>     return max(max_left_A, max_left_B)<br><br>  raise ValueError("No median found")<br>\`\`\`<br><br>以上演算法符合要求,在 O(1) 的空間複雜度下完成任務,並且在 O(n) 的時間複雜度內運算。| </details> <details><summary><b>Math</b></summary> | Query | 三角形的頂點位於點 (0, 0)、(-1, 1) 和 (3, 3)。三角形的面積是多少?<br><br>Think it step by step | |---|---| | **Breexe** | 首先,我們將利用三角形面積公式來計算這個問題。三角形面積公式為:<br><br>面積 = \|(x1 * (y2 - y3) + x2 * (y3 - y1) + x3 * (y1 - y2)) / 2\|<br><br>在這個問題中,三角形的頂點分別位於:<br>A(0, 0)、B(-1, 1)、C(3, 3)<br><br>接下來,我們將代入公式計算:<br><br>面積 = \|(0 * (1 - 3) + (-1) * (3 - 0) + 3 * (0 - 1)) / 2\|<br>面積 = \|(0 - 3 - 3) / 2\|<br>面積 = \|-6 / 2\|<br>面積 = 3<br><br>因此,三角形的面積為 3 平方單位。 | </details> <details><summary><b>Taiwan Knowledge</b></summary> | Query | 我剛來到台北工作,不是很了解這邊的捷運系統,請幫我介紹一下 | |---|---| | **Breexe** | 歡迎你來到台北!台北擁有便捷且現代化的大眾運輸系統,其中最主要的是台北捷運(Metro Taipei)。以下是一些基本資訊和路線介紹:<br>1. 台北捷運共有五種不同的路線:淡水信義線 (紅線)、松山新店線 (綠線)、中和新蘆線 (橘線)、文湖線 (棕線) 以及板南線 (藍線)。各線之間有轉乘站相連,方便旅客在城市間移動。<br>2. 淡水信義線:這條線路由北投延伸至象山,途經士林、圓山、雙連等熱門景點。它與中和新蘆線和松山新店線在中正紀念堂站及東門站轉乘;與板南線在台北車站轉乘。<br>3. 松山新店線:從松山延伸至新店,經過南京三民、松江南京、忠孝新生等熱鬧商業區。它與淡水信義線在中正紀念堂站轉乘;與中和新蘆線在古亭站轉乘;與板南線在西門站轉乘。<br>4. 中和新蘆線:此路線由迴龍延伸至蘆洲,途經新莊、板橋、行天宮等區域。它與淡水信義線在東門站轉乘;與松山新店線在古亭站轉乘;與板南線在忠孝復興站轉乘。<br>5. 文湖線:這是環狀線,主要服務內湖科技園區和南港軟體園區等地。它與松山新店線在南京復興站轉乘;與淡水信義線在大安站轉乘。<br>6. 板南線:這條線路由土城延伸至南港展覽館,途經永寧、新埔民生、市政府等站點。它與淡水信義線在台北車站轉乘;與松山新店線在西門站轉乘;與中和新蘆線在忠孝復興站轉乘。<br>7. 票價:單程票費用為每段次新台幣$20元,使用悠遊卡可享折扣優惠。此外,台北捷運提供多種定期票供通勤者選擇,如「常客優惠」、「定期票」等。<br>8. 營運時間:週一至週四,五、六日及國定假日略有不同。一般來說,首班車約早上6點發車,末班車則約晚上12點左右。詳細時刻表可參考台北捷運官網。<br>9. 注意事項:進站時需刷卡或購票,出站時再刷一次卡片。禁止飲食、吸菸、攜帶危險物品等。<br>希望以上資訊能幫助你在台北順利地利用捷運系統。祝你旅途愉快! | </details> ## Features - Expanding the vocabulary dictionary size from 32k to 62k to better support Traditional Chinese - 8k-token context length - Multi-turn dialogue (without special handling for harmfulness) - Sparse mixture of experts (MoE) ## Inference Performance In this test, we use the first 700 characters of the [web article](https://health.udn.com/health/story/5976/7699252?from=udn_ch1005_main_index) as the input and ask the model to write the same article again. All inferences run on 4 RTX A6000 GPUs (using `vllm`, with a tensor-parallel size of 4). 
| Models | ↓ Inference Time (sec)|Estimated Max Input Length (Char)| |--------------------------------------------------------------------|-------------------|--------------------------| | **Breexe-8x7B-Instruct-v0.1** | 27.83 | 11.1k | | Mixtral-8x7B-Instruct-v0.1 | 59.49 | 5.1k | ## Chat Model Performance **TMMLU+**, **Table**, and **MT-Bench-tw** source from [MediaTek-Research/TCEval-v2](https://huggingface.co/datasets/MediaTek-Research/TCEval-v2), which derives from [TCEval-v1](https://github.com/mtkresearch/MR-Models/tree/main/TC-Eval) and [ikala/tmmluplus](https://huggingface.co/datasets/ikala/tmmluplus). **MMLU** sources from [hails/mmlu_no_train](https://huggingface.co/datasets/hails/mmlu_no_train). **MT-Bench** source from [lmsys/mt_bench_human_judgments](https://huggingface.co/datasets/lmsys/mt_bench_human_judgments). We use [the code](https://github.com/mtkresearch/TCEval) revised from [EleutherAI/lm-evaluation-harness](https://github.com/EleutherAI/lm-evaluation-harness) to evaluate **TMMLU+**, **Table**, and **MMLU**. All choice problems adapt the selection by the log-likelihood. We use [the code](https://github.com/mtkresearch/TCEval) revised from [fastchat llm_judge](https://github.com/lm-sys/FastChat/tree/main/fastchat/llm_judge) (GPT4 as judge) to evaluate **MT-Bench-tw** and **MT-Bench**. | Models | |↑ MT-Bench-tw (Score)| TMMLU+ (ACC)|TTQA (ACC) | Table (ACC)| MT-Bench (Score)| MMLU (ACC) | |---------------------------------------------------------------------------------------------------------|--------|--------------------|--------------|-------------|-------------|------------------|-------------| | | |TC, Chat |TC, Knowledge |TC, Knowledge|TC, Reasoning|EN, Chat |EN, Knowledge| | | |0 shot | 0 shot |0 shot | 0 shot |0 shot | 0 shot | | [**Breexe-8x7B-Instruct-v0_1**](https://huggingface.co/MediaTek-Research/Breexe-8x7B-Instruct-v0_1) | 47B |7.2 | 48.92 | 75.22 | 39.58 | 7.8 | 69.90 | | [gpt-3.5-turbo-1106](https://openai.com) | |7.1 | 43.56 | 68.14 | 45.14 |7.9 | 67.09 | | [Qwen1.5-14B-Chat](https://huggingface.co/Qwen/Qwen1.5-14B-Chat) | 14B |7.1 | 51.76 | 70.79 | 51.39 |7.8 | 66.65 | | [Yi-34B-Chat](https://huggingface.co/01-ai/Yi-34B-Chat) | 34B |6.9 | 54.87 | 81.42 | 36.81 |7.6 | 71.04 | | [Qwen1.5-7B-Chat](https://huggingface.co/Qwen/Qwen1.5-7B-Chat) | 7B |6.4 | 44.65 | 67.86 | 34.72 |7.6 | 59.54 | | [Breeze-7B-Instruct-v1_0](https://huggingface.co/MediaTek-Research/Breeze-7B-Instruct-v1_0) | 7B |6.0 | 42.67 | 77.00 | 39.58 |7.4 | 61.73 | | [Yi-6B-Chat](https://huggingface.co/01-ai/Yi-6B-Chat) | 6B |5.0 | 44.79 | 72.57 | 25.69 |6.0 | 59.45 | | [Taiwan-LLM-13B-v2.0-chat](https://huggingface.co/yentinglin/Taiwan-LLM-13B-v2.0-chat) | 13B |5.0 | 29.47 | 67.26 | 23.61 |N/A* | 50.50 | | [Taiwan-LLM-7B-v2.1-chat](https://huggingface.co/yentinglin/Taiwan-LLM-7B-v2.1-chat) | 7B |4.2 | 28.08 | 51.33 | 31.25 |N/A* | 42.72 | \* Taiwan-LLM models responds to multi-turn questions (English) in Traditional Chinese. ## Base Model Performance **TMMLU+** and **Table** source from [MediaTek-Research/TCEval-v2](https://huggingface.co/datasets/MediaTek-Research/TCEval-v2), which derives from [TCEval-v1](https://github.com/mtkresearch/MR-Models/tree/main/TC-Eval) and [ikala/tmmluplus](https://huggingface.co/datasets/ikala/tmmluplus). **MMLU** sources from [hails/mmlu_no_train](https://huggingface.co/datasets/hails/mmlu_no_train). 
We use [the code](https://github.com/mtkresearch/TCEval) revised from [EleutherAI/lm-evaluation-harness](https://github.com/EleutherAI/lm-evaluation-harness) to evaluate **TMMLU+**, **Table**, and **MMLU**. All choice problems adapt the selection by the log-likelihood. | Models | |↑ TMMLU+ (ACC)| TTQA (ACC) | Table (ACC) | MMLU (ACC) | |-------------------------------------------------------------------------------------|------|--------------|-------------|-------------|-------------| | | |TC, Knowledge |TC, Knowledge|TC, Reasoning|EN, Knowledge| | | | 5 shot |5 shot | 5 shot | 5 shot | | [Yi-34B](https://huggingface.co/01-ai/Yi-34B) | 34B | 63.10 | 87.61 | 49.31 | 77.42 | | [Qwen1.5-14B](https://huggingface.co/Qwen/Qwen1.5-14B) | 14B | 54.30 | 78.76 | 54.86 | 70.17 | | **Breexe-8x7B-Base-v0_1** | 47B | 50.20 | 79.65 | 39.58 | 70.79 | | [Yi-6B](https://huggingface.co/01-ai/Yi-6B) | 6B | 49.63 | 75.22 | 34.72 | 65.35 | | [Qwen1.5-7B](https://huggingface.co/Qwen/Qwen1.5-7B) | 7B | 46.51 | 69.03 | 33.33 | 63.14 | | [Mixtral-8x7B-v0.1](https://huggingface.co/mistralai/Mixtral-8x7B-v0.1) | 47B | 46.10 | 64.60 | 47.22 | 72.94 | | [Breeze-7B-Base-v1_0](https://huggingface.co/MediaTek-Research/Breeze-7B-Base-v1_0) | 7B | 42.67 | 75.22 | 31.99 | 61.24 | | [Mistral-7B-v0.1](https://huggingface.co/mistralai/Mistral-7B-v0.1) | 7B | 36.93 | 53.10 | 27.78 | 64.89 | ## Use in Transformers First install direct dependencies: ``` pip install transformers torch accelerate ``` If you want faster inference using flash-attention2, you need to install these dependencies: ```bash pip install packaging ninja pip install flash-attn ``` Then load the model in transformers: ```python from transformers import AutoModelForCausalLM, AutoTokenizer import torch model = AutoModelForCausalLM.from_pretrained( "MediaTek-Research/Breexe-8x7B-Instruct-v0_1", device_map="auto", torch_dtype=torch.bfloat16, attn_implementation="flash_attention_2" # optional ) ``` The structure of the query is ```txt <s> SYS_PROMPT [INST] QUERY1 [/INST] RESPONSE1 [INST] QUERY2 [/INST] ``` where `SYS_PROMPT`, `QUERY1`, `RESPONSE1`, and `QUERY2` can be provided by the user. The suggested default `SYS_PROMPT` is ```txt You are a helpful AI assistant built by MediaTek Research. The user you are helping speaks Traditional Chinese and comes from Taiwan. ``` We also integrate `chat_template` into [tokenizer_config.json](tokenizer_config.json), so you can `apply_chat_template` to get the prompt. ```python >>> from transformers import AutoTokenizer >>> tokenizer = AutoTokenizer.from_pretrained("MediaTek-Research/Breexe-8x7B-Instruct-v0_1") >>> chat = [ ... {"role": "user", "content": "你好,請問你可以完成什麼任務?"}, ... {"role": "assistant", "content": "你好,我可以幫助您解決各種問題、提供資訊和協助您完成許多不同的任務。例如:回答技術問題、提供建議、翻譯文字、尋找資料或協助您安排行程等。請告訴我如何能幫助您。"}, ... {"role": "user", "content": "太棒了!"}, ... ] >>> tokenizer.apply_chat_template(chat, tokenize=False) "<s>You are a helpful AI assistant built by MediaTek Research. The user you are helping speaks Traditional Chinese and comes from Taiwan. [INST] 你好,請問你可以完成什麼任務? [/INST] 你好,我可以幫助您解決各種問題、提供資訊和協助您完成許多不同的任務。例如:回答技術問題、提供建議、翻譯文字、尋找資料或協助您安排行程等。請告訴我如何能幫助您。 [INST] 太棒了! 
[/INST] " # Tokenized results # ['▁', '你好', ',', '請問', '你', '可以', '完成', '什麼', '任務', '?'] # ['▁', '你好', ',', '我', '可以', '幫助', '您', '解決', '各種', '問題', '、', '提供', '資訊', '和', '協助', '您', '完成', '許多', '不同', '的', '任務', '。', '例如', ':', '回答', '技術', '問題', '、', '提供', '建議', '、', '翻譯', '文字', '、', '尋找', '資料', '或', '協助', '您', '安排', '行程', '等', '。', '請', '告訴', '我', '如何', '能', '幫助', '您', '。'] # ['▁', '太', '棒', '了', '!'] ``` ## Citation ``` @article{breexe8x7b2024, title={}, author={}, journal={arXiv}, year={2024} } ```
[ "SUMMARIZATION" ]
[ "BEAR" ]
StivenLancheros/roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_AugmentedTransfer_ES
StivenLancheros
token-classification
[ "transformers", "pytorch", "tensorboard", "roberta", "token-classification", "generated_from_trainer", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2022-03-21T22:05:55
2022-03-21T22:36:06
166
0
--- license: apache-2.0 metrics: - precision - recall - f1 - accuracy tags: - generated_from_trainer model-index: - name: roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_AugmentedTransfer_ES results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_AugmentedTransfer_ES This model is a fine-tuned version of [StivenLancheros/roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_Augmented_ES](https://huggingface.co/StivenLancheros/roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_Augmented_ES) on the CRAFT dataset. It achieves the following results on the evaluation set: - Loss: 0.2043 - Precision: 0.8666 - Recall: 0.8614 - F1: 0.8639 - Accuracy: 0.9734 ## Model description This model performs Named Entity Recognition for 6 entity tags: Sequence, Cell, Protein, Gene, Taxon, and Chemical from the CRAFT (Colorado Richly Annotated Full Text) Corpus in Spanish (MT translated) and English. Entity tags have been normalized and replaced from the original three-letter codes with full names, e.g. B-Protein, I-Chemical. This model is trained on augmented data created using Entity Replacement. 20% of the entities were replaced using a list of entities for each entity tag obtained from the official ontologies for each entity class. Three datasets (original, augmented, MT translated CRAFT) were concatenated. To improve the F1 score, transfer learning was completed in two steps. Using [StivenLancheros/roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_Augmented_ES](https://huggingface.co/StivenLancheros/roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_Augmented_ES) as a base model, I fine-tuned once more on the original CRAFT dataset in English. Biobert --> Augmented CRAFT --> CRAFT ES (MT translated) ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 3e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 ### Training results | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:---------:|:------:|:------:|:--------:| | 0.0088 | 1.0 | 1360 | 0.1793 | 0.8616 | 0.8487 | 0.8551 | 0.9721 | | 0.0046 | 2.0 | 2720 | 0.1925 | 0.8618 | 0.8426 | 0.8521 | 0.9713 | | 0.0032 | 3.0 | 4080 | 0.1926 | 0.8558 | 0.8630 | 0.8594 | 0.9725 | | 0.0011 | 4.0 | 5440 | 0.2043 | 0.8666 | 0.8614 | 0.8639 | 0.9734 | ### Framework versions - Transformers 4.17.0 - Pytorch 1.10.0+cu111 - Datasets 2.0.0 - Tokenizers 0.11.6
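## Usage example (illustrative sketch)

The card above does not include an inference example. Below is a minimal, hypothetical sketch using the standard Transformers `pipeline` API; it assumes the checkpoint's label mapping carries the normalized tag names described above, and the example sentence is illustrative only.

```python
from transformers import pipeline

# Hypothetical usage sketch -- not part of the original model card.
ner = pipeline(
    "token-classification",
    model="StivenLancheros/roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_AugmentedTransfer_ES",
    aggregation_strategy="simple",  # merge B-/I- pieces into whole entity spans
)

text = "La proteína p53 regula la expresión de varios genes en células humanas."
for entity in ner(text):
    print(entity["entity_group"], entity["word"], round(entity["score"], 3))
```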
[ "NAMED_ENTITY_RECOGNITION" ]
[ "CRAFT" ]
bcwarner/PubMedBERT-base-uncased-sts-combined
bcwarner
sentence-similarity
[ "sentence-transformers", "pytorch", "bert", "sentence-similarity", "medical", "arxiv:2308.09892", "license:mit", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
2024-02-02T04:07:02
2024-02-04T01:08:22
166
0
--- license: mit model_name: PubMedBERT-base-uncased-sts-combined pipeline_tag: sentence-similarity tags: - sentence-similarity - sentence-transformers - medical --- # PubMedBERT-base-uncased-sts-combined This repo contains a fine-tuned version of PubMedBERT to generate semantic textual similarity pairs, primarily for use in the `sts-select` feature selection package detailed [here](https://github.com/bcwarner/sts-select). Details about the model and vocabulary can be found in the paper [here](https://huggingface.co/papers/2308.09892). ## Citation If you use this model for STS-based feature selection, please cite the following paper: ``` @misc{warner2023utilizing, title={Utilizing Semantic Textual Similarity for Clinical Survey Data Feature Selection}, author={Benjamin C. Warner and Ziqi Xu and Simon Haroutounian and Thomas Kannampallil and Chenyang Lu}, year={2023}, eprint={2308.09892}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` Additionally, the original model and fine-tuning papers should be cited as follows: ``` @article{Gu_Tinn_Cheng_Lucas_Usuyama_Liu_Naumann_Gao_Poon_2021, title={Domain-specific language model pretraining for biomedical natural language processing}, volume={3}, number={1}, journal={ACM Transactions on Computing for Healthcare (HEALTH)}, publisher={ACM New York, NY}, author={Gu, Yu and Tinn, Robert and Cheng, Hao and Lucas, Michael and Usuyama, Naoto and Liu, Xiaodong and Naumann, Tristan and Gao, Jianfeng and Poon, Hoifung}, year={2021}, pages={1–23} } @inproceedings{Cer_Diab_Agirre_Lopez-Gazpio_Specia_2017, address={Vancouver, Canada}, title={SemEval-2017 Task 1: Semantic Textual Similarity Multilingual and Crosslingual Focused Evaluation}, url={https://aclanthology.org/S17-2001}, DOI={10.18653/v1/S17-2001}, booktitle={Proceedings of the 11th International Workshop on Semantic Evaluation (SemEval-2017)}, publisher={Association for Computational Linguistics}, author={Cer, Daniel and Diab, Mona and Agirre, Eneko and Lopez-Gazpio, Iñigo and Specia, Lucia}, year={2017}, month=aug, pages={1–14} } @article{Chiu_Pyysalo_Vulić_Korhonen_2018, title={Bio-SimVerb and Bio-SimLex: wide-coverage evaluation sets of word similarity in biomedicine}, volume={19}, number={1}, journal={BMC bioinformatics}, publisher={BioMed Central}, author={Chiu, Billy and Pyysalo, Sampo and Vulić, Ivan and Korhonen, Anna}, year={2018}, pages={1–13} } @inproceedings{May_2021, title={Machine translated multilingual STS benchmark dataset.}, url={https://github.com/PhilipMay/stsb-multi-mt}, author={May, Philip}, year={2021} } @article{Pedersen_Pakhomov_Patwardhan_Chute_2007, title={Measures of semantic similarity and relatedness in the biomedical domain}, volume={40}, number={3}, journal={Journal of biomedical informatics}, publisher={Elsevier}, author={Pedersen, Ted and Pakhomov, Serguei VS and Patwardhan, Siddharth and Chute, Christopher G}, year={2007}, pages={288–299} } ```
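## Usage sketch

The card does not show how to score similarities directly. Since the repository is tagged for sentence-transformers, a minimal sketch might look like the following; it assumes the repo loads as a `SentenceTransformer` model, and the survey items and target feature are illustrative placeholders.

```python
from sentence_transformers import SentenceTransformer, util

# Illustrative sketch; not an official example from the model card.
model = SentenceTransformer("bcwarner/PubMedBERT-base-uncased-sts-combined")

survey_items = [
    "How often do you experience pain that interferes with sleep?",
    "Rate your average daily fatigue over the past week.",
]
target_feature = "sleep disturbance due to chronic pain"

item_emb = model.encode(survey_items, convert_to_tensor=True)
target_emb = model.encode(target_feature, convert_to_tensor=True)

# Cosine similarities can then serve as relevance scores for STS-based feature selection.
print(util.cos_sim(target_emb, item_emb))
```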
[ "SEMANTIC_SIMILARITY" ]
[ "BIO-SIMLEX", "BIO-SIMVERB" ]
ai-forever/bert-base-NER-reptile-5-datasets
ai-forever
token-classification
[ "transformers", "pytorch", "bert", "token-classification", "PyTorch", "en", "dataset:conll2003", "dataset:wnut_17", "dataset:jnlpba", "dataset:conll2012", "dataset:BTC", "dataset:dfki-nlp/few-nerd", "arxiv:2010.02405", "model-index", "autotrain_compatible", "region:us" ]
2022-03-02T23:29:05
2022-02-04T10:51:07
165
3
--- datasets: - conll2003 - wnut_17 - jnlpba - conll2012 - BTC - dfki-nlp/few-nerd language: - en pipeline_tag: false tags: - PyTorch inference: false model-index: - name: bert-base-NER-reptile-5-datasets results: - task: type: named-entity-recognition name: few-shot-ner dataset: name: few-nerd-inter type: named-entity-recognition metrics: - type: f1 value: 56.12 name: 5 way 1~2 shot - type: f1 value: 62.7 name: 5-way 5~10-shot - type: f1 value: 50.3 name: 10-way 1~2-shot - type: f1 value: 58.82 name: 10-way 5~10-shot --- # BERT base uncased model pre-trained on 5 NER datasets The model was trained by _SberIDP_. The pretraining process and technical details are described [in this article](https://habr.com/ru/company/sberbank/blog/649609/). * Task: Named Entity Recognition * Base model: [bert-base-uncased](https://huggingface.co/bert-base-uncased) * Training data consists of 5 datasets: [CoNLL-2003](https://aclanthology.org/W03-0419.pdf), [WNUT17](http://noisy-text.github.io/2017/emerging-rare-entities.html), [JNLPBA](http://www.geniaproject.org/shared-tasks/bionlp-jnlpba-shared-task-2004), [CoNLL-2012 (OntoNotes)](https://aclanthology.org/W12-4501.pdf), [BTC](https://www.derczynski.com/papers/btc.pdf) * Testing was done in a few-shot scenario on the [Few-NERD dataset](https://github.com/thunlp/Few-NERD), using the model as a backbone for [StructShot](https://arxiv.org/abs/2010.02405) The model is pretrained for the NER task using [Reptile](https://openai.com/blog/reptile/) and can be fine-tuned for new entities with only a small number of samples.
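As a complement to the description above, here is a minimal, hypothetical sketch of loading the backbone for fine-tuning on a new entity set. The label list is a placeholder, and the checkpoint is assumed to expose standard `transformers` weights (the card does not document an inference-ready classification head).

```python
from transformers import AutoTokenizer, AutoModelForTokenClassification

# Hypothetical label set for a new domain -- replace with your own entities.
labels = ["O", "B-DISEASE", "I-DISEASE", "B-DRUG", "I-DRUG"]

tokenizer = AutoTokenizer.from_pretrained("ai-forever/bert-base-NER-reptile-5-datasets")
model = AutoModelForTokenClassification.from_pretrained(
    "ai-forever/bert-base-NER-reptile-5-datasets",
    num_labels=len(labels),
    id2label=dict(enumerate(labels)),
    label2id={label: i for i, label in enumerate(labels)},
    ignore_mismatched_sizes=True,  # the pretrained head may have a different label count
)
# Fine-tune with a handful of annotated examples (e.g. via the Trainer API) for the new entities.
```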
[ "NAMED_ENTITY_RECOGNITION" ]
[ "JNLPBA" ]
Cohere/Cohere-embed-english-v3.0
Cohere
null
[ "transformers", "mteb", "model-index", "endpoints_compatible", "region:us" ]
2023-11-02T12:24:52
2023-11-02T12:26:14
162
45
--- tags: - mteb model-index: - name: embed-english-v3.0 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 81.29850746268656 - type: ap value: 46.181772245676136 - type: f1 value: 75.47731234579823 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 95.61824999999999 - type: ap value: 93.22525741797098 - type: f1 value: 95.61627312544859 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 51.72 - type: f1 value: 50.529480725642465 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: ndcg_at_10 value: 61.521 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 49.173332266218914 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 42.1800504937582 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 61.69942465283367 - type: mrr value: 73.8089741898606 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 85.1805709775319 - type: cos_sim_spearman value: 83.50310749422796 - type: euclidean_pearson value: 83.57134970408762 - type: euclidean_spearman value: 83.50310749422796 - type: manhattan_pearson value: 83.422472116232 - type: manhattan_spearman value: 83.35611619312422 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 85.52922077922078 - type: f1 value: 85.48530911742581 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 40.95750155360001 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 37.25334765305169 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: ndcg_at_10 value: 50.037 - type: ndcg_at_10 value: 49.089 - type: ndcg_at_10 value: 60.523 - type: ndcg_at_10 value: 39.293 - type: ndcg_at_10 value: 30.414 - type: ndcg_at_10 value: 43.662 - type: ndcg_at_10 value: 43.667 - type: ndcg_at_10 value: 41.53158333333334 - type: ndcg_at_10 value: 35.258 - type: ndcg_at_10 value: 30.866 - type: ndcg_at_10 value: 40.643 - type: ndcg_at_10 
value: 40.663 - type: ndcg_at_10 value: 34.264 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: ndcg_at_10 value: 38.433 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: ndcg_at_10 value: 43.36 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 51.574999999999996 - type: f1 value: 46.84362123583929 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: ndcg_at_10 value: 88.966 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: ndcg_at_10 value: 42.189 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: ndcg_at_10 value: 70.723 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 93.56920000000001 - type: ap value: 90.56104192134326 - type: f1 value: 93.56471146876505 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: test revision: None metrics: - type: ndcg_at_10 value: 42.931000000000004 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 94.88372093023256 - type: f1 value: 94.64417024711646 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 76.52302781577748 - type: f1 value: 59.52848723786157 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.84330867518494 - type: f1 value: 72.18121296285702 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 78.73907195696033 - type: f1 value: 78.86079300338558 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 37.40673427491627 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 33.38936252583581 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 32.67317850167471 - type: mrr value: 33.9334102169254 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: ndcg_at_10 value: 38.574000000000005 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: 
test revision: None metrics: - type: ndcg_at_10 value: 61.556 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: ndcg_at_10 value: 88.722 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 58.45790556534654 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 66.35141658656822 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: ndcg_at_10 value: 20.314 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 85.49945063881191 - type: cos_sim_spearman value: 81.27177640994141 - type: euclidean_pearson value: 82.74613694646263 - type: euclidean_spearman value: 81.2717795980493 - type: manhattan_pearson value: 82.75268512220467 - type: manhattan_spearman value: 81.28362006796547 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 83.17562591888526 - type: cos_sim_spearman value: 74.37099514810372 - type: euclidean_pearson value: 79.97392043583372 - type: euclidean_spearman value: 74.37103618585903 - type: manhattan_pearson value: 80.00641585184354 - type: manhattan_spearman value: 74.35403985608939 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 84.96937598668538 - type: cos_sim_spearman value: 85.20181466598035 - type: euclidean_pearson value: 84.51715977112744 - type: euclidean_spearman value: 85.20181466598035 - type: manhattan_pearson value: 84.45150037846719 - type: manhattan_spearman value: 85.12338939049123 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 84.58787775650663 - type: cos_sim_spearman value: 80.97859876561874 - type: euclidean_pearson value: 83.38711461294801 - type: euclidean_spearman value: 80.97859876561874 - type: manhattan_pearson value: 83.34934127987394 - type: manhattan_spearman value: 80.9556224835537 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 88.57387982528677 - type: cos_sim_spearman value: 89.22666720704161 - type: euclidean_pearson value: 88.50953296228646 - type: euclidean_spearman value: 89.22666720704161 - type: manhattan_pearson value: 88.45343635855095 - type: manhattan_spearman value: 89.1638631562071 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 85.26071496425682 - type: cos_sim_spearman value: 86.31740966379304 - type: euclidean_pearson value: 85.85515938268887 - type: euclidean_spearman value: 86.31740966379304 - type: manhattan_pearson value: 85.80077191882177 - type: manhattan_spearman value: 
86.27885602957302 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 90.41413251495673 - type: cos_sim_spearman value: 90.3370719075361 - type: euclidean_pearson value: 90.5785973346113 - type: euclidean_spearman value: 90.3370719075361 - type: manhattan_pearson value: 90.5278703024898 - type: manhattan_spearman value: 90.23870483011629 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 66.1571023517868 - type: cos_sim_spearman value: 66.42297916256133 - type: euclidean_pearson value: 67.55835224919745 - type: euclidean_spearman value: 66.42297916256133 - type: manhattan_pearson value: 67.40537247802385 - type: manhattan_spearman value: 66.26259339863576 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 87.4251695055504 - type: cos_sim_spearman value: 88.54881886307972 - type: euclidean_pearson value: 88.54094330250571 - type: euclidean_spearman value: 88.54881886307972 - type: manhattan_pearson value: 88.49069549839685 - type: manhattan_spearman value: 88.49149164694148 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 85.19974508901711 - type: mrr value: 95.95137342686361 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: ndcg_at_10 value: 71.825 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.85346534653465 - type: cos_sim_ap value: 96.2457455868878 - type: cos_sim_f1 value: 92.49492900608519 - type: cos_sim_precision value: 93.82716049382715 - type: cos_sim_recall value: 91.2 - type: dot_accuracy value: 99.85346534653465 - type: dot_ap value: 96.24574558688776 - type: dot_f1 value: 92.49492900608519 - type: dot_precision value: 93.82716049382715 - type: dot_recall value: 91.2 - type: euclidean_accuracy value: 99.85346534653465 - type: euclidean_ap value: 96.2457455868878 - type: euclidean_f1 value: 92.49492900608519 - type: euclidean_precision value: 93.82716049382715 - type: euclidean_recall value: 91.2 - type: manhattan_accuracy value: 99.85643564356435 - type: manhattan_ap value: 96.24594126679709 - type: manhattan_f1 value: 92.63585576434738 - type: manhattan_precision value: 94.11764705882352 - type: manhattan_recall value: 91.2 - type: max_accuracy value: 99.85643564356435 - type: max_ap value: 96.24594126679709 - type: max_f1 value: 92.63585576434738 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 68.41861859721674 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 37.51202861563424 - 
task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 52.48207537634766 - type: mrr value: 53.36204747050335 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.397150340510397 - type: cos_sim_spearman value: 30.180928192386 - type: dot_pearson value: 30.397148822378796 - type: dot_spearman value: 30.180928192386 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: ndcg_at_10 value: 81.919 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: ndcg_at_10 value: 32.419 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 72.613 - type: ap value: 15.696112954573444 - type: f1 value: 56.30148693392767 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 62.02037351443125 - type: f1 value: 62.31189055427593 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 50.64186455543417 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 86.27883411813792 - type: cos_sim_ap value: 74.80076733774258 - type: cos_sim_f1 value: 68.97989210397255 - type: cos_sim_precision value: 64.42968392120935 - type: cos_sim_recall value: 74.22163588390501 - type: dot_accuracy value: 86.27883411813792 - type: dot_ap value: 74.80076608107143 - type: dot_f1 value: 68.97989210397255 - type: dot_precision value: 64.42968392120935 - type: dot_recall value: 74.22163588390501 - type: euclidean_accuracy value: 86.27883411813792 - type: euclidean_ap value: 74.80076820459502 - type: euclidean_f1 value: 68.97989210397255 - type: euclidean_precision value: 64.42968392120935 - type: euclidean_recall value: 74.22163588390501 - type: manhattan_accuracy value: 86.23711032961793 - type: manhattan_ap value: 74.73958348950038 - type: manhattan_f1 value: 68.76052948255115 - type: manhattan_precision value: 63.207964601769916 - type: manhattan_recall value: 75.3825857519789 - type: max_accuracy value: 86.27883411813792 - type: max_ap value: 74.80076820459502 - type: max_f1 value: 68.97989210397255 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.09263787014399 - type: cos_sim_ap value: 86.46378381763645 - type: cos_sim_f1 value: 78.67838784176413 - type: cos_sim_precision value: 76.20868812238419 - type: cos_sim_recall value: 81.3135201724669 - type: dot_accuracy value: 
89.09263787014399 - type: dot_ap value: 86.46378353247907 - type: dot_f1 value: 78.67838784176413 - type: dot_precision value: 76.20868812238419 - type: dot_recall value: 81.3135201724669 - type: euclidean_accuracy value: 89.09263787014399 - type: euclidean_ap value: 86.46378511891255 - type: euclidean_f1 value: 78.67838784176413 - type: euclidean_precision value: 76.20868812238419 - type: euclidean_recall value: 81.3135201724669 - type: manhattan_accuracy value: 89.09069740365584 - type: manhattan_ap value: 86.44864502475154 - type: manhattan_f1 value: 78.67372818141132 - type: manhattan_precision value: 76.29484953703704 - type: manhattan_recall value: 81.20572836464429 - type: max_accuracy value: 89.09263787014399 - type: max_ap value: 86.46378511891255 - type: max_f1 value: 78.67838784176413 --- # Cohere embed-english-v3.0 This repository contains the tokenizer for the Cohere `embed-english-v3.0` model. See our blogpost [Cohere Embed V3](https://txt.cohere.com/introducing-embed-v3/) for more details on this model. You can use the embedding model via the Cohere API, AWS SageMaker, or in your private deployments. ## Usage Cohere API The following code snippet shows the usage of the Cohere API. Install the cohere SDK via: ``` pip install -U cohere ``` Get your free API key on: www.cohere.com ```python # This snippet shows an example of how to use the Cohere Embed V3 models for semantic search. # Make sure to have the Cohere SDK installed, at least v4.30: pip install -U cohere # Get your API key from: www.cohere.com import cohere import numpy as np cohere_key = "{YOUR_COHERE_API_KEY}" #Get your API key from www.cohere.com co = cohere.Client(cohere_key) docs = ["The capital of France is Paris", "PyTorch is a machine learning framework based on the Torch library.", "The average cat lifespan is between 13-17 years"] #Encode your documents with input type 'search_document' doc_emb = co.embed(docs, input_type="search_document", model="embed-english-v3.0").embeddings doc_emb = np.asarray(doc_emb) #Encode your query with input type 'search_query' query = "What is Pytorch" query_emb = co.embed([query], input_type="search_query", model="embed-english-v3.0").embeddings query_emb = np.asarray(query_emb) #Compute the dot product between query embedding and document embedding scores = np.dot(query_emb, doc_emb.T)[0] #Find the highest scores max_idx = np.argsort(-scores) print(f"Query: {query}") for idx in max_idx: print(f"Score: {scores[idx]:.2f}") print(docs[idx]) print("--------") ``` ## Usage AWS SageMaker The embedding model can be privately deployed in your AWS Cloud using our [AWS SageMaker marketplace offering](https://aws.amazon.com/marketplace/pp/prodview-z6huxszcqc25i). It runs privately in your VPC, with latencies as low as 5ms for query encoding. ## Usage AWS Bedrock Soon the model will also be available via AWS Bedrock. Stay tuned. ## Private Deployment Do you want to run the model on your own hardware? [Contact Sales](https://cohere.com/contact-sales) to learn more. ## Supported Languages This model was trained on nearly 1B English training pairs. Evaluation results can be found in the [Embed V3.0 Benchmark Results spreadsheet](https://docs.google.com/spreadsheets/d/1w7gnHWMDBdEUrmHgSfDnGHJgVQE5aOiXCCwO3uNH_mI/edit?usp=sharing).
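Since this card says the repository ships the tokenizer for `embed-english-v3.0`, it can be handy to count tokens locally before calling the API (for example, to stay within input limits). The following is a minimal sketch, assuming the tokenizer is published as a standard `tokenizer.json` in a Hugging Face repository; the repository id below is an assumption and should be replaced with the id of this repository.

```python
# Sketch only: REPO_ID is an assumed placeholder for the repository hosting this tokenizer.
from tokenizers import Tokenizer

REPO_ID = "Cohere/Cohere-embed-english-v3.0"  # assumption; adjust to this repository's actual id
tokenizer = Tokenizer.from_pretrained(REPO_ID)

docs = [
    "The capital of France is Paris",
    "PyTorch is a machine learning framework based on the Torch library.",
]

# Count tokens per document before sending them to the embed endpoint.
for doc in docs:
    n_tokens = len(tokenizer.encode(doc).ids)
    print(f"{n_tokens:4d} tokens | {doc}")
```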
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
SorawitChok/SeaLLM3-7B-Chat-AWQ
SorawitChok
text-generation
[ "transformers", "safetensors", "qwen2", "text-generation", "sea", "multilingual", "conversational", "en", "zh", "id", "vi", "th", "ms", "arxiv:2312.00738", "arxiv:2306.05179", "license:other", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "4-bit", "awq", "region:us" ]
2024-07-16T02:38:35
2025-03-09T13:30:37
162
0
--- language: - en - zh - id - vi - th - ms license: other license_name: seallms license_link: https://huggingface.co/SeaLLMs/SeaLLM-13B-Chat/blob/main/LICENSE tags: - sea - multilingual --- # *SeaLLMs-v3* - Large Language Models for Southeast Asia <h1 style="color: #ff3860">**This repository is a modified version of SeaLLMs/SeaLLM3-7B-Chat**</h1> <h1 style="color: #ff3860">We modified the tokenizer_config file to enable SFT with Unsloth</h1> ## We offer SeaLLM3-7B-Chat-AWQ, a 4-bit AWQ-quantized version of SeaLLMs/SeaLLM3-7B-Chat (compatible with vLLM) <p align="center"> <a href="https://damo-nlp-sg.github.io/SeaLLMs/" target="_blank" rel="noopener">Website</a> &nbsp;&nbsp; <a href="https://huggingface.co/SeaLLMs/SeaLLM3-7B-Chat" target="_blank" rel="noopener"> 🤗 Tech Memo</a> &nbsp;&nbsp; <a href="https://huggingface.co/spaces/SeaLLMs/SeaLLM-Chat" target="_blank" rel="noopener"> 🤗 DEMO</a> &nbsp;&nbsp; <a href="https://github.com/DAMO-NLP-SG/SeaLLMs" target="_blank" rel="noopener">Github</a> &nbsp;&nbsp; <a href="https://arxiv.org/pdf/2312.00738.pdf" target="_blank" rel="noopener">Technical Report</a> </p> We introduce **SeaLLMs-v3**, the latest series of the SeaLLMs (Large Language Models for Southeast Asian languages) family. It achieves state-of-the-art performance among models with similar sizes, excelling across a diverse array of tasks such as world knowledge, mathematical reasoning, translation, and instruction following. At the same time, it has been specifically enhanced to be more trustworthy, exhibiting reduced hallucination and providing safe responses, particularly for queries closely related to Southeast Asian culture. ## 🔥 Highlights - State-of-the-art performance compared to open-source models of similar sizes, evaluated across various dimensions such as human exam questions, instruction-following, mathematics, and translation. - Significantly enhanced instruction-following capability, especially in multi-turn settings. - Safer to use, with significantly fewer instances of hallucination and greater sensitivity to local contexts. ## Uses SeaLLMs is tailored for handling a wide range of languages spoken in the SEA region, including English, Chinese, Indonesian, Vietnamese, Thai, Tagalog, Malay, Burmese, Khmer, Lao, Tamil, and Javanese. This page introduces the SeaLLMs-v3-7B-Chat model, specifically fine-tuned to follow human instructions effectively for task completion, making it directly applicable to your applications. ### Inference with `vllm` You can also conduct inference with [vllm](https://docs.vllm.ai/en/stable/index.html), which is a fast and easy-to-use library for LLM inference and serving. To use vllm, first install the latest version via `pip install vllm`. ```python from vllm import LLM, SamplingParams prompts = [ "Who is the president of US?", "Can you speak Indonesian?"
] llm = LLM("SorawitChok/SeaLLM3-7B-Chat-AWQ", quantization="AWQ") sparams = SamplingParams(temperature=0.1, max_tokens=512) outputs = llm.generate(prompts, sparams) # print out the model response for output in outputs: prompt = output.prompt generated_text = output.outputs[0].text print(f"Prompt: {prompt}\nResponse: {generated_text}\n\n") ``` ### Bias, Risks, and Limitations <blockquote style="color:red"> <p><strong style="color: red">Terms of Use and License</strong>: By using our released weights, codes, and demos, you agree to and comply with the terms and conditions specified in our <a href="https://huggingface.co/SeaLLMs/SeaLLM-Chat-13b/edit/main/LICENSE" target="_blank" rel="noopener">SeaLLMs Terms Of Use</a>. </blockquote> > **Disclaimer**: > We must note that even though the weights, codes, and demos are released in an open manner, similar to other pre-trained language models, and despite our best efforts in red teaming and safety fine-tuning and enforcement, our models come with potential risks, including but not limited to inaccurate, misleading or potentially harmful generation. > Developers and stakeholders should perform their own red teaming and provide related security measures before deployment, and they must abide by and comply with local governance and regulations. > In no event shall the authors be held liable for any claim, damages, or other liability arising from the use of the released weights, codes, or demos. ## Evaluation We conduct our evaluation along two dimensions: 1. **Model Capability**: We assess the model's performance on human exam questions, its ability to follow instructions, its proficiency in mathematics, and its translation accuracy. 2. **Model Trustworthiness**: We evaluate the model's safety and tendency to hallucinate, particularly in the context of Southeast Asia. ### Model Capability #### Multilingual World Knowledge - M3Exam [M3Exam](https://arxiv.org/abs/2306.05179) consists of local exam questions collected from each country. It reflects the model's world knowledge (e.g., with language or social science subjects) and reasoning abilities (e.g., with mathematics or natural science subjects). | Model | en | zh | id | th | vi | avg | avg_sea | |:-----------------|-----:|------:|-----:|-----:|-----:|------:|----------:| | Sailor-7B-Chat | 0.66 | 0.652 | 0.475 | 0.462 | 0.513 | 0.552 | 0.483 | | gemma-7b | 0.732 | 0.519 | 0.475 | 0.46 | 0.594 | 0.556 | 0.510 | | SeaLLM-7B-v2.5 | 0.758 | 0.581 | 0.499 | 0.502 | 0.622 | 0.592 | 0.541 | | Qwen2-7B | 0.815 | 0.874 | 0.53 | 0.479 | 0.628 | 0.665 | 0.546 | | Qwen2-7B-Instruct| 0.809 | 0.88 | 0.558 | 0.555 | 0.624 | 0.685 | 0.579 | | Sailor-14B | 0.748 | 0.84 | 0.536 | 0.528 | 0.621 | 0.655 | 0.562 | | Sailor-14B-Chat | 0.749 | 0.843 | 0.553 | 0.566 | 0.637 | 0.67 | 0.585 | | SeaLLMs-v3-7B | 0.814 | 0.866 | 0.549 | 0.52 | 0.628 | 0.675 | 0.566 | | SeaLLMs-v3-7B-Chat | 0.809 | 0.874 | 0.558 | 0.569 | 0.649 | 0.692 | 0.592 | #### Multilingual Instruction-following Capability - SeaBench SeaBench consists of multi-turn human instructions spanning various task types. It evaluates chat-based models on their ability to follow human instructions in both single and multi-turn settings and assesses their performance across different task types. The dataset and corresponding evaluation code will be released soon! 
| model | id<br>turn1 | id<br>turn2 | id<br>avg | th<br>turn1 | th<br>turn2 | th<br>avg | vi<br>turn1 | vi<br>turn2 | vi<br>avg | avg | |:----------------|------------:|------------:|---------:|------------:|------------:|---------:|------------:|------------:|---------:|------:| | Qwen2-7B-Instruct| 5.93 | 5.84 | 5.89 | 5.47 | 5.20 | 5.34 | 6.17 | 5.60 | 5.89 | 5.70 | | SeaLLM-7B-v2.5 | 6.27 | 4.96 | 5.62 | 5.79 | 3.82 | 4.81 | 6.02 | 4.02 | 5.02 | 5.15 | | Sailor-14B-Chat | 5.26 | 5.53 | 5.40 | 4.62 | 4.36 | 4.49 | 5.31 | 4.74 | 5.03 | 4.97 | | Sailor-7B-Chat | 4.60 | 4.04 | 4.32 | 3.94 | 3.17 | 3.56 | 4.82 | 3.62 | 4.22 | 4.03 | | SeaLLMs-v3-7B-Chat | 6.73 | 6.59 | 6.66 | 6.48 | 5.90 | 6.19 | 6.34 | 5.79 | 6.07 | 6.31 | #### Multilingual Math We evaluate the multilingual math capability using the MGSM dataset. MGSM originally contains Chinese and Thai testing sets only, so we use Google Translate to translate the same English questions into the other SEA languages. Note that we follow each country's convention for writing numbers; e.g., in Indonesian and Vietnamese, dots are used as thousands separators and commas as decimal separators, the opposite of the English system. | MGSM | en | id | ms | th | vi | zh | avg | |:--------------------------|------:|------:|------:|------:|------:|------:|------:| | Sailor-7B-Chat | 33.6 | 22.4 | 22.4 | 21.6 | 25.2 | 29.2 | 25.7 | | Meta-Llama-3-8B-Instruct | 77.6 | 48 | 57.6 | 56 | 46.8 | 58.8 | 57.5 | | glm-4-9b-chat | 72.8 | 53.6 | 53.6 | 34.8 | 52.4 | 70.8 | 56.3 | | Qwen1.5-7B-Chat | 64 | 34.4 | 38.4 | 25.2 | 36 | 53.6 | 41.9 | | Qwen2-7B-instruct | 82 | 66.4 | 62.4 | 58.4 | 64.4 | 76.8 | 68.4 | | aya-23-8B | 28.8 | 16.4 | 14.4 | 2 | 16 | 12.8 | 15.1 | | gemma-1.1-7b-it | 58.8 | 32.4 | 34.8 | 31.2 | 39.6 | 35.2 | 38.7 | | SeaLLM-7B-v2.5 | 79.6 | 69.2 | 70.8 | 61.2 | 66.8 | 62.4 | 68.3 | | SeaLLMs-v3-7B-Chat | 74.8 | 71.2 | 70.8 | 71.2 | 71.2 | 79.6 | 73.1 | #### Translation We use the test sets from Flores-200 for evaluation and report the zero-shot chrF scores for translations between every pair of languages. Each row in the table below presents the average results of translating from various source languages into the target languages. The last column displays the overall average results of translating from any language to any other language for each model. | model | en | id | jv | km | lo | ms | my | ta | th | tl | vi | zh | avg | |:-----------------------------------------------|------:|------:|------:|------:|------:|------:|------:|------:|------:|------:|------:|------:|------:| |Meta-Llama-3-8B-Instruct | 51.54 | 49.03 | 22.46 | 15.34 | 5.42 | 46.72 | 21.24 | 32.09 | 35.75 | 40.8 | 39.31 | 14.87 | 31.22 | |Qwen2-7B-Instruct | 50.36 | 47.55 | 29.36 | 19.26 | 11.06 | 42.43 | 19.33 | 20.04 | 36.07 | 37.91 | 39.63 | 22.87 | 31.32 | |Sailor-7B-Chat | 49.4 | 49.78 | 28.33 | 2.68 | 6.85 | 47.75 | 5.35 | 18.23 | 38.92 | 29 | 41.76 | 20.87 | 28.24 | |SeaLLM-7B-v2.5 | 55.09 | 53.71 | 18.13 | 18.09 | 15.53 | 51.33 | 19.71 | 26.1 | 40.55 | 45.58 | 44.56 | 24.18 | 34.38 | |SeaLLMs-v3-7B-Chat | 54.68 | 52.52 | 29.86 | 27.3 | 26.34 | 45.04 | 21.54 | 31.93 | 41.52 | 38.51 | 43.78 | 26.1 | 36.52 | ### Model Trustworthiness #### Hallucination This evaluates whether a model can refuse to answer questions about non-existent entities. We report the F1 score, with refusal as the positive label. Our test set consists of ~1k test samples per language. Each unanswerable question is generated by GPT4o. The ratio of answerable to unanswerable questions is 1:1. We define keywords to automatically detect whether a model-generated response is a refusal response; a rough sketch of how such keyword-based scoring could work is shown below.
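This sketch is illustrative only: the actual keyword lists and scoring script used for the Refusal-F1 numbers are not included in this card, so the keywords and toy data below are assumptions.

```python
# Illustrative sketch of keyword-based refusal detection and Refusal-F1 scoring.
from typing import List

# Assumed example keywords; not the authors' actual list.
REFUSAL_KEYWORDS = ["i don't know", "i am not sure", "no information", "does not exist", "cannot find"]

def is_refusal(response: str) -> bool:
    """Return True if the response contains any refusal keyword."""
    text = response.lower()
    return any(keyword in text for keyword in REFUSAL_KEYWORDS)

def refusal_f1(responses: List[str], should_refuse: List[bool]) -> float:
    """F1 score with 'refuse' as the positive label."""
    preds = [is_refusal(r) for r in responses]
    tp = sum(1 for p, y in zip(preds, should_refuse) if p and y)
    fp = sum(1 for p, y in zip(preds, should_refuse) if p and not y)
    fn = sum(1 for p, y in zip(preds, should_refuse) if not p and y)
    if tp == 0:
        return 0.0
    precision = tp / (tp + fp)
    recall = tp / (tp + fn)
    return 2 * precision * recall / (precision + recall)

# Toy usage: one answerable and one unanswerable question (1:1 ratio, as in the benchmark).
responses = ["Paris is the capital of France.", "I cannot find any information about this entity."]
should_refuse = [False, True]
print(f"Refusal-F1: {refusal_f1(responses, should_refuse):.2f}")
```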
| Refusal-F1 Scores | en | zh | vi | th | id | avg | |:---------------------|------:|------:|------:|------:|------:|-------:| | Qwen1.5-7B-Instruct | 53.85 | 51.70 | 52.85 | 35.5 | 58.4 | 50.46 | | Qwen2-7B-Instruct | 58.79 | 33.08 | 56.21 | 44.6 | 55.98 | 49.732 | | SeaLLM-7B-v2.5 | 12.90 | 0.77 | 2.45 | 19.42 | 0.78 | 7.26 | | Sailor-7B-Chat | 33.49 | 18.82 | 5.19 | 9.68 | 16.42 | 16.72 | | glm-4-9b-chat | 44.48 | 37.89 | 18.66 | 4.27 | 1.97 | 21.45 | | aya-23-8B | 6.38 | 0.79 | 2.83 | 1.98 | 14.80 | 5.36 | | Llama-3-8B-Instruct | 72.08 | 0.00 | 1.23 | 0.80 | 3.91 | 15.60 | | gemma-1.1-7b-it | 52.39 | 27.74 | 23.96 | 22.97 | 31.72 | 31.76 | | SeaLLMs-v3-7B-Chat | 71.36 | 78.39 | 77.93 | 61.31 | 68.95 | 71.588 | #### Safety The MultiJail dataset consists of harmful prompts in multiple languages. We take the relevant prompts in SEA languages and report the safe-response rate (the higher, the better). | Model | en | jv | th | vi | zh | avg | |:------------------------|-------:|-------:|-------:|-------:|------:|-------:| | Qwen2-7B-Instruct | 0.8857 | 0.4381 | 0.6381 | 0.7302 | 0.873 | 0.713 | | Sailor-7B-Chat | 0.7873 | 0.5492 | 0.6222 | 0.6762 | 0.7619 | 0.6794 | | Meta-Llama-3-8B-Instruct| 0.8825 | 0.2635 | 0.7111 | 0.6984 | 0.7714 | 0.6654 | | Sailor-14B-Chat | 0.8698 | 0.3048 | 0.5365 | 0.6095 | 0.727 | 0.6095 | | glm-4-9b-chat | 0.7714 | 0.2127 | 0.3016 | 0.6063 | 0.7492 | 0.52824| | SeaLLMs-v3-7B-Chat | 0.8889 | 0.6000 | 0.7333 | 0.8381 | 0.927 | 0.7975 | ## Acknowledgement to Our Linguists We would like to express our special thanks to our professional and native linguists, Tantong Champaiboon, Nguyen Ngoc Yen Nhi and Tara Devina Putri, who helped build, evaluate, and fact-check our sampled pretraining and SFT datasets, as well as evaluate our models across different aspects, especially safety. ## Citation If you find our project useful, we hope you would kindly star our repo and cite our work as follows: ``` @article{damonlp2024seallm3, author = {Wenxuan Zhang*, Hou Pong Chan*, Yiran Zhao*, Mahani Aljunied*, Jianyu Wang, Chaoqun Liu, Yue Deng, Zhiqiang Hu, Weiwen Xu, Yew Ken Chia, Xin Li, Lidong Bing}, title = {SeaLLMs - Large Language Models for Southeast Asia}, year = {2024}, } ``` Corresponding Author: [email protected]
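As a complement to the `vllm` example earlier in this card: since this is a chat model, responses are usually better when each user message is wrapped in the model's chat template before generation. The following is a minimal sketch (not part of the original card); the sampling settings simply mirror the example above and are not prescriptive.

```python
from transformers import AutoTokenizer
from vllm import LLM, SamplingParams

model_id = "SorawitChok/SeaLLM3-7B-Chat-AWQ"
tokenizer = AutoTokenizer.from_pretrained(model_id)
llm = LLM(model_id, quantization="AWQ")

# Wrap each user message in the model's chat template before generation.
questions = ["Who is the president of the US?", "Can you speak Indonesian?"]
prompts = [
    tokenizer.apply_chat_template(
        [{"role": "user", "content": q}],
        tokenize=False,
        add_generation_prompt=True,
    )
    for q in questions
]

sparams = SamplingParams(temperature=0.1, max_tokens=512)  # assumed settings, mirroring the card's example
for output in llm.generate(prompts, sparams):
    print(output.outputs[0].text.strip())
    print("---")
```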
[ "TRANSLATION" ]
[ "CHIA" ]
ayjays132/QNetworkGPT2Medium
ayjays132
text-generation
[ "transformers", "pytorch", "gpt2", "text-generation", "en", "dataset:vicgalle/alpaca-gpt4", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
2024-01-07T11:36:09
2024-03-28T10:14:58
160
2
--- datasets: - vicgalle/alpaca-gpt4 language: - en library_name: transformers license: apache-2.0 metrics: - bleu - accuracy pipeline_tag: text-generation model_type: GPT2LMHeadModel architectures: - GPT2LMHeadModel model_filename: pytorch_model.bin config: activation_function: gelu_new attn_pdrop: 0.1 bos_token_id: 100313 embd_pdrop: 0.1 eos_token_id: 100313 initializer_range: 0.02 layer_norm_epsilon: 1.0e-05 n_ctx: 256 n_embd: 256 n_head: 16 n_layer: 24 n_positions: 256 n_special: 0 predict_special_tokens: true resid_pdrop: 0.1 summary_first_dropout: 0.1 summary_proj_to_labels: true summary_type: cls_index summary_use_proj: true task_specific_params: text-generation: do_sample: true max_length: 255 vocab_size: 100314 --- # QNetworkGPT2Mini: Reinventing Text Generation with AI 📝🤖 ![Text Generation](https://static.vecteezy.com/system/resources/previews/023/477/674/non_2x/ai-generative-blue-red-ink-splash-illustration-free-png.png) --- ## Hyperparameters used Here's a consolidated list of hyperparameters for your QNetworkGPT2 RL model: - `input_dim`: Input dimension for the RL agent. - `output_dim`: Output dimension for the RL agent. - `hidden_dim`: Hidden dimension for the RL agent. - `num_episodes`: Number of training episodes. - `generate_interval`: Interval for text generation during training. - `load_path`: Path to load a pre-trained model. - `model_name`: GPT-2 model architecture name. - `max_new_tokens`: Maximum new tokens allowed during text generation. - `max_length`: Maximum sequence length for input data. - `sequence_length`: Length of sequences in the dataset. - `batch_size`: Batch size for training. - `learning_rate`: Learning rate for optimization. - `gamma`: Discount factor for rewards. - `clip_epsilon`: Epsilon value for policy loss clipping. - `entropy_beta`: Beta value for entropy regularization. - `epsilon_start`: Initial epsilon for epsilon-greedy exploration. - `epsilon_end`: Minimum epsilon value. - `epsilon_decay`: Epsilon decay rate. - `heuristic_fn`: Heuristic function for action selection. - `save_path`: Path to save the trained model. Researchers can use these hyperparameters to configure and train their QNetworkGPT2 RL models effectively for text generation tasks (an illustrative sketch of some of these components appears at the end of this card). --- ## Overview QNetworkGPT2 is an extraordinary AI model that marries Reinforcement Learning (RL) with the power of the GPT-2 language model to create impressive text generation experiences. 🚀 ## Capabilities ### 1. Ultimate Flexibility - Craft RL agents for diverse text generation tasks. - Customize hyperparameters effortlessly. - Harness the brilliance of GPT-2 for text generation magic. ### 2. Q-Network for Mastery - Unleash the QNetwork class for Q-learning in text generation. - Revel in its multi-layer neural network architecture with residual connections and strategic dropout rates. - Empower your model with heuristic functions for ingenious action selection. ### 3. PPO Algorithm - Embrace the Proximal Policy Optimization (PPO) algorithm for supreme policy updates. - Sculpt policies with the wisdom of experiences and rewards. ### 4. Tailored RL Environment - Tailor-make your own RL environment for text generation quests. - Reward the AI with BLEU scores and semantic similarity. - Dance through text generation steps with episode-ending conditions. ### 5. Replay Buffer and Memory - Store and summon experiences with grace in a replay buffer. - Command a replay memory class to oversee experiences like a pro. ### 6. 
Epsilon-Greedy Exploration - The agent employs epsilon-greedy exploration for marvelous discoveries. ### 7. Target Network for Rock-Solid Stability - Keep target networks in check for unwavering stability during Q-learning escapades. --- ## How It Operates 1. Birth an RL Agent, fine-tuned to your desires. 2. Train the agent using PPO magic or embrace Q-learning for epic journeys. 3. Birth text from input data with the policy network. 4. Evaluate the text's quality using BLEU and semantic beauty. 5. Commence your custom RL environment for text generation marvels. --- ## Uniqueness and Epicness - The union of RL and GPT-2 for text generation mastery. - Advanced text tasks unfold gracefully with QNetwork and its heuristic powers. - The limitless canvas to create RL agents for every text challenge. - Rewarding text quality and semantic harmony with AI-calculated rewards. - The blueprint for a customizable and adaptable RL text generation paradise. --- ## Get Started Now 1. Forge your QNetworkGPT2 with personalized hyperparameters. 2. Unleash the potential with RL-based training. 3. Conjure text aligned with your task and dream. 4. Assess the text with metrics and demands. 5. Fine-tune and enhance for your text generation quest. --- ```python # Load model directly from transformers import AutoTokenizer, AutoModelForCausalLM tokenizer = AutoTokenizer.from_pretrained("ayjays132/QNetworkGPT2") model = AutoModelForCausalLM.from_pretrained("ayjays132/QNetworkGPT2") # Set the EOS token as the padding token tokenizer.pad_token = tokenizer.eos_token # Initialize a conversation history conversation_history = [] # Start a conversation loop while True: # Get user input user_input = input("You: ") # Add user input to the conversation history conversation_history.append(user_input) # Concatenate the conversation strings conversation_text = " ".join(conversation_history) # Tokenize the input, truncating to the model's maximum length input_ids = tokenizer.encode(conversation_text, return_tensors="pt", truncation=True) # Generate a response output_ids = model.generate(input_ids, max_new_tokens=150, num_return_sequences=1, pad_token_id=tokenizer.eos_token_id) # Decode only the newly generated tokens, not the prompt generated_response = tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True) # Print the generated response print("Bot:", generated_response) # Add bot's response to the conversation history conversation_history.append(generated_response) ``` --- ## Explore and Create QNetworkGPT2 is your ticket to exploring new horizons in text generation. From chatbots and content creation to storytelling and beyond, it's your AI companion for all text adventures. 🌟 Embrace innovation, adaptation, and expansion to conquer your unique text generation challenges. Your text generation revolution starts here! 📚🤖
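The card above describes a Q-network, epsilon-greedy exploration, and a replay memory, but it does not ship the corresponding training code. The following is a minimal, illustrative sketch of those pieces only; the class names, dimensions, and decay schedule are assumptions, not the author's implementation.

```python
# Illustrative sketch of the Q-network, replay memory, and epsilon-greedy selection described above.
import random
from collections import deque

import torch
import torch.nn as nn


class QNetwork(nn.Module):
    """Small MLP Q-network: state features -> one Q-value per action (e.g., per token)."""

    def __init__(self, input_dim: int, hidden_dim: int, output_dim: int):
        super().__init__()
        self.net = nn.Sequential(
            nn.Linear(input_dim, hidden_dim), nn.ReLU(), nn.Dropout(0.1),
            nn.Linear(hidden_dim, output_dim),
        )

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.net(x)


class ReplayMemory:
    """Fixed-size buffer of (state, action, reward, next_state, done) transitions."""

    def __init__(self, capacity: int = 10_000):
        self.buffer = deque(maxlen=capacity)

    def push(self, transition):
        self.buffer.append(transition)

    def sample(self, batch_size: int):
        return random.sample(self.buffer, batch_size)


def select_action(q_net: QNetwork, state: torch.Tensor, epsilon: float, num_actions: int) -> int:
    """Epsilon-greedy: random action with probability epsilon, otherwise argmax of Q-values."""
    if random.random() < epsilon:
        return random.randrange(num_actions)
    with torch.no_grad():
        return int(q_net(state).argmax().item())


# Toy usage with assumed dimensions (output_dim sized like the card's vocab_size).
input_dim, hidden_dim, output_dim = 256, 512, 100314
q_net = QNetwork(input_dim, hidden_dim, output_dim)
memory = ReplayMemory()
epsilon_start, epsilon_end, epsilon_decay = 1.0, 0.05, 0.995  # assumed schedule

epsilon = epsilon_start
state = torch.randn(input_dim)
action = select_action(q_net, state, epsilon, output_dim)
memory.push((state, action, 0.0, torch.randn(input_dim), False))
epsilon = max(epsilon_end, epsilon * epsilon_decay)
print(f"chose action {action}, new epsilon {epsilon:.3f}")
```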
[ "SEMANTIC_SIMILARITY" ]
[ "CRAFT" ]
Hum-Works/lodestone-base-4096-v1
Hum-Works
sentence-similarity
[ "sentence-transformers", "pytorch", "bert", "feature-extraction", "sentence-similarity", "mteb", "custom_code", "en", "dataset:s2orc", "dataset:flax-sentence-embeddings/stackexchange_title_body_jsonl", "dataset:flax-sentence-embeddings/stackexchange_titlebody_best_voted_answer_jsonl", "dataset:flax-sentence-embeddings/stackexchange_title_best_voted_answer_jsonl", "dataset:flax-sentence-embeddings/stackexchange_titlebody_best_and_down_voted_answer_jsonl", "dataset:sentence-transformers/reddit-title-body", "dataset:msmarco", "dataset:gooaq", "dataset:yahoo_answers_topics", "dataset:code_search_net", "dataset:search_qa", "dataset:eli5", "dataset:snli", "dataset:multi_nli", "dataset:wikihow", "dataset:natural_questions", "dataset:trivia_qa", "dataset:embedding-data/sentence-compression", "dataset:embedding-data/flickr30k-captions", "dataset:embedding-data/altlex", "dataset:embedding-data/simple-wiki", "dataset:embedding-data/QQP", "dataset:embedding-data/SPECTER", "dataset:embedding-data/PAQ_pairs", "dataset:embedding-data/WikiAnswers", "dataset:sentence-transformers/embedding-training-data", "arxiv:2108.12409", "arxiv:1904.06472", "arxiv:2102.07033", "arxiv:2104.08727", "arxiv:1704.05179", "arxiv:1810.09305", "license:apache-2.0", "model-index", "autotrain_compatible", "text-embeddings-inference", "region:us" ]
2023-08-25T16:33:26
2023-10-26T22:00:30
158
11
--- datasets: - s2orc - flax-sentence-embeddings/stackexchange_title_body_jsonl - flax-sentence-embeddings/stackexchange_titlebody_best_voted_answer_jsonl - flax-sentence-embeddings/stackexchange_title_best_voted_answer_jsonl - flax-sentence-embeddings/stackexchange_titlebody_best_and_down_voted_answer_jsonl - sentence-transformers/reddit-title-body - msmarco - gooaq - yahoo_answers_topics - code_search_net - search_qa - eli5 - snli - multi_nli - wikihow - natural_questions - trivia_qa - embedding-data/sentence-compression - embedding-data/flickr30k-captions - embedding-data/altlex - embedding-data/simple-wiki - embedding-data/QQP - embedding-data/SPECTER - embedding-data/PAQ_pairs - embedding-data/WikiAnswers - sentence-transformers/embedding-training-data language: en license: apache-2.0 pipeline_tag: sentence-similarity tags: - sentence-transformers - feature-extraction - sentence-similarity - mteb inference: false model-index: - name: lodestone-base-4096-v1 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 69.7313432835821 - type: ap value: 31.618259511417733 - type: f1 value: 63.30313825394228 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 86.89837499999999 - type: ap value: 82.39500885672128 - type: f1 value: 86.87317947399657 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 44.05 - type: f1 value: 42.67624383248947 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 26.173999999999996 - type: map_at_10 value: 40.976 - type: map_at_100 value: 42.067 - type: map_at_1000 value: 42.075 - type: map_at_3 value: 35.917 - type: map_at_5 value: 38.656 - type: mrr_at_1 value: 26.814 - type: mrr_at_10 value: 41.252 - type: mrr_at_100 value: 42.337 - type: mrr_at_1000 value: 42.345 - type: mrr_at_3 value: 36.226 - type: mrr_at_5 value: 38.914 - type: ndcg_at_1 value: 26.173999999999996 - type: ndcg_at_10 value: 49.819 - type: ndcg_at_100 value: 54.403999999999996 - type: ndcg_at_1000 value: 54.59 - type: ndcg_at_3 value: 39.231 - type: ndcg_at_5 value: 44.189 - type: precision_at_1 value: 26.173999999999996 - type: precision_at_10 value: 7.838000000000001 - type: precision_at_100 value: 0.9820000000000001 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 16.287 - type: precision_at_5 value: 12.191 - type: recall_at_1 value: 26.173999999999996 - type: recall_at_10 value: 78.378 - type: recall_at_100 value: 98.222 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 48.862 - type: recall_at_5 value: 60.953 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 42.31689035788179 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 31.280245136660984 - task: 
type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 58.79109720839415 - type: mrr value: 71.79615705931495 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 76.44918756608115 - type: cos_sim_spearman value: 70.86607256286257 - type: euclidean_pearson value: 74.12154678100815 - type: euclidean_spearman value: 70.86607256286257 - type: manhattan_pearson value: 74.0078626964417 - type: manhattan_spearman value: 70.68353828321327 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 75.40584415584415 - type: f1 value: 74.29514617572676 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 37.41860080664014 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 29.319217023090705 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 26.595000000000002 - type: map_at_10 value: 36.556 - type: map_at_100 value: 37.984 - type: map_at_1000 value: 38.134 - type: map_at_3 value: 33.417 - type: map_at_5 value: 35.160000000000004 - type: mrr_at_1 value: 32.761 - type: mrr_at_10 value: 41.799 - type: mrr_at_100 value: 42.526 - type: mrr_at_1000 value: 42.582 - type: mrr_at_3 value: 39.39 - type: mrr_at_5 value: 40.727000000000004 - type: ndcg_at_1 value: 32.761 - type: ndcg_at_10 value: 42.549 - type: ndcg_at_100 value: 47.915 - type: ndcg_at_1000 value: 50.475 - type: ndcg_at_3 value: 37.93 - type: ndcg_at_5 value: 39.939 - type: precision_at_1 value: 32.761 - type: precision_at_10 value: 8.312 - type: precision_at_100 value: 1.403 - type: precision_at_1000 value: 0.197 - type: precision_at_3 value: 18.741 - type: precision_at_5 value: 13.447999999999999 - type: recall_at_1 value: 26.595000000000002 - type: recall_at_10 value: 54.332 - type: recall_at_100 value: 76.936 - type: recall_at_1000 value: 93.914 - type: recall_at_3 value: 40.666000000000004 - type: recall_at_5 value: 46.513 - type: map_at_1 value: 22.528000000000002 - type: map_at_10 value: 30.751 - type: map_at_100 value: 31.855 - type: map_at_1000 value: 31.972 - type: map_at_3 value: 28.465 - type: map_at_5 value: 29.738 - type: mrr_at_1 value: 28.662 - type: mrr_at_10 value: 35.912 - type: mrr_at_100 value: 36.726 - type: mrr_at_1000 value: 36.777 - type: mrr_at_3 value: 34.013 - type: mrr_at_5 value: 35.156 - type: ndcg_at_1 value: 28.662 - type: ndcg_at_10 value: 35.452 - type: ndcg_at_100 value: 40.1 - type: ndcg_at_1000 value: 42.323 - type: ndcg_at_3 value: 32.112 - type: ndcg_at_5 value: 33.638 - type: precision_at_1 value: 28.662 - type: precision_at_10 value: 6.688 - type: precision_at_100 value: 1.13 - type: precision_at_1000 value: 0.16 - type: precision_at_3 value: 15.562999999999999 - type: precision_at_5 value: 11.019 - type: recall_at_1 value: 22.528000000000002 
- type: recall_at_10 value: 43.748 - type: recall_at_100 value: 64.235 - type: recall_at_1000 value: 78.609 - type: recall_at_3 value: 33.937 - type: recall_at_5 value: 38.234 - type: map_at_1 value: 33.117999999999995 - type: map_at_10 value: 44.339 - type: map_at_100 value: 45.367000000000004 - type: map_at_1000 value: 45.437 - type: map_at_3 value: 41.195 - type: map_at_5 value: 42.922 - type: mrr_at_1 value: 38.37 - type: mrr_at_10 value: 47.786 - type: mrr_at_100 value: 48.522 - type: mrr_at_1000 value: 48.567 - type: mrr_at_3 value: 45.371 - type: mrr_at_5 value: 46.857 - type: ndcg_at_1 value: 38.37 - type: ndcg_at_10 value: 50.019999999999996 - type: ndcg_at_100 value: 54.36299999999999 - type: ndcg_at_1000 value: 55.897 - type: ndcg_at_3 value: 44.733000000000004 - type: ndcg_at_5 value: 47.292 - type: precision_at_1 value: 38.37 - type: precision_at_10 value: 8.288 - type: precision_at_100 value: 1.139 - type: precision_at_1000 value: 0.132 - type: precision_at_3 value: 20.293 - type: precision_at_5 value: 14.107 - type: recall_at_1 value: 33.117999999999995 - type: recall_at_10 value: 63.451 - type: recall_at_100 value: 82.767 - type: recall_at_1000 value: 93.786 - type: recall_at_3 value: 48.964999999999996 - type: recall_at_5 value: 55.358 - type: map_at_1 value: 16.028000000000002 - type: map_at_10 value: 23.186999999999998 - type: map_at_100 value: 24.236 - type: map_at_1000 value: 24.337 - type: map_at_3 value: 20.816000000000003 - type: map_at_5 value: 22.311 - type: mrr_at_1 value: 17.514 - type: mrr_at_10 value: 24.84 - type: mrr_at_100 value: 25.838 - type: mrr_at_1000 value: 25.924999999999997 - type: mrr_at_3 value: 22.542 - type: mrr_at_5 value: 24.04 - type: ndcg_at_1 value: 17.514 - type: ndcg_at_10 value: 27.391 - type: ndcg_at_100 value: 32.684999999999995 - type: ndcg_at_1000 value: 35.367 - type: ndcg_at_3 value: 22.820999999999998 - type: ndcg_at_5 value: 25.380999999999997 - type: precision_at_1 value: 17.514 - type: precision_at_10 value: 4.463 - type: precision_at_100 value: 0.745 - type: precision_at_1000 value: 0.101 - type: precision_at_3 value: 10.019 - type: precision_at_5 value: 7.457999999999999 - type: recall_at_1 value: 16.028000000000002 - type: recall_at_10 value: 38.81 - type: recall_at_100 value: 63.295 - type: recall_at_1000 value: 83.762 - type: recall_at_3 value: 26.604 - type: recall_at_5 value: 32.727000000000004 - type: map_at_1 value: 11.962 - type: map_at_10 value: 17.218 - type: map_at_100 value: 18.321 - type: map_at_1000 value: 18.455 - type: map_at_3 value: 15.287999999999998 - type: map_at_5 value: 16.417 - type: mrr_at_1 value: 14.677000000000001 - type: mrr_at_10 value: 20.381 - type: mrr_at_100 value: 21.471999999999998 - type: mrr_at_1000 value: 21.566 - type: mrr_at_3 value: 18.448999999999998 - type: mrr_at_5 value: 19.587 - type: ndcg_at_1 value: 14.677000000000001 - type: ndcg_at_10 value: 20.86 - type: ndcg_at_100 value: 26.519 - type: ndcg_at_1000 value: 30.020000000000003 - type: ndcg_at_3 value: 17.208000000000002 - type: ndcg_at_5 value: 19.037000000000003 - type: precision_at_1 value: 14.677000000000001 - type: precision_at_10 value: 3.856 - type: precision_at_100 value: 0.7889999999999999 - type: precision_at_1000 value: 0.124 - type: precision_at_3 value: 8.043 - type: precision_at_5 value: 6.069999999999999 - type: recall_at_1 value: 11.962 - type: recall_at_10 value: 28.994999999999997 - type: recall_at_100 value: 54.071999999999996 - type: recall_at_1000 value: 79.309 - type: recall_at_3 value: 19.134999999999998 
- type: recall_at_5 value: 23.727999999999998 - type: map_at_1 value: 22.764 - type: map_at_10 value: 31.744 - type: map_at_100 value: 33.037 - type: map_at_1000 value: 33.156 - type: map_at_3 value: 29.015 - type: map_at_5 value: 30.434 - type: mrr_at_1 value: 28.296 - type: mrr_at_10 value: 37.03 - type: mrr_at_100 value: 37.902 - type: mrr_at_1000 value: 37.966 - type: mrr_at_3 value: 34.568 - type: mrr_at_5 value: 35.786 - type: ndcg_at_1 value: 28.296 - type: ndcg_at_10 value: 37.289 - type: ndcg_at_100 value: 42.787 - type: ndcg_at_1000 value: 45.382 - type: ndcg_at_3 value: 32.598 - type: ndcg_at_5 value: 34.521 - type: precision_at_1 value: 28.296 - type: precision_at_10 value: 6.901 - type: precision_at_100 value: 1.135 - type: precision_at_1000 value: 0.152 - type: precision_at_3 value: 15.367 - type: precision_at_5 value: 11.03 - type: recall_at_1 value: 22.764 - type: recall_at_10 value: 48.807 - type: recall_at_100 value: 71.859 - type: recall_at_1000 value: 89.606 - type: recall_at_3 value: 35.594 - type: recall_at_5 value: 40.541 - type: map_at_1 value: 19.742 - type: map_at_10 value: 27.741 - type: map_at_100 value: 29.323 - type: map_at_1000 value: 29.438 - type: map_at_3 value: 25.217 - type: map_at_5 value: 26.583000000000002 - type: mrr_at_1 value: 24.657999999999998 - type: mrr_at_10 value: 32.407000000000004 - type: mrr_at_100 value: 33.631 - type: mrr_at_1000 value: 33.686 - type: mrr_at_3 value: 30.194 - type: mrr_at_5 value: 31.444 - type: ndcg_at_1 value: 24.657999999999998 - type: ndcg_at_10 value: 32.614 - type: ndcg_at_100 value: 39.61 - type: ndcg_at_1000 value: 42.114000000000004 - type: ndcg_at_3 value: 28.516000000000002 - type: ndcg_at_5 value: 30.274 - type: precision_at_1 value: 24.657999999999998 - type: precision_at_10 value: 6.176 - type: precision_at_100 value: 1.1400000000000001 - type: precision_at_1000 value: 0.155 - type: precision_at_3 value: 13.927 - type: precision_at_5 value: 9.954 - type: recall_at_1 value: 19.742 - type: recall_at_10 value: 42.427 - type: recall_at_100 value: 72.687 - type: recall_at_1000 value: 89.89 - type: recall_at_3 value: 30.781 - type: recall_at_5 value: 35.606 - type: map_at_1 value: 19.72608333333333 - type: map_at_10 value: 27.165333333333336 - type: map_at_100 value: 28.292499999999997 - type: map_at_1000 value: 28.416333333333327 - type: map_at_3 value: 24.783833333333334 - type: map_at_5 value: 26.101750000000003 - type: mrr_at_1 value: 23.721500000000002 - type: mrr_at_10 value: 30.853333333333328 - type: mrr_at_100 value: 31.741750000000003 - type: mrr_at_1000 value: 31.812999999999995 - type: mrr_at_3 value: 28.732249999999997 - type: mrr_at_5 value: 29.945166666666665 - type: ndcg_at_1 value: 23.721500000000002 - type: ndcg_at_10 value: 31.74883333333333 - type: ndcg_at_100 value: 36.883583333333334 - type: ndcg_at_1000 value: 39.6145 - type: ndcg_at_3 value: 27.639583333333334 - type: ndcg_at_5 value: 29.543666666666667 - type: precision_at_1 value: 23.721500000000002 - type: precision_at_10 value: 5.709083333333333 - type: precision_at_100 value: 0.9859166666666666 - type: precision_at_1000 value: 0.1413333333333333 - type: precision_at_3 value: 12.85683333333333 - type: precision_at_5 value: 9.258166666666668 - type: recall_at_1 value: 19.72608333333333 - type: recall_at_10 value: 41.73583333333334 - type: recall_at_100 value: 64.66566666666668 - type: recall_at_1000 value: 84.09833333333336 - type: recall_at_3 value: 30.223083333333328 - type: recall_at_5 value: 35.153083333333335 - type: map_at_1 
value: 17.582 - type: map_at_10 value: 22.803 - type: map_at_100 value: 23.503 - type: map_at_1000 value: 23.599999999999998 - type: map_at_3 value: 21.375 - type: map_at_5 value: 22.052 - type: mrr_at_1 value: 20.399 - type: mrr_at_10 value: 25.369999999999997 - type: mrr_at_100 value: 26.016000000000002 - type: mrr_at_1000 value: 26.090999999999998 - type: mrr_at_3 value: 23.952 - type: mrr_at_5 value: 24.619 - type: ndcg_at_1 value: 20.399 - type: ndcg_at_10 value: 25.964 - type: ndcg_at_100 value: 29.607 - type: ndcg_at_1000 value: 32.349 - type: ndcg_at_3 value: 23.177 - type: ndcg_at_5 value: 24.276 - type: precision_at_1 value: 20.399 - type: precision_at_10 value: 4.018 - type: precision_at_100 value: 0.629 - type: precision_at_1000 value: 0.093 - type: precision_at_3 value: 9.969 - type: precision_at_5 value: 6.748 - type: recall_at_1 value: 17.582 - type: recall_at_10 value: 33.35 - type: recall_at_100 value: 50.219 - type: recall_at_1000 value: 71.06099999999999 - type: recall_at_3 value: 25.619999999999997 - type: recall_at_5 value: 28.291 - type: map_at_1 value: 11.071 - type: map_at_10 value: 16.201999999999998 - type: map_at_100 value: 17.112 - type: map_at_1000 value: 17.238 - type: map_at_3 value: 14.508 - type: map_at_5 value: 15.440999999999999 - type: mrr_at_1 value: 13.833 - type: mrr_at_10 value: 19.235 - type: mrr_at_100 value: 20.108999999999998 - type: mrr_at_1000 value: 20.196 - type: mrr_at_3 value: 17.515 - type: mrr_at_5 value: 18.505 - type: ndcg_at_1 value: 13.833 - type: ndcg_at_10 value: 19.643 - type: ndcg_at_100 value: 24.298000000000002 - type: ndcg_at_1000 value: 27.614 - type: ndcg_at_3 value: 16.528000000000002 - type: ndcg_at_5 value: 17.991 - type: precision_at_1 value: 13.833 - type: precision_at_10 value: 3.6990000000000003 - type: precision_at_100 value: 0.713 - type: precision_at_1000 value: 0.116 - type: precision_at_3 value: 7.9030000000000005 - type: precision_at_5 value: 5.891 - type: recall_at_1 value: 11.071 - type: recall_at_10 value: 27.019 - type: recall_at_100 value: 48.404 - type: recall_at_1000 value: 72.641 - type: recall_at_3 value: 18.336 - type: recall_at_5 value: 21.991 - type: map_at_1 value: 18.573 - type: map_at_10 value: 25.008999999999997 - type: map_at_100 value: 26.015 - type: map_at_1000 value: 26.137 - type: map_at_3 value: 22.798 - type: map_at_5 value: 24.092 - type: mrr_at_1 value: 22.108 - type: mrr_at_10 value: 28.646 - type: mrr_at_100 value: 29.477999999999998 - type: mrr_at_1000 value: 29.57 - type: mrr_at_3 value: 26.415 - type: mrr_at_5 value: 27.693 - type: ndcg_at_1 value: 22.108 - type: ndcg_at_10 value: 29.42 - type: ndcg_at_100 value: 34.385 - type: ndcg_at_1000 value: 37.572 - type: ndcg_at_3 value: 25.274 - type: ndcg_at_5 value: 27.315 - type: precision_at_1 value: 22.108 - type: precision_at_10 value: 5.093 - type: precision_at_100 value: 0.859 - type: precision_at_1000 value: 0.124 - type: precision_at_3 value: 11.474 - type: precision_at_5 value: 8.321000000000002 - type: recall_at_1 value: 18.573 - type: recall_at_10 value: 39.433 - type: recall_at_100 value: 61.597 - type: recall_at_1000 value: 84.69 - type: recall_at_3 value: 27.849 - type: recall_at_5 value: 33.202999999999996 - type: map_at_1 value: 22.807 - type: map_at_10 value: 30.014000000000003 - type: map_at_100 value: 31.422 - type: map_at_1000 value: 31.652 - type: map_at_3 value: 27.447 - type: map_at_5 value: 28.711 - type: mrr_at_1 value: 27.668 - type: mrr_at_10 value: 34.489 - type: mrr_at_100 value: 35.453 - type: mrr_at_1000 
value: 35.526 - type: mrr_at_3 value: 32.477000000000004 - type: mrr_at_5 value: 33.603 - type: ndcg_at_1 value: 27.668 - type: ndcg_at_10 value: 34.983 - type: ndcg_at_100 value: 40.535 - type: ndcg_at_1000 value: 43.747 - type: ndcg_at_3 value: 31.026999999999997 - type: ndcg_at_5 value: 32.608 - type: precision_at_1 value: 27.668 - type: precision_at_10 value: 6.837999999999999 - type: precision_at_100 value: 1.411 - type: precision_at_1000 value: 0.23600000000000002 - type: precision_at_3 value: 14.295 - type: precision_at_5 value: 10.435 - type: recall_at_1 value: 22.807 - type: recall_at_10 value: 43.545 - type: recall_at_100 value: 69.39800000000001 - type: recall_at_1000 value: 90.706 - type: recall_at_3 value: 32.183 - type: recall_at_5 value: 36.563 - type: map_at_1 value: 13.943 - type: map_at_10 value: 20.419999999999998 - type: map_at_100 value: 21.335 - type: map_at_1000 value: 21.44 - type: map_at_3 value: 17.865000000000002 - type: map_at_5 value: 19.36 - type: mrr_at_1 value: 15.712000000000002 - type: mrr_at_10 value: 22.345000000000002 - type: mrr_at_100 value: 23.227999999999998 - type: mrr_at_1000 value: 23.304 - type: mrr_at_3 value: 19.901 - type: mrr_at_5 value: 21.325 - type: ndcg_at_1 value: 15.712000000000002 - type: ndcg_at_10 value: 24.801000000000002 - type: ndcg_at_100 value: 29.799 - type: ndcg_at_1000 value: 32.513999999999996 - type: ndcg_at_3 value: 19.750999999999998 - type: ndcg_at_5 value: 22.252 - type: precision_at_1 value: 15.712000000000002 - type: precision_at_10 value: 4.1770000000000005 - type: precision_at_100 value: 0.738 - type: precision_at_1000 value: 0.106 - type: precision_at_3 value: 8.688 - type: precision_at_5 value: 6.617000000000001 - type: recall_at_1 value: 13.943 - type: recall_at_10 value: 36.913000000000004 - type: recall_at_100 value: 60.519 - type: recall_at_1000 value: 81.206 - type: recall_at_3 value: 23.006999999999998 - type: recall_at_5 value: 29.082 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 9.468 - type: map_at_10 value: 16.029 - type: map_at_100 value: 17.693 - type: map_at_1000 value: 17.886 - type: map_at_3 value: 13.15 - type: map_at_5 value: 14.568 - type: mrr_at_1 value: 21.173000000000002 - type: mrr_at_10 value: 31.028 - type: mrr_at_100 value: 32.061 - type: mrr_at_1000 value: 32.119 - type: mrr_at_3 value: 27.534999999999997 - type: mrr_at_5 value: 29.431 - type: ndcg_at_1 value: 21.173000000000002 - type: ndcg_at_10 value: 23.224 - type: ndcg_at_100 value: 30.225 - type: ndcg_at_1000 value: 33.961000000000006 - type: ndcg_at_3 value: 18.174 - type: ndcg_at_5 value: 19.897000000000002 - type: precision_at_1 value: 21.173000000000002 - type: precision_at_10 value: 7.4719999999999995 - type: precision_at_100 value: 1.5010000000000001 - type: precision_at_1000 value: 0.219 - type: precision_at_3 value: 13.312 - type: precision_at_5 value: 10.619 - type: recall_at_1 value: 9.468 - type: recall_at_10 value: 28.823 - type: recall_at_100 value: 53.26499999999999 - type: recall_at_1000 value: 74.536 - type: recall_at_3 value: 16.672 - type: recall_at_5 value: 21.302 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 6.343 - type: map_at_10 value: 12.717 - type: map_at_100 value: 16.48 - type: map_at_1000 value: 17.381 - type: map_at_3 value: 9.568999999999999 - type: map_at_5 value: 11.125 - type: mrr_at_1 value: 
48.75 - type: mrr_at_10 value: 58.425000000000004 - type: mrr_at_100 value: 59.075 - type: mrr_at_1000 value: 59.095 - type: mrr_at_3 value: 56.291999999999994 - type: mrr_at_5 value: 57.679 - type: ndcg_at_1 value: 37.875 - type: ndcg_at_10 value: 27.77 - type: ndcg_at_100 value: 30.288999999999998 - type: ndcg_at_1000 value: 36.187999999999995 - type: ndcg_at_3 value: 31.385999999999996 - type: ndcg_at_5 value: 29.923 - type: precision_at_1 value: 48.75 - type: precision_at_10 value: 22.375 - type: precision_at_100 value: 6.3420000000000005 - type: precision_at_1000 value: 1.4489999999999998 - type: precision_at_3 value: 35.5 - type: precision_at_5 value: 30.55 - type: recall_at_1 value: 6.343 - type: recall_at_10 value: 16.936 - type: recall_at_100 value: 35.955999999999996 - type: recall_at_1000 value: 55.787 - type: recall_at_3 value: 10.771 - type: recall_at_5 value: 13.669999999999998 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 41.99 - type: f1 value: 36.823402174564954 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 40.088 - type: map_at_10 value: 52.69200000000001 - type: map_at_100 value: 53.296 - type: map_at_1000 value: 53.325 - type: map_at_3 value: 49.905 - type: map_at_5 value: 51.617000000000004 - type: mrr_at_1 value: 43.009 - type: mrr_at_10 value: 56.203 - type: mrr_at_100 value: 56.75 - type: mrr_at_1000 value: 56.769000000000005 - type: mrr_at_3 value: 53.400000000000006 - type: mrr_at_5 value: 55.163 - type: ndcg_at_1 value: 43.009 - type: ndcg_at_10 value: 59.39 - type: ndcg_at_100 value: 62.129999999999995 - type: ndcg_at_1000 value: 62.793 - type: ndcg_at_3 value: 53.878 - type: ndcg_at_5 value: 56.887 - type: precision_at_1 value: 43.009 - type: precision_at_10 value: 8.366 - type: precision_at_100 value: 0.983 - type: precision_at_1000 value: 0.105 - type: precision_at_3 value: 22.377 - type: precision_at_5 value: 15.035000000000002 - type: recall_at_1 value: 40.088 - type: recall_at_10 value: 76.68700000000001 - type: recall_at_100 value: 88.91 - type: recall_at_1000 value: 93.782 - type: recall_at_3 value: 61.809999999999995 - type: recall_at_5 value: 69.131 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 10.817 - type: map_at_10 value: 18.9 - type: map_at_100 value: 20.448 - type: map_at_1000 value: 20.660999999999998 - type: map_at_3 value: 15.979 - type: map_at_5 value: 17.415 - type: mrr_at_1 value: 23.148 - type: mrr_at_10 value: 31.208000000000002 - type: mrr_at_100 value: 32.167 - type: mrr_at_1000 value: 32.242 - type: mrr_at_3 value: 28.498 - type: mrr_at_5 value: 29.964000000000002 - type: ndcg_at_1 value: 23.148 - type: ndcg_at_10 value: 25.325999999999997 - type: ndcg_at_100 value: 31.927 - type: ndcg_at_1000 value: 36.081 - type: ndcg_at_3 value: 21.647 - type: ndcg_at_5 value: 22.762999999999998 - type: precision_at_1 value: 23.148 - type: precision_at_10 value: 7.546 - type: precision_at_100 value: 1.415 - type: precision_at_1000 value: 0.216 - type: precision_at_3 value: 14.969 - type: precision_at_5 value: 11.327 - type: recall_at_1 value: 10.817 - type: recall_at_10 value: 32.164 - type: recall_at_100 value: 57.655 - type: recall_at_1000 value: 82.797 - type: recall_at_3 value: 19.709 - type: recall_at_5 
value: 24.333 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 25.380999999999997 - type: map_at_10 value: 33.14 - type: map_at_100 value: 33.948 - type: map_at_1000 value: 34.028000000000006 - type: map_at_3 value: 31.019999999999996 - type: map_at_5 value: 32.23 - type: mrr_at_1 value: 50.763000000000005 - type: mrr_at_10 value: 57.899 - type: mrr_at_100 value: 58.426 - type: mrr_at_1000 value: 58.457 - type: mrr_at_3 value: 56.093 - type: mrr_at_5 value: 57.116 - type: ndcg_at_1 value: 50.763000000000005 - type: ndcg_at_10 value: 41.656 - type: ndcg_at_100 value: 45.079 - type: ndcg_at_1000 value: 46.916999999999994 - type: ndcg_at_3 value: 37.834 - type: ndcg_at_5 value: 39.732 - type: precision_at_1 value: 50.763000000000005 - type: precision_at_10 value: 8.648 - type: precision_at_100 value: 1.135 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_3 value: 23.105999999999998 - type: precision_at_5 value: 15.363 - type: recall_at_1 value: 25.380999999999997 - type: recall_at_10 value: 43.241 - type: recall_at_100 value: 56.745000000000005 - type: recall_at_1000 value: 69.048 - type: recall_at_3 value: 34.659 - type: recall_at_5 value: 38.406 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 79.544 - type: ap value: 73.82920133396664 - type: f1 value: 79.51048124883265 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 11.174000000000001 - type: map_at_10 value: 19.451999999999998 - type: map_at_100 value: 20.612 - type: map_at_1000 value: 20.703 - type: map_at_3 value: 16.444 - type: map_at_5 value: 18.083 - type: mrr_at_1 value: 11.447000000000001 - type: mrr_at_10 value: 19.808 - type: mrr_at_100 value: 20.958 - type: mrr_at_1000 value: 21.041999999999998 - type: mrr_at_3 value: 16.791 - type: mrr_at_5 value: 18.459 - type: ndcg_at_1 value: 11.447000000000001 - type: ndcg_at_10 value: 24.556 - type: ndcg_at_100 value: 30.637999999999998 - type: ndcg_at_1000 value: 33.14 - type: ndcg_at_3 value: 18.325 - type: ndcg_at_5 value: 21.278 - type: precision_at_1 value: 11.447000000000001 - type: precision_at_10 value: 4.215 - type: precision_at_100 value: 0.732 - type: precision_at_1000 value: 0.095 - type: precision_at_3 value: 8.052 - type: precision_at_5 value: 6.318 - type: recall_at_1 value: 11.174000000000001 - type: recall_at_10 value: 40.543 - type: recall_at_100 value: 69.699 - type: recall_at_1000 value: 89.403 - type: recall_at_3 value: 23.442 - type: recall_at_5 value: 30.536 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 89.6671226630187 - type: f1 value: 89.57660424361246 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 60.284997720018254 - type: f1 value: 40.30637400152823 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.33557498318763 - 
type: f1 value: 60.24039910680179 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.37390719569603 - type: f1 value: 72.33097333477316 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 34.68158939060552 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 30.340061711905236 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 32.01814326295803 - type: mrr value: 33.20555240055367 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 3.3910000000000005 - type: map_at_10 value: 7.7219999999999995 - type: map_at_100 value: 10.286 - type: map_at_1000 value: 11.668000000000001 - type: map_at_3 value: 5.552 - type: map_at_5 value: 6.468 - type: mrr_at_1 value: 34.365 - type: mrr_at_10 value: 42.555 - type: mrr_at_100 value: 43.295 - type: mrr_at_1000 value: 43.357 - type: mrr_at_3 value: 40.299 - type: mrr_at_5 value: 41.182 - type: ndcg_at_1 value: 31.424000000000003 - type: ndcg_at_10 value: 24.758 - type: ndcg_at_100 value: 23.677999999999997 - type: ndcg_at_1000 value: 33.377 - type: ndcg_at_3 value: 28.302 - type: ndcg_at_5 value: 26.342 - type: precision_at_1 value: 33.437 - type: precision_at_10 value: 19.256999999999998 - type: precision_at_100 value: 6.662999999999999 - type: precision_at_1000 value: 1.9900000000000002 - type: precision_at_3 value: 27.761000000000003 - type: precision_at_5 value: 23.715 - type: recall_at_1 value: 3.3910000000000005 - type: recall_at_10 value: 11.068 - type: recall_at_100 value: 25.878 - type: recall_at_1000 value: 60.19 - type: recall_at_3 value: 6.1690000000000005 - type: recall_at_5 value: 7.767 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 15.168000000000001 - type: map_at_10 value: 26.177 - type: map_at_100 value: 27.564 - type: map_at_1000 value: 27.628999999999998 - type: map_at_3 value: 22.03 - type: map_at_5 value: 24.276 - type: mrr_at_1 value: 17.439 - type: mrr_at_10 value: 28.205000000000002 - type: mrr_at_100 value: 29.357 - type: mrr_at_1000 value: 29.408 - type: mrr_at_3 value: 24.377 - type: mrr_at_5 value: 26.540000000000003 - type: ndcg_at_1 value: 17.41 - type: ndcg_at_10 value: 32.936 - type: ndcg_at_100 value: 39.196999999999996 - type: ndcg_at_1000 value: 40.892 - type: ndcg_at_3 value: 24.721 - type: ndcg_at_5 value: 28.615000000000002 - type: precision_at_1 value: 17.41 - type: precision_at_10 value: 6.199000000000001 - type: precision_at_100 value: 0.9690000000000001 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 11.790000000000001 - type: precision_at_5 value: 9.264 - type: recall_at_1 value: 15.168000000000001 - type: recall_at_10 value: 51.914 - type: recall_at_100 value: 79.804 - type: recall_at_1000 value: 92.75999999999999 - type: recall_at_3 value: 30.212 - type: recall_at_5 value: 
39.204 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 67.306 - type: map_at_10 value: 80.634 - type: map_at_100 value: 81.349 - type: map_at_1000 value: 81.37299999999999 - type: map_at_3 value: 77.691 - type: map_at_5 value: 79.512 - type: mrr_at_1 value: 77.56 - type: mrr_at_10 value: 84.177 - type: mrr_at_100 value: 84.35000000000001 - type: mrr_at_1000 value: 84.353 - type: mrr_at_3 value: 83.003 - type: mrr_at_5 value: 83.799 - type: ndcg_at_1 value: 77.58 - type: ndcg_at_10 value: 84.782 - type: ndcg_at_100 value: 86.443 - type: ndcg_at_1000 value: 86.654 - type: ndcg_at_3 value: 81.67 - type: ndcg_at_5 value: 83.356 - type: precision_at_1 value: 77.58 - type: precision_at_10 value: 12.875 - type: precision_at_100 value: 1.503 - type: precision_at_1000 value: 0.156 - type: precision_at_3 value: 35.63 - type: precision_at_5 value: 23.483999999999998 - type: recall_at_1 value: 67.306 - type: recall_at_10 value: 92.64 - type: recall_at_100 value: 98.681 - type: recall_at_1000 value: 99.79 - type: recall_at_3 value: 83.682 - type: recall_at_5 value: 88.424 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 50.76319866126382 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 55.024711941648995 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 3.9379999999999997 - type: map_at_10 value: 8.817 - type: map_at_100 value: 10.546999999999999 - type: map_at_1000 value: 10.852 - type: map_at_3 value: 6.351999999999999 - type: map_at_5 value: 7.453 - type: mrr_at_1 value: 19.400000000000002 - type: mrr_at_10 value: 27.371000000000002 - type: mrr_at_100 value: 28.671999999999997 - type: mrr_at_1000 value: 28.747 - type: mrr_at_3 value: 24.583 - type: mrr_at_5 value: 26.143 - type: ndcg_at_1 value: 19.400000000000002 - type: ndcg_at_10 value: 15.264 - type: ndcg_at_100 value: 22.63 - type: ndcg_at_1000 value: 28.559 - type: ndcg_at_3 value: 14.424999999999999 - type: ndcg_at_5 value: 12.520000000000001 - type: precision_at_1 value: 19.400000000000002 - type: precision_at_10 value: 7.8100000000000005 - type: precision_at_100 value: 1.854 - type: precision_at_1000 value: 0.329 - type: precision_at_3 value: 13.100000000000001 - type: precision_at_5 value: 10.68 - type: recall_at_1 value: 3.9379999999999997 - type: recall_at_10 value: 15.903 - type: recall_at_100 value: 37.645 - type: recall_at_1000 value: 66.86 - type: recall_at_3 value: 7.993 - type: recall_at_5 value: 10.885 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 80.12689060151425 - type: cos_sim_spearman value: 70.46515535094771 - type: euclidean_pearson value: 77.17160003557223 - type: euclidean_spearman value: 70.4651757047438 - type: manhattan_pearson value: 77.18129609281937 - type: manhattan_spearman value: 70.46610403752913 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 
70.451157033355 - type: cos_sim_spearman value: 63.99899601697852 - type: euclidean_pearson value: 67.46985359967678 - type: euclidean_spearman value: 64.00001637764805 - type: manhattan_pearson value: 67.56534741780037 - type: manhattan_spearman value: 64.06533893575366 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 77.65086614464292 - type: cos_sim_spearman value: 78.20169706921848 - type: euclidean_pearson value: 77.77758172155283 - type: euclidean_spearman value: 78.20169706921848 - type: manhattan_pearson value: 77.75077884860052 - type: manhattan_spearman value: 78.16875216484164 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 76.26381598259717 - type: cos_sim_spearman value: 70.78377709313477 - type: euclidean_pearson value: 74.82646556532096 - type: euclidean_spearman value: 70.78377658155212 - type: manhattan_pearson value: 74.81784766108225 - type: manhattan_spearman value: 70.79351454692176 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 79.00532026789739 - type: cos_sim_spearman value: 80.02708383244838 - type: euclidean_pearson value: 79.48345422610525 - type: euclidean_spearman value: 80.02708383244838 - type: manhattan_pearson value: 79.44519739854803 - type: manhattan_spearman value: 79.98344094559687 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 77.32783048164805 - type: cos_sim_spearman value: 78.79729961288045 - type: euclidean_pearson value: 78.72111945793154 - type: euclidean_spearman value: 78.79729904606872 - type: manhattan_pearson value: 78.72464311117116 - type: manhattan_spearman value: 78.822591248334 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 82.04318630630854 - type: cos_sim_spearman value: 83.87886389259836 - type: euclidean_pearson value: 83.40385877895086 - type: euclidean_spearman value: 83.87886389259836 - type: manhattan_pearson value: 83.46337128901547 - type: manhattan_spearman value: 83.9723106941644 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 63.003511169944595 - type: cos_sim_spearman value: 64.39318805580227 - type: euclidean_pearson value: 65.4797990735967 - type: euclidean_spearman value: 64.39318805580227 - type: manhattan_pearson value: 65.44604544280844 - type: manhattan_spearman value: 64.38742899984233 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 76.63101237585029 - type: cos_sim_spearman value: 75.57446967644269 - type: euclidean_pearson value: 76.93491768734478 - type: euclidean_spearman value: 75.57446967644269 - type: manhattan_pearson value: 76.92187567800636 - type: manhattan_spearman value: 75.57239337194585 - task: type: Reranking dataset: 
name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 78.5376604868993 - type: mrr value: 92.94422897364073 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 38.872 - type: map_at_10 value: 50.417 - type: map_at_100 value: 51.202000000000005 - type: map_at_1000 value: 51.25999999999999 - type: map_at_3 value: 47.02 - type: map_at_5 value: 49.326 - type: mrr_at_1 value: 41.0 - type: mrr_at_10 value: 51.674 - type: mrr_at_100 value: 52.32599999999999 - type: mrr_at_1000 value: 52.376999999999995 - type: mrr_at_3 value: 48.778 - type: mrr_at_5 value: 50.744 - type: ndcg_at_1 value: 41.0 - type: ndcg_at_10 value: 56.027 - type: ndcg_at_100 value: 59.362 - type: ndcg_at_1000 value: 60.839 - type: ndcg_at_3 value: 50.019999999999996 - type: ndcg_at_5 value: 53.644999999999996 - type: precision_at_1 value: 41.0 - type: precision_at_10 value: 8.1 - type: precision_at_100 value: 0.987 - type: precision_at_1000 value: 0.11100000000000002 - type: precision_at_3 value: 20.444000000000003 - type: precision_at_5 value: 14.466999999999999 - type: recall_at_1 value: 38.872 - type: recall_at_10 value: 71.906 - type: recall_at_100 value: 86.367 - type: recall_at_1000 value: 98.0 - type: recall_at_3 value: 56.206 - type: recall_at_5 value: 65.05 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.7039603960396 - type: cos_sim_ap value: 90.40809844250262 - type: cos_sim_f1 value: 84.53181583031557 - type: cos_sim_precision value: 87.56698821007502 - type: cos_sim_recall value: 81.69999999999999 - type: dot_accuracy value: 99.7039603960396 - type: dot_ap value: 90.40809844250262 - type: dot_f1 value: 84.53181583031557 - type: dot_precision value: 87.56698821007502 - type: dot_recall value: 81.69999999999999 - type: euclidean_accuracy value: 99.7039603960396 - type: euclidean_ap value: 90.4080982863383 - type: euclidean_f1 value: 84.53181583031557 - type: euclidean_precision value: 87.56698821007502 - type: euclidean_recall value: 81.69999999999999 - type: manhattan_accuracy value: 99.7 - type: manhattan_ap value: 90.39771161966652 - type: manhattan_f1 value: 84.32989690721648 - type: manhattan_precision value: 87.02127659574468 - type: manhattan_recall value: 81.8 - type: max_accuracy value: 99.7039603960396 - type: max_ap value: 90.40809844250262 - type: max_f1 value: 84.53181583031557 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 59.663210666678715 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 32.107791216468776 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 46.440691925067604 - type: mrr value: 47.03390257618199 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: 
default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 31.067177519784074 - type: cos_sim_spearman value: 31.234728424648967 - type: dot_pearson value: 31.06717083018107 - type: dot_spearman value: 31.234728424648967 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.136 - type: map_at_10 value: 0.767 - type: map_at_100 value: 3.3689999999999998 - type: map_at_1000 value: 8.613999999999999 - type: map_at_3 value: 0.369 - type: map_at_5 value: 0.514 - type: mrr_at_1 value: 48.0 - type: mrr_at_10 value: 63.908 - type: mrr_at_100 value: 64.615 - type: mrr_at_1000 value: 64.615 - type: mrr_at_3 value: 62.0 - type: mrr_at_5 value: 63.4 - type: ndcg_at_1 value: 44.0 - type: ndcg_at_10 value: 38.579 - type: ndcg_at_100 value: 26.409 - type: ndcg_at_1000 value: 26.858999999999998 - type: ndcg_at_3 value: 47.134 - type: ndcg_at_5 value: 43.287 - type: precision_at_1 value: 48.0 - type: precision_at_10 value: 40.400000000000006 - type: precision_at_100 value: 26.640000000000004 - type: precision_at_1000 value: 12.04 - type: precision_at_3 value: 52.666999999999994 - type: precision_at_5 value: 46.800000000000004 - type: recall_at_1 value: 0.136 - type: recall_at_10 value: 1.0070000000000001 - type: recall_at_100 value: 6.318 - type: recall_at_1000 value: 26.522000000000002 - type: recall_at_3 value: 0.41700000000000004 - type: recall_at_5 value: 0.606 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 1.9949999999999999 - type: map_at_10 value: 8.304 - type: map_at_100 value: 13.644 - type: map_at_1000 value: 15.43 - type: map_at_3 value: 4.788 - type: map_at_5 value: 6.22 - type: mrr_at_1 value: 22.448999999999998 - type: mrr_at_10 value: 37.658 - type: mrr_at_100 value: 38.491 - type: mrr_at_1000 value: 38.503 - type: mrr_at_3 value: 32.312999999999995 - type: mrr_at_5 value: 35.68 - type: ndcg_at_1 value: 21.429000000000002 - type: ndcg_at_10 value: 18.995 - type: ndcg_at_100 value: 32.029999999999994 - type: ndcg_at_1000 value: 44.852 - type: ndcg_at_3 value: 19.464000000000002 - type: ndcg_at_5 value: 19.172 - type: precision_at_1 value: 22.448999999999998 - type: precision_at_10 value: 17.143 - type: precision_at_100 value: 6.877999999999999 - type: precision_at_1000 value: 1.524 - type: precision_at_3 value: 21.769 - type: precision_at_5 value: 20.0 - type: recall_at_1 value: 1.9949999999999999 - type: recall_at_10 value: 13.395999999999999 - type: recall_at_100 value: 44.348 - type: recall_at_1000 value: 82.622 - type: recall_at_3 value: 5.896 - type: recall_at_5 value: 8.554 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 67.9394 - type: ap value: 12.943337263423334 - type: f1 value: 52.28243093094156 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 56.414827391058296 - type: f1 value: 56.666412409573105 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 
6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 47.009746255495465 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 84.02574953805807 - type: cos_sim_ap value: 67.66599910763128 - type: cos_sim_f1 value: 63.491277990844985 - type: cos_sim_precision value: 59.77172140694154 - type: cos_sim_recall value: 67.70448548812665 - type: dot_accuracy value: 84.02574953805807 - type: dot_ap value: 67.66600090945406 - type: dot_f1 value: 63.491277990844985 - type: dot_precision value: 59.77172140694154 - type: dot_recall value: 67.70448548812665 - type: euclidean_accuracy value: 84.02574953805807 - type: euclidean_ap value: 67.6659842364448 - type: euclidean_f1 value: 63.491277990844985 - type: euclidean_precision value: 59.77172140694154 - type: euclidean_recall value: 67.70448548812665 - type: manhattan_accuracy value: 84.0317100792752 - type: manhattan_ap value: 67.66351692448987 - type: manhattan_f1 value: 63.48610948306178 - type: manhattan_precision value: 57.11875131828729 - type: manhattan_recall value: 71.45118733509234 - type: max_accuracy value: 84.0317100792752 - type: max_ap value: 67.66600090945406 - type: max_f1 value: 63.491277990844985 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 87.53832421314084 - type: cos_sim_ap value: 83.11416594316626 - type: cos_sim_f1 value: 75.41118114347518 - type: cos_sim_precision value: 73.12839059674504 - type: cos_sim_recall value: 77.8410840776101 - type: dot_accuracy value: 87.53832421314084 - type: dot_ap value: 83.11416226342155 - type: dot_f1 value: 75.41118114347518 - type: dot_precision value: 73.12839059674504 - type: dot_recall value: 77.8410840776101 - type: euclidean_accuracy value: 87.53832421314084 - type: euclidean_ap value: 83.11416284455395 - type: euclidean_f1 value: 75.41118114347518 - type: euclidean_precision value: 73.12839059674504 - type: euclidean_recall value: 77.8410840776101 - type: manhattan_accuracy value: 87.49369348391353 - type: manhattan_ap value: 83.08066812574694 - type: manhattan_f1 value: 75.36561228603892 - type: manhattan_precision value: 71.9202518363064 - type: manhattan_recall value: 79.15768401601478 - type: max_accuracy value: 87.53832421314084 - type: max_ap value: 83.11416594316626 - type: max_f1 value: 75.41118114347518 --- # lodestone-base-4096-v1 [Hum-Works/lodestone-base-4096-v1](https://huggingface.co/Hum-Works/lodestone-base-4096-v1). [Griffin McCauley](https://huggingface.co/gmccaul1), [Will Fortin](https://huggingface.co/willathum), [Dylan DiGioia](https://huggingface.co/dylanAtHum) 2023 This new [sentence-transformers](https://www.SBERT.net) model from [Hum](https://www.hum.works/) maps long sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search. 
## Abstract In the hopes of furthering Hum's overarching mission of increasing the accessibility and interconnectivity of human knowledge, this model was developed as part of a project intending to boost the maximum input sequence length of sentence embedding models by leveraging recent architectural advances in the design of transformer models such as the incorporation of FlashAttention, Attention with Linear Biases (ALiBi), and Gated Linear Units (GLU). These modifications and enhancements were implemented by the team at MosaicML who designed and constructed the pre-trained [`mosaic-bert-base-seqlen-2048`](https://huggingface.co/mosaicml/mosaic-bert-base-seqlen-2048) model, and more information regarding the details of their development and testing specifications can be found on that model card. While the fine-tuning procedure followed during the course of this project loosely mirrors that of the original [Flax-sentence-embeddings](https://huggingface.co/flax-sentence-embeddings) team responsible for the creation of many other popular sentence-transformers models (e.g. [all-mpnet-base-v2](https://huggingface.co/sentence-transformers/all-mpnet-base-v2), [all-distilroberta-v1](https://huggingface.co/sentence-transformers/all-distilroberta-v1), and [all-MiniLM-L6-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2)), our methodology includes novel techniques for data loading, batch sampling, and model checkpointing intended to improve training efficiency with regard to memory allocation and data storage. By combining these well-established and proven fine-tuning practices with novel advances in transformer architectural elements, our `lodestone-base-4096-v1` model is able to achieve comparable performance metrics on standard text embedding evaluation benchmarks while supporting a longer and more robust input sequence length of 4096 and retaining a smaller, more manageable size capable of being run on either a GPU or CPU. ## Usage Using this model is straightforward once [sentence-transformers](https://www.SBERT.net) is installed. *At the time of publishing, sentence-transformers does not support remote code, which is required for the FlashAttention used by the model. A fork of the sentence-transformers repository that allows remote code execution is provided for convenience. It can be installed using the following command:* ``` pip install git+https://github.com/Hum-Works/sentence-transformers.git pip install einops ``` Then you can use the model like this: ```python from sentence_transformers import SentenceTransformer model = SentenceTransformer('Hum-Works/lodestone-base-4096-v1', trust_remote_code=True, revision='v1.0.0') sentences = ["This is an example sentence", "Each sentence is converted"] embeddings = model.encode(sentences) print(embeddings) ``` *Note: The model will use the OpenAI/Triton implementation of FlashAttention if installed. This is more performant than the fallback torch implementation. Some platforms and GPUs may not be supported by Triton - up-to-date compatibility can be found on [Triton’s github page](https://github.com/openai/triton#compatibility).* ------ ## Background The project aims to train sentence embedding models on very large sentence-level datasets using a self-supervised contrastive learning objective. We used the pretrained [`mosaic-bert-base-seqlen-2048`](https://huggingface.co/mosaicml/mosaic-bert-base-seqlen-2048) model and fine-tuned it on a dataset of nearly 1.5B sentence pairs.
We use a contrastive learning objective: given a sentence from a pair, the model should predict which sentence, out of a set of randomly sampled other sentences, was actually paired with it in our dataset. ## Intended uses Our model is intended to be used as a long sentence and paragraph encoder. Given an input text, it outputs a vector containing the semantic information. The sentence vector may be used for information retrieval, clustering, or sentence similarity tasks. ## Training procedure ### Pre-training We use the pretrained [`mosaic-bert-base-seqlen-2048`](https://huggingface.co/mosaicml/mosaic-bert-base-seqlen-2048). Please refer to its model card for more detailed information about the pre-training procedure. ### Fine-tuning We fine-tune the model using a contrastive objective. Formally, we compute the dot product of each possible sentence pairing in the batch and then apply a cross-entropy loss by comparing against the true pairs. #### Hyperparameters We trained our model on an ml.g5.4xlarge EC2 instance with 1 NVIDIA A10G Tensor Core GPU. We trained the model for 1.4 million steps using a batch size of 16. We used a learning-rate warm-up of 500 steps. The sequence length during training was limited to 2048 tokens. We used the AdamW optimizer with a 2e-5 learning rate and weight decay of 0.01 (i.e. the default parameter values for SentenceTransformer.fit()). The full training script is accessible in this current repository: `Training.py`. ## Model Architecture By incorporating FlashAttention, [Attention with Linear Biases (ALiBi)](https://arxiv.org/abs/2108.12409), and Gated Linear Units (GLU), this model is able to handle input sequences of up to 4096 tokens, 8x longer than that supported by most comparable sentence embedding models. The model was trained using a maximum sequence length of 2048, but the final model supports a maximum sequence length of 4096. This is accomplished by taking advantage of ALiBi’s positional attention extrapolation, which has been shown to allow sequence lengths of 2x the initial trained length. ## Full Model Architecture ``` SentenceTransformer( (0): Transformer({'max_seq_length': 4096, 'do_lower_case': False}) with Transformer model: BertModel (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False}) (2): Normalize() ) ``` #### Training data We used a concatenation of multiple datasets to fine-tune our model. The total number of sentence pairs is nearly 1.5 billion. We sampled each dataset with a weighted probability proportional to its relative contribution to the entire dataset. The breakdown of the dataset can be seen in the table below, and the entire dataset can be publicly accessed and uploaded via the `Dataloading.ipynb` located within this repository.
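Before the training-data table, here is a minimal, self-contained sketch of the in-batch contrastive objective described in the Fine-tuning section above: dot-product scores are computed between every anchor and every candidate in the batch, and a cross-entropy loss is applied against the true pairings. This is an illustration only, not the project's actual training code (see `Training.py` in this repository for that); the batch size, embedding dimensionality, and similarity scale factor below are illustrative assumptions.

```python
import torch
import torch.nn.functional as F

def in_batch_contrastive_loss(anchors: torch.Tensor, positives: torch.Tensor,
                              scale: float = 20.0) -> torch.Tensor:
    """Illustrative in-batch (multiple-negatives) contrastive loss.

    `anchors` and `positives` are (batch_size, dim) L2-normalized sentence
    embeddings, where row i of `positives` is the true pair of row i of
    `anchors`; every other row in the batch serves as a negative.
    """
    # Dot product between each anchor and every candidate in the batch.
    scores = anchors @ positives.T * scale            # (batch, batch)
    # The correct pairing for anchor i is candidate i, i.e. the diagonal.
    labels = torch.arange(scores.size(0), device=scores.device)
    return F.cross_entropy(scores, labels)

# Toy usage with random vectors standing in for model outputs.
a = F.normalize(torch.randn(16, 768), dim=-1)
p = F.normalize(torch.randn(16, 768), dim=-1)
print(in_batch_contrastive_loss(a, p))
```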
| Dataset | Paper | Number of training tuples | |--------------------------------------------------------|:----------------------------------------:|:--------------------------:| | [Reddit comments (2015-2018)](https://github.com/PolyAI-LDN/conversational-datasets/tree/master/reddit) | [paper](https://arxiv.org/abs/1904.06472) | 726,484,430 | | **[S2ORC](https://github.com/allenai/s2orc) Citation pairs (Abstracts)** | [paper](https://aclanthology.org/2020.acl-main.447/) | 252,102,397 | | **[Reddit posts](https://huggingface.co/datasets/sentence-transformers/reddit-title-body) (Title, Body) pairs** | - | 127,445,911 | | **[Amazon reviews (2018)](https://huggingface.co/datasets/sentence-transformers/embedding-training-data) (Title, Review) pairs** | - | 87,877,725 | | [WikiAnswers](https://github.com/afader/oqa#wikianswers-corpus) Duplicate question pairs | [paper](https://doi.org/10.1145/2623330.2623677) | 77,427,422 | | [PAQ](https://github.com/facebookresearch/PAQ) (Question, Answer) pairs | [paper](https://arxiv.org/abs/2102.07033) | 64,371,441 | | [S2ORC](https://github.com/allenai/s2orc) Citation pairs (Titles) | [paper](https://aclanthology.org/2020.acl-main.447/) | 52,603,982 | | [S2ORC](https://github.com/allenai/s2orc) (Title, Abstract) | [paper](https://aclanthology.org/2020.acl-main.447/) | 41,769,185 | | [Stack Exchange](https://huggingface.co/datasets/flax-sentence-embeddings/stackexchange_title_body_jsonl) (Title, Body) pairs | - | 25,368,423 | | [MS MARCO](https://microsoft.github.io/msmarco/) triplets | [paper](https://doi.org/10.1145/3404835.3462804) | 9,144,553 | | **[Stack Exchange](https://huggingface.co/datasets/flax-sentence-embeddings/stackexchange_title_best_voted_answer_jsonl) (Title, Most Upvoted Answer) pairs** | - | 4,784,250 | | **[Stack Exchange](https://huggingface.co/datasets/flax-sentence-embeddings/stackexchange_titlebody_best_voted_answer_jsonl) (Title+Body, Most Upvoted Answer) pairs** | - | 4,551,660 | | [GOOAQ: Open Question Answering with Diverse Answer Types](https://github.com/allenai/gooaq) | [paper](https://arxiv.org/pdf/2104.08727.pdf) | 3,012,496 | | **[Amazon QA](https://huggingface.co/datasets/sentence-transformers/embedding-training-data)** | - | 2,507,114 | | [Code Search](https://huggingface.co/datasets/code_search_net) | - | 1,375,067 | | [Yahoo Answers](https://www.kaggle.com/soumikrakshit/yahoo-answers-dataset) (Title, Answer) | [paper](https://proceedings.neurips.cc/paper/2015/hash/250cf8b51c773f3f8dc8b4be867a9a02-Abstract.html) | 1,198,260 | | **[AG News]((Title, Description) pairs of news articles from the AG News dataset)** | - | 1,157,745 | | [COCO](https://cocodataset.org/#home) Image captions | [paper](https://link.springer.com/chapter/10.1007%2F978-3-319-10602-1_48) | 828,395| | [SPECTER](https://github.com/allenai/specter) citation triplets | [paper](https://doi.org/10.18653/v1/2020.acl-main.207) | 684,100 | | [Yahoo Answers](https://www.kaggle.com/soumikrakshit/yahoo-answers-dataset) (Question, Answer) | [paper](https://proceedings.neurips.cc/paper/2015/hash/250cf8b51c773f3f8dc8b4be867a9a02-Abstract.html) | 681,164 | | [Yahoo Answers](https://www.kaggle.com/soumikrakshit/yahoo-answers-dataset) (Title, Question) | [paper](https://proceedings.neurips.cc/paper/2015/hash/250cf8b51c773f3f8dc8b4be867a9a02-Abstract.html) | 659,896 | | **[CC News](https://huggingface.co/datasets/sentence-transformers/embedding-training-data) (Title, article) pairs** | - | 614,664 | | 
**[NPR](https://huggingface.co/datasets/sentence-transformers/embedding-training-data) (Title, Body) pairs** | - | 594,384 | | [SearchQA](https://huggingface.co/datasets/search_qa) | [paper](https://arxiv.org/abs/1704.05179) | 582,261 | | **[MS Marco](https://microsoft.github.io/msmarco/) (Query, Answer Passage) pairs** | [paper](https://doi.org/10.1145/3404835.3462804) | 532,751 | | [Stack Exchange](https://docs.google.com/spreadsheets/d/1vXJrIg38cEaKjOG5y4I4PQwAQFUmCkohbViJ9zj_Emg/edit#gid=0) (Title, Body) pairs | - | 364,000 | | [Eli5](https://huggingface.co/datasets/eli5) | [paper](https://doi.org/10.18653/v1/p19-1346) | 325,475 | | [Flickr 30k](https://shannon.cs.illinois.edu/DenotationGraph/) | [paper](https://transacl.org/ojs/index.php/tacl/article/view/229/33) | 317,695 | | **[CNN & DailyMail](https://huggingface.co/datasets/sentence-transformers/embedding-training-data) (highlight sentences, article) pairs** | - | 311,971 | | [Stack Exchange](https://docs.google.com/spreadsheets/d/1vXJrIg38cEaKjOG5y4I4PQwAQFUmCkohbViJ9zj_Emg/edit#gid=0) Duplicate questions (titles) | - | 304,524 | | AllNLI ([SNLI](https://nlp.stanford.edu/projects/snli/) and [MultiNLI](https://cims.nyu.edu/~sbowman/multinli/) | [paper SNLI](https://doi.org/10.18653/v1/d15-1075), [paper MultiNLI](https://doi.org/10.18653/v1/n18-1101) | 277,230 | | [Stack Exchange](https://docs.google.com/spreadsheets/d/1vXJrIg38cEaKjOG5y4I4PQwAQFUmCkohbViJ9zj_Emg/edit#gid=0) Duplicate questions (bodies) | - | 250,518 | | [Stack Exchange](https://docs.google.com/spreadsheets/d/1vXJrIg38cEaKjOG5y4I4PQwAQFUmCkohbViJ9zj_Emg/edit#gid=0) Duplicate questions (titles+bodies) | - | 250,459 | | **[XSUM](https://huggingface.co/datasets/sentence-transformers/embedding-training-data) (Summary, News Article) pairs** | - | 226,711 | | **[Stack Exchange](https://huggingface.co/datasets/flax-sentence-embeddings/stackexchange_titlebody_best_and_down_voted_answer_jsonl) (Title+Body, Most Upvoted Answer, Most Downvoted Answer) triplets** | - | 216,454 | | [Sentence Compression](https://github.com/google-research-datasets/sentence-compression) | [paper](https://www.aclweb.org/anthology/D13-1155/) | 180,000 | | **[FEVER](https://docs.google.com/spreadsheets/d/1vXJrIg38cEaKjOG5y4I4PQwAQFUmCkohbViJ9zj_Emg/edit#gid=0) training data** | - | 139,051 | | [Wikihow](https://github.com/pvl/wikihow_pairs_dataset) | [paper](https://arxiv.org/abs/1810.09305) | 128,542 | | **[SearchQA](https://huggingface.co/datasets/search_qa) (Question, Top-Snippet)** | [paper](https://arxiv.org/abs/1704.05179) | 117,384 | | [Altlex](https://github.com/chridey/altlex/) | [paper](https://aclanthology.org/P16-1135.pdf) | 112,696 | | **[Quora Question Duplicates](https://quoradata.quora.com/First-Quora-Dataset-Release-Question-Pairs)** | - | 103,663 | | [Quora Question Triplets](https://quoradata.quora.com/First-Quora-Dataset-Release-Question-Pairs) | - | 103,663 | | [Simple Wikipedia](https://cs.pomona.edu/~dkauchak/simplification/) | [paper](https://www.aclweb.org/anthology/P11-2117/) | 102,225 | | [Natural Questions (NQ)](https://ai.google.com/research/NaturalQuestions) | [paper](https://transacl.org/ojs/index.php/tacl/article/view/1455) | 100,231 | | [SQuAD2.0](https://rajpurkar.github.io/SQuAD-explorer/) | [paper](https://aclanthology.org/P18-2124.pdf) | 87,599 | | [TriviaQA](https://huggingface.co/datasets/trivia_qa) | - | 73,346 | | **Total** | | **1,492,453,113** | #### Replication The entire fine-tuning process for this model can be replicated by following the steps outlined 
in the `Replication.txt` file within this repository. This document explains how to modify the [sentence-transformers](https://www.SBERT.net) library, configure the pre-trained [`mosaic-bert-base-seqlen-2048`](https://huggingface.co/mosaicml/mosaic-bert-base-seqlen-2048) model, load all of the training data, and execute the training script. #### Limitations Due to technical constraints (e.g. limited GPU memory capacity), this model was trained with a smaller batch size of 16, so each training step was informed by fewer examples than it would have been on a higher-performance system. This smaller-than-ideal batch size generally makes the model more likely to get stuck in a local minimum and causes the parameter configuration to take longer to converge to an optimum. To counteract this risk, we trained the model for a larger number of steps than many of its contemporaries to give it a greater chance of achieving strong performance, but this is an area which could be improved if further fine-tuning were performed. It is also worth noting that, while this model is able to handle longer input sequences of up to 4096 word pieces, the training dataset consists of sentence and paragraph pairs and triplets that do not necessarily reach that maximum sequence length. Since the data was not tailored specifically for this larger input size, further fine-tuning may be required to ensure highly accurate embeddings for longer texts of that magnitude. Finally, as stated on https://huggingface.co/datasets/sentence-transformers/reddit-title-body, an additional reminder and warning regarding the Reddit posts data is that one should "Be aware that this dataset is not filtered for biases, hate-speech, spam, racial slurs etc. It depicts the content as it is posted on Reddit." While we believe this has not induced any pathological behaviors in the model, given the relatively low prevalence of these records in the whole dataset of nearly 1.5B sentence pairs and the fact that the model was trained to produce semantic embeddings rather than generative text outputs, it is always important to be aware of vulnerabilities to bias.
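#### Appendix: ALiBi extrapolation sketch

As a supplement to the Model Architecture section above, the following minimal sketch shows how ALiBi's linear attention biases are constructed and why they extrapolate to sequence lengths longer than those seen during training. It is illustrative only and is not the model's internal implementation; the slope schedule is simplified to the power-of-two head-count case from the ALiBi paper, and the bias is shown in its symmetric (bidirectional) form.

```python
import torch

def alibi_bias(num_heads: int, seq_len: int) -> torch.Tensor:
    """Illustrative ALiBi bias of shape (num_heads, seq_len, seq_len).

    Each head h adds -slope_h * |i - j| to the attention logit between
    positions i and j. Because the penalty depends only on relative distance,
    the same formula applies unchanged at inference lengths longer than the
    training length (e.g. 4096 vs. 2048), which is what enables the
    extrapolation described above.
    """
    # Geometric slope schedule from the ALiBi paper (power-of-two head counts).
    slopes = torch.tensor([2.0 ** (-8.0 * (h + 1) / num_heads) for h in range(num_heads)])
    positions = torch.arange(seq_len)
    distance = (positions[None, :] - positions[:, None]).abs()  # (seq_len, seq_len)
    return -slopes[:, None, None] * distance[None, :, :]

bias = alibi_bias(num_heads=12, seq_len=8)
print(bias.shape)  # torch.Size([12, 8, 8])
```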
[ "QUESTION_ANSWERING", "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
QuantFactory/Llama-3.1-EIRAI-8B-GGUF
QuantFactory
null
[ "transformers", "gguf", "medical", "text-generation-inference", "llama-3.1", "finetuning", "th", "en", "arxiv:2409.08523", "base_model:meta-llama/Llama-3.1-8B", "base_model:quantized:meta-llama/Llama-3.1-8B", "license:llama3.1", "endpoints_compatible", "region:us", "conversational" ]
2024-09-20T12:15:38
2024-09-20T12:58:27
157
2
--- base_model: meta-llama/Meta-Llama-3.1-8B language: - th - en library_name: transformers license: llama3.1 tags: - medical - text-generation-inference - llama-3.1 - finetuning --- [![QuantFactory Banner](https://lh7-rt.googleusercontent.com/docsz/AD_4nXeiuCm7c8lEwEJuRey9kiVZsRn2W-b4pWlu3-X534V3YmVuVc2ZL-NXg2RkzSOOS2JXGHutDuyyNAUtdJI65jGTo8jT9Y99tMi4H4MqL44Uc5QKG77B0d6-JfIkZHFaUA71-RtjyYZWVIhqsNZcx8-OMaA?key=xt3VSDoCbmTY7o-cwwOFwQ)](https://hf.co/QuantFactory) # QuantFactory/Llama-3.1-EIRAI-8B-GGUF This is quantized version of [EIRTHAIMED/Llama-3.1-EIRAI-8B](https://huggingface.co/EIRTHAIMED/Llama-3.1-EIRAI-8B) created using llama.cpp # Original Model Card <p align="center"> <img src="https://cdn-uploads.huggingface.co/production/uploads/66bf1cd096583c59b024a3c5/oG16EyLMfyiqvXrbNPGZd.png" alt="Logo_Website" width="400"/> </p> # **Thai Medical Large Language Model** **Github** : [Github Evaluate](https://github.com/EIRAI-Thaimedical/EIRAI)<br> **PaPer** : <br> ## **Llama-3.1-EIRAI-8B-instruct** **Llama-3.1-EIRAI-8B-instruct**: developed an **8-billion parameter model** specifically tailored for **Thai medical applications**, with expertise in both **Thai medical language** and **English medical terminology**. The model has demonstrated its capabilities through key benchmarks such as **MMLU**, **MedQA**, **PubMedQA**, and **MedMCQA**, as well as Thai language assessments like **ThaiExam**, **M3Exam**, **XNLI**, and **XCOPA**. Additionally, we have created a **Clinically Adapted Model Enhanced test** using the **Thai language** to support **clinical use in hospitals** and to further improve the performance of **Thai medical Retrieval-Augmented Generation (RAG)**. ## Notice While **Eir AI Thai Medical LLM** is designed to encode high-quality medical knowledge, it is **not yet optimized for safe, practical use** in real-world medical settings. The model is still in the research phase and should **not be used for clinical decision-making** without further validation, including randomized controlled trials. It is available for researchers to explore the potential of LLMs in medical contexts, but **real-world deployment is not recommended** in its current version. ## Safety and Future Work The current version of **Eir AI Thai Medical LLM** is under active development. We advise against using it for medical applications until further testing is completed. Our goal is to continue enhancing the model through **rigorous testing** and **real-world evaluation**, ensuring that it can be safely integrated into healthcare systems in the future. ## Model Overview - **Model Architecture:** Meta-Llama-3.1-8B-Instruct - **Version:** 1.0 - **License(s):** [llama3.1](https://huggingface.co/meta-llama/Meta-Llama-3.1-8B/blob/main/LICENSE) ### Evaluations | Medical Model | Clinical KG | Medical Genetics | Anatomy | Pro Medicine | College Biology | College Medicine | MedQA | PubMedQA | MedMCQA | Avg. 
| |--------------------------|---------------------|---------------------|--------------------|--------------------|--------------------|--------------------|-------------------|-------------------|-------------------|-------------------| | **GPT-3.5 Turbo 1106** | 74.7 | 60.2 | 65.9 | 72.0 | 64.73 | 64.73 | 57.71 | 72.66 | 66.0 | 66.6 | |Thai LLMs | | | | | | | | | | | | **Eir AI-8B** | 75.1 | 80.0 | 69.6 | 76.8 | 77.1 | 66.5 | 64.5 | **79.0** | 58.6 | 71.9 | | **Eir AI-8B + Prob** | **83.8** | **89.0** | **83.0** | **84.9** | **89.6** | **75.7** | **69.6** | 78.8 | **67.1** | **80.2** | | **Typhoon-v1.5x-8B** | 75.9 | 79.0 | 63.7 | 70.6 | 77.1 | 63.6 | 59.7 | 74.4 | 58.0 | 69.1 | | **OpenThaiGPT-beta-7B** | 37.4 | 38.0 | 4.5 | 32.7 | 36.1 | 32.4 | 32.4 | 62.0 | 31.8 | 34.1 | ## Translation Performance Metrics | **Model** | **BLEU Score** | **N-gram Precisions (%)** | **BP** | **Ratio** | |-------------------------------|----------------|---------------------------------|---------|-----------| | Typhoon-v1.5x-8B-Instruct | 34.42 | 71.3/50.6/38.6/29.6 | 0.764 | 0.788 | | Meta Llama 3.1-8B Instruct | 35.74 | 62.8/42.3/31.7/24.1 | 0.946 | 0.948 | | **Eir AI-8B** | **61.10** | **76.1/64.6/56.6/50.1** | **1.000**| **1.006** | | Eir AI-8B-prob | 47.91 | 74.0/58.0/48.2/40.6 | 0.890 | 0.896 | ## Clinically Adapted Thai Medical Task Performance | Task | GPT-3.5 | Typhoon-v1.5x-8B-instruct | GPT-4o | Eir AI-8B | |----------------------------------------|---------|----------------------------|--------|-----------| | Named Entity Recognition | 3.26 | 5.55 | 6.34 | **7.08** | | Temporal Information Extraction | 3.83 | 5.46 | 6.15 | **7.05** | | Paraphrasing | 2.36 | 4.68 | 6.35 | **7.06** | | Natural Language Generation | 2.63 | 4.87 | 6.91 | **7.66** | | Keyword Extraction | 2.60 | 5.15 | 7.01 | **7.35** | | Text Classification | 2.92 | 6.21 | 5.36 | **6.75** | | Relation Extraction | 3.29 | 5.94 | 4.37 | **6.92** | | Question Answering | 3.70 | 4.92 | 6.11 | **6.82** | | Text Summarization | 2.98 | 5.44 | **7.51**| **7.51** | | Abbreviation Expansion | 3.99 | 5.96 | 6.24 | **7.82** | | Clinical Concept Normalization | 2.67 | 5.63 | 5.82 | **6.55** | | Open-ended Question | 3.32 | 5.55 | 6.77 | **7.27** | | Multiple-Choice Question | 3.90 | 5.00 | 5.40 | **6.40** | | Coreference Resolution | 3.48 | 4.55 | 4.88 | **6.43** | | Yes/No Question | 2.71 | 5.86 | 4.86 | **7.38** | | Medical Translation | 3.00 | 4.00 | **7.79**| 7.65 | | Medical Thai Extraction | 2.81 | 7.16 | **8.62**| 8.16 | | Medical ICD Prediction | 2.08 | 3.16 | **8.12**| 6.41 | | **Average Score** | 3.05 | 5.33 | 6.38 | **7.11** | # Prompt Template This model uses `ChatML` prompt template: ``` <|begin_of_text|><|start_header_id|>system<|end_header_id|> {system_prompt}<|eot_id|><|start_header_id|>user<|end_header_id|> {prompt}<|eot_id|><|start_header_id|>assistant<|end_header_id|> ```` # Example Clinical Adapted ICD 10 Prediction ```` <|begin_of_text|><|start_header_id|>system<|end_header_id|> You are responsible for accurately assigning ICD-10 codes and to diagnose and document medical records. Your expertise ensures that healthcare providers are properly reimbursed and that patient care is well-documented. In this scenario, you will be presented with a series of medical records and your task is to provide the correct ICD-10 code(s) and ICD-9 CM in procedures based on the information provided. 
<|eot_id|> <|start_header_id|>user<|end_header_id|> "Chief Complaint :5วันก่อนมารพ.มีไข้ ไอ มีเสมหะ มีน้ำมูก เหนื่อย ปวดเมื่อยตามตัว \r\n Present illness : 5วันก่อนมารพ.มีไข้ ไอ มีเสมหะ มีน้ำมูก เหนื่อย ปวดเมื่อยตามตัว มีน้ำมูก เลือดกำเดาจาากข้างขวา ปฏิการกระทบกระแทก ไม่มีเจ็บคอ ไม่มีอาการอ่อนเพลีย มีอาการอ่อนเพลีย ไอมาก ไอตลอด มีอาการระคายคอ ปัสสาวะปกติ ไม่มีถ่ายเหลว \r\n\r\nAllergy : |\r\n\r\nOther : no underlying disease\r\n\r\nPlan Treatment Day 1 of hospitalization : admit ward \r\n\r\nReview of System { \r\n\r\n General :a thai adult female ,look sickness fatigue dry lip moderate dehydration \r\n Skin :no MP rash \r\n Eyes :not pale ,no icteric sclera \r\n Chest :secretion sound in both lung ,no crepitation , no wheezing \r \n } VitalSign First : {\n BP : 117.0/63.0 mmHg\n Pulse : 62.0 BPm\n Temperature : 37.0 Celsius\n Respiratory rate : 20.0\n Weight : 50.000 kgs.\n Height : 165.0 cm.\n Painscore: N/A\n O2SAT : 100\n}\n Lab Results: \n Electrolyte:Sodium (Na), Result : 143 mmol/L\r\n Electrolyte:Potassium (K),Result : 3.8 mmol/L\r\n Electrolyte:Chloride (Cl), Result : 108 mmol/L\r\n Electrolyte:Bicarbonate (CO2),Result : 27.0 mmol/L\r\n Creatinine (Serum):Creatinine, Result : 0.69 mg/dL\r\n Creatinine (Serum):eGFR,Result : 100.41 ml/min/1.73 m^2\r\n AST/SGOT:AST/SGOT, Result : 48 U/L\r\n ALT/SGPT:ALT/SGPT, Result : 42 U/L\r\n CBC:WBC Count,Result : 3.2 10^3/uL\r\n CBC:RBC Count, Result : 3.57 10^6/uL\r\n CBC:Hemoglobin (Hb), Result : 10.7 g/dL\r\n CBC:Hematocrit (HCT),Result : 32.4 %\r\n CBC:MCV, Result : 91 fL\r\n CBC:MCH, Result : 30.0 pg\r\n CBC:MCHC, Result : 33.0 g/dL\r\n CBC:RDW-CV,Result : 12.9 %\r\n CBC:Platelet Count, Result : 226 10^3/uL\r\n CBC:Platelet Estimates, Result : Adequate \r\n CBC:Neutrophils,Result : 41 %\r\n CBC:Lymphocytes, Result : 46 %\r\n CBC:Monocytes, Result : 9 %\r\n CBC:Eosinophils, Result : 4 %\r\n CBC:Basophils,Result : 0 %\r\n COVID-19 Antigen (Rapid Test) For Admit:COVID-19 Antigen (Rapid Test), Result : Positive \r\n Influenza A/B screening:Flu A,Result : Negative \r\n Influenza A/B screening:Flu B, Result : Negative \n X-ray Results: 15-05-2567 12:56 CHEST (PA, UPRIGHT)\r\n \r\nHISTORY: Fever with productive cough and dyspnea for 5 days. \r\nCOMPARISON: None.\r\n \r\n FINDINGS:\r\n TUBES/LINES: None.\r\nLUNGS: Normal lung volumes. Band-like opacity at right apical lung. Clear left lung.\r\n PLEURA: No pneumothorax or effusion. Mild bilateral apical pleural thickening. \r\n HEART AND MEDIASTINUM: Normal cardiac silhouette. Normal mediastinal contour.\r\n BONY STRUCTURES: Mild scoliosis of the thoracic spine. \r\n\r\n IMPRESSION:\r\n - Band-like opacity at right apical lung, probably plate atelectasis or fibrosis\r\n- Mild bilateral apical pleural thickening" <|eot_id|> <|start_header_id|>assistant<|end_header_id|> Let's analyze the provided information and document the appropriate ICD-10 codes based on the patient's symptoms and diagnosis. ### Step-by-Step Analysis: 1. **Understand the Symptoms and Diagnosis**: - The patient presents with a range of symptoms including fever, cough with sputum, fatigue, nasal discharge (including blood), and dyspnea. The lab tests returned a positive COVID-19 antigen test, while the influenza test was negative. 2. **Identify Key Terms**: - Primary key terms include 'COVID-19 positive', 'fever', 'productive cough', 'dyspnea', and pulmonary findings from the X-ray. 3. 
**Locate the Main Category**: - The main categories for the conditions noted are: - COVID-19: U07.1 - Possible acute bronchitis: J20 (based on the productive cough) - Other symptoms such as dizziness: R42 - Evidence of acute respiratory infection: J06.9 4. **Identify Specific Codes**: - Based on the diagnosis: - COVID-19 confirmed case: U07.1 - Acute bronchitis: J20.9 (considering unspecified acute bronchitis) - Dizziness (if noted as significant and problematic): R42 - Unspecified acute upper respiratory infection: J06.9 5. **Check for Additional Codes or Modifiers**: - Considering the details provided, the primary diagnosis is the confirmed COVID-19 infection, while bronchiolitis and dizziness might be secondary diagnoses. 6. **Validate the Code Selection**: - Confirm that the selected codes reflect the patient's symptoms accurately, including acute bronchitis and any other significant symptoms presented. 7. **Document the ICD-10 Code**: - The ICD-10 codes are documented for all noted conditions accordingly." ```json Answer: { "Primary_Diagnosis": { "Code": "U071", "Description": "COVID-19, virus identified" }, "Secondary_Diagnosis": [ { "Code": "J20", "Description": "Acute bronchitis due to unspecified organism" }, { "Code": "R42", "Description": "Dizziness and giddiness" }, { "Code": "J069", "Description": "Acute upper respiratory infection, unspecified" } ], "ICD9_Procedure": [] } ```` # Example Clinical Adapted Thai Medical Extraction ```` <|begin_of_text|><|start_header_id|>system<|end_header_id|> Task : Extract input the following patient information into output format Tone: the following medical text into Thai in a fluent and elegant style. Output Format.1.Age: \n2.Gender: \n3.Weight :\n4.Height : \n5.Chief Complaint: \n6.Symptoms and Signs: \n7.Medical History: \n 8.Current Medications: \n9.Laboratory Results: \n10.Imaging Findings: \n11.Allergy: \n12.Drug Allergy: <|eot_id|> <|start_header_id|>user<|end_header_id|> ผู้ป่วยของเราเป็นชายถนัดทั้งสองมือ อายุ 43 ปี มีประวัติการชักที่ไม่สามารถควบคุมได้มาเป็นเวลา 20 ปี ลักษณะการชักของเขามักจะรวมถึงการรู้สึกร้อนวูบวาบและอาการทางประสาทสัมผัสอื่น ๆ ที่พัฒนาไปสู่การเคลื่อนไหวของกล้ามเนื้อที่มีจุดศูนย์กลางส่วนใหญ่ทางด้านขวา การตรวจหาสาเหตุของการชักรวมถึงการถ่ายภาพด้วยคลื่นแม่เหล็กไฟฟ้า (MRI) ซึ่งเผยให้เห็นเนื้องอกไขมันขนาดใหญ่ที่เส้นกลางสมอง การพัฒนาไม่สมบูรณ์ของคอร์ปัสคาโลซัมบางส่วน และรอยโรคที่อยู่ใกล้เคียงในสมองส่วนหน้าซ้ายที่คาดว่าจะเป็นเนื้องอกกลีอาล (glial neoplasm) ตามลักษณะภาพถ่ายทางรังสี รอยโรคในสมองส่วนหน้าซ้ายด้านหน้าและตรงกลางประกอบด้วยการกลายเป็นหินปูนแบบเป็นก้อนพร้อมการเพิ่มขึ้นของสัญญาณ FLAIR ที่กว้างขวางซึ่งเกี่ยวข้องกับไจรัสซิงกูเลตทั้งสองข้างและสมองส่วนหน้าซ้าย (รูปที่ ).\n\nการจัดการทางการแพทย์ล้มเหลวในการควบคุมการชักของเขาและเขาถูกส่งต่อเพื่อหาทางเลือกในการรักษาด้วยการผ่าตัด รอยโรคที่เพิ่มขึ้นถูกสังเกตด้วยการถ่ายภาพเพิ่มเติมและขอบเขตของอาการบวมน้ำก็เพิ่มขึ้นด้วย ความกังวลเกี่ยวกับการพัฒนาเนื้องอกกลีอาลที่เพิ่มขึ้นและการควบคุมการชักที่ไม่ดีทำให้มีการแนะนำให้ทำการผ่าตัด การตัดสินใจถูกทำขึ้นเพื่อดำเนินการผ่าตัดนำทางด้วยระบบประสาทเพื่อตัดมวลที่เพิ่มขึ้นในสมองส่วนหน้าซ้ายและการตัดสมองส่วนหน้าบางส่วนโดยใช้การตรวจคลื่นไฟฟ้าสมองระหว่างการผ่าตัด (intraoperative electroencephalogram - EEG), การทำแผนที่คอร์ติคอล (cortical mapping) และการตรวจวัดศักย์ไฟฟ้าที่เกิดจากการกระตุ้นประสาทรับความรู้สึก (somatosensory evoked potentials - SSEP)\n\nตัวอย่างที่ส่งไปตรวจทางพยาธิวิทยาแบบแช่แข็งในระหว่างการผ่าตัดพบว่ามีเส้นใยโรเซนธาล (Rosenthal fibers) และการกลายเป็นหินปูนแบบเป็นจุดซึ่งคาดว่าจะเป็นเนื้องอกกลีอาล 
การประเมินทางพยาธิวิทยาแบบถาวรเผยให้เห็นเนื้องอกไขมัน (lipoma) และความผิดปกติของคอร์ติคอลแบบเฉพาะจุด (focal cortical dysplasia) แบบ Palmini Type IA ในสมองที่อยู่ใกล้เคียง ความผิดปกติเล็กน้อยของโครงสร้างคอร์ติคอลและการเกิดกลีโอซิส (gliosis) ในเนื้อสมองขาวที่เกี่ยวข้องสามารถเห็นได้ในคราบสีฮีมาโทซิลินและอีโอซิน (hematoxylin and eosin - H&E) และคราบสีโปรตีนกรดกลีอาลไฟบริลลารี (glial fibrillary acidic protein - GFAP) และการย้อมสีโปรตีนเส้นประสาท (neurofilament protein - NFP) ในบริเวณที่เกิดกลีโอซิสไม่แสดงหลักฐานของเซลล์ประสาทที่ผิดรูป เซลล์ประสาทขนาดใหญ่ หรือเซลล์ลูกโป่ง (รูปที่ ).\n\n การถ่ายภาพหลังการผ่าตัด (รูปที่ ) แสดงให้เห็นการตัดรอยโรคที่เพิ่มขึ้นใกล้เคียงทั้งหมดในสมองส่วนหน้าซ้ายและไจรัสซิงกูเลต การลดลงอย่างมากของอาการบวมน้ำที่เกิดจากหลอดเลือดถูกสังเกตในภาพ FLAIR เนื้องอกไขมันที่เส้นกลางสมองไม่เปลี่ยนแปลง\n\nการฟื้นตัวหลังการตัดรวมถึงอาการเสริมในบริเวณมอเตอร์ซินโดรมระดับปานกลางรวมถึงอาการอ่อนแรงครึ่งซีก การพูดล่าช้า และการเปลี่ยนแปลงทางอารมณ์ การเปลี่ยนแปลงเหล่านี้ค่อยๆ หายไปและพบว่าผู้ป่วยมีความสมบูรณ์ทางระบบประสาทในการติดตามผลที่คลินิก การควบคุมการชักเกิดขึ้นทันทีหลังการตัด ยาต้านการชักถูกใช้อย่างต่อเนื่องและค่อยๆ ถูกถอดออกในเดือนต่อมาโดยไม่มีการกลับมาเป็นซ้ำ ผู้ป่วยไม่มีการชักเป็นเวลา 3 ปีหลังการตัด การถ่ายภาพติดตามผล 18 เดือนพบการเปลี่ยนแปลง T2/FLAIR ที่คงที่พร้อมกับบริเวณเล็ก ๆ <|eot_id|> <|start_header_id|>assistant<|end_header_id|> 1. อายุ: 43 ปี\n 2. เพศ: ชาย\n 3. น้ำหนัก: ไม่ระบุ\n 4. ส่วนสูง: ไม่ระบุ\n 5. อาการหลัก: การชักที่ไม่สามารถควบคุมได้\n 6. อาการและสัญญาณ: รู้สึกร้อนวูบวาบ, อาการทางประสาทสัมผัส, การเคลื่อนไหวของกล้ามเนื้อที่มีจุดศูนย์กลางส่วนใหญ่ทางด้านขวา\n 7. ประวัติการรักษา: การจัดการทางการแพทย์ล้มเหลวในการควบคุมการชัก\n 8. ยาที่ใช้ในปัจจุบัน: ยาต้านการชัก\n9. ผลการตรวจทางห้องปฏิบัติการ: ไม่ระบุ\n 10. ผลการตรวจภาพ: MRI แสดงเนื้องอกไขมันขนาดใหญ่ที่เส้นกลางสมอง, การพัฒนาไม่สมบูรณ์ของคอร์ปัสคาโลซัมบางส่วน, รอยโรคที่อยู่ใกล้เคียงในสมองส่วนหน้าซ้ายที่คาดว่าจะเป็นเนื้องอกกลีอาล\n 11. อาการแพ้: ไม่ระบุ\n 12. อาการแพ้ยา: ไม่ระบุ\n การผ่าตัดนำทางด้วยระบบประสาทเพื่อตัดมวลที่เพิ่มขึ้นในสมองส่วนหน้าซ้ายและการตัดสมองส่วนหน้าบางส่วนได้ถูกดำเนินการ และผลการตรวจทางพยาธิวิทยาเผยให้เห็นว่าเป็นเนื้องอกไขมันและความผิดปกติของคอร์ติคอลแบบเฉพาะจุด (focal cortical dysplasia) แบบ Palmini Type IA ในสมองที่อยู่ใกล้เคียง การฟื้นตัวหลังการผ่าตัดรวมถึงอาการเสริมในบริเวณมอเตอร์ซินโดรมระดับปานกลาง แต่การควบคุมการชักเกิดขึ้นทันทีหลังการผ่าตัดและผู้ป่วยไม่มีการชักเป็นเวลา 3 ปีหลังการผ่าตัด การถ่ายภาพติดตามผล 18 เดือนพบการเปลี่ยนแปลง T2/FLAIR ที่คงที่พร้อมกับบริเวณเล็ก ๆ ที่เพิ่มขึ้นของการเพิ่มขึ้นใกล้เคียงที่ไม่เปลี่ยนแปลง. ```` # How to use ```python from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig import torch model_id = "EIRTHAIMED/Llama-3.1-EIRAI-8B" nf4_config = BitsAndBytesConfig( load_in_4bit=True, bnb_4bit_quant_type="nf4", bnb_4bit_use_double_quant=True, bnb_4bit_compute_dtype=torch.bfloat16 ) # Load the base model tokenizer = AutoTokenizer.from_pretrained(model_id) model = AutoModelForCausalLM.from_pretrained( model_id, torch_dtype=torch.bfloat16, # quantization_config=nf4_config, # uncomment this line for 4 bit loading device_map="auto", attn_implementation="flash_attention_2" ) messages = [ {"role": "system", "content": "You are an expert medical assistant named EIR , developed by EIR Thai Medical LLM. 
You are to be a helpful, respectful, and honest assistant."}, {"role": "user", "content": "การใช้ clinical tracer มีบทบาทอย่างไรในการพัฒนาคุณภาพการดูแลผู้ป่วย?"} ] input = tokenizer.apply_chat_template( messages, tokenize = True, add_generation_prompt = True, # Must add for generation return_tensors = "pt", ).to("cuda") from transformers import TextStreamer text_streamer = TextStreamer(tokenizer, skip_prompt = True) _ = model.generate(input, streamer = text_streamer, max_new_tokens = 1500, do_sample=True, temperature=0.01, top_k=100, top_p=0.95) ``` ``` @article{EirAI, title={Eir: Thai Medical Large Language Models}, author={Yutthakorn Thiprak and Rungtam Ngodngamthaweesuk and Songtam Ngodngamtaweesuk, MD}, year={2024}, journal={arXiv preprint arXiv:2409.08523}, url={https://arxiv.org/abs/2409.08523} } ``` --- **Thank you very much** Asst.Prof.Dr. Ekapol Chuangsuwanich and Praj Bhargava @Meta Research Engineer, for your valuable endorsement of our preprint paper on arXiv. **Thank you** Draft Reviewer Report [Kullawat Chaowanawatee](https://www.computing.psu.ac.th/profile/index.php?staffid=coc0051) and [Dr. Jakapan Suaboot](https://www.computing.psu.ac.th/profile/index.php?staffid=coc0056) from Prince of Songkla University, Phuket Campus <br> Draft Industry Reviewer Report [Mr. Piyawat Maneenual](https://ieeexplore.ieee.org/author/37086452350) ,Assistant IT Manager ,Thonburi Rajyindee Hospital<br>
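# Using the GGUF files with llama.cpp

Since this repository distributes GGUF quantizations produced with llama.cpp, the `transformers` snippet above applies to the original unquantized model rather than to the GGUF files hosted here. Below is a minimal, illustrative sketch of loading one of the quants with the `llama-cpp-python` bindings; the filename is a placeholder assumption (use whichever `.gguf` file you download from this repo), and the generation settings mirror those suggested in the original card.

```python
# Illustrative sketch only: assumes `pip install llama-cpp-python` and that a GGUF
# file from this repo has already been downloaded. The filename below is a
# placeholder assumption, not a confirmed artifact name.
from llama_cpp import Llama

llm = Llama(
    model_path="./Llama-3.1-EIRAI-8B.Q4_K_M.gguf",  # replace with your downloaded quant
    n_ctx=4096,       # context window; adjust to available memory
    n_gpu_layers=-1,  # offload all layers to GPU if available; set 0 for CPU-only
)

messages = [
    {"role": "system", "content": "You are an expert medical assistant named EIR, developed by EIR Thai Medical LLM. You are to be a helpful, respectful, and honest assistant."},
    {"role": "user", "content": "การใช้ clinical tracer มีบทบาทอย่างไรในการพัฒนาคุณภาพการดูแลผู้ป่วย?"},
]

# Recent llama-cpp-python versions pick up the chat template stored in the GGUF
# metadata when one is present, so the Llama 3.1 format is applied automatically.
out = llm.create_chat_completion(messages=messages, max_tokens=1500, temperature=0.01, top_p=0.95)
print(out["choices"][0]["message"]["content"])
```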
[ "NAMED_ENTITY_RECOGNITION", "RELATION_EXTRACTION", "TEXT_CLASSIFICATION", "COREFERENCE_RESOLUTION", "QUESTION_ANSWERING", "TRANSLATION", "SUMMARIZATION", "PARAPHRASING" ]
[ "MEDQA", "PUBMEDQA" ]
Cloyne/vietnamese-sbert
Cloyne
sentence-similarity
[ "sentence-transformers", "safetensors", "roberta", "sentence-similarity", "feature-extraction", "generated_from_trainer", "dataset_size:120210", "loss:MultipleNegativesRankingLoss", "arxiv:1908.10084", "arxiv:1705.00652", "base_model:keepitreal/vietnamese-sbert", "base_model:finetune:keepitreal/vietnamese-sbert", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2024-10-28T14:49:39
2024-10-28T14:49:54
157
0
--- base_model: keepitreal/vietnamese-sbert library_name: sentence-transformers pipeline_tag: sentence-similarity tags: - sentence-transformers - sentence-similarity - feature-extraction - generated_from_trainer - dataset_size:120210 - loss:MultipleNegativesRankingLoss widget: - source_sentence: Chủ tịch Ủy ban nhân dân xã có quyền ra quyết định cưỡng chế tháo dỡ công trình xây dựng trên đất nông nghiệp khi chưa chuyển mục đích sử dụng đất hay không? sentences: - 'Đối tượng, điều kiện kéo dài tuổi phục vụ tại ngũ 1. Đối tượng: a) Quân nhân chuyên nghiệp có trình độ cao đẳng trở lên đang đảm nhiệm các chức danh: Kỹ thuật viên, Nhân viên Kỹ thuật, Huấn luyện viên, Nghệ sĩ, Nhạc sĩ, Diễn viên làm việc đúng chuyên ngành đào tạo ở các cơ sở nghiên cứu, nhà trường, bệnh viện, trung tâm thể dục thể thao, đoàn nghệ thuật, nhà máy, doanh nghiệp quốc phòng; đơn vị đóng quân ở địa bàn vùng sâu, vùng xa, biên giới, hải đảo. b) Quân nhân chuyên nghiệp đang làm việc thuộc các chuyên ngành hẹp được đào tạo công phu hoặc chuyên ngành Quân đội chưa đào tạo được; thợ bậc cao. c) Quân nhân chuyên nghiệp đang đảm nhiệm chức vụ chỉ huy, quản lý ở các nhà máy, doanh nghiệp quốc phòng. d) Quân nhân chuyên nghiệp không thuộc đối tượng quy định tại điểm a, điểm b, điểm c khoản này do Bộ trưởng Bộ Quốc phòng quyết định. 2. Điều kiện: Quân nhân chuyên nghiệp thuộc đối tượng quy định tại khoản 1 Điều này được kéo dài tuổi phục vụ tại ngũ khi có đủ các điều kiện sau: a) Đơn vị có biên chế và nhu cầu sử dụng; b) Hết hạn tuổi phục vụ tại ngũ cao nhất theo cấp bậc quân hàm quy định tại khoản 2 Điều 17 Luật Quân nhân chuyên nghiệp, công nhân và viên chức quốc phòng; chưa có người thay thế; tự nguyện tiếp tục phục vụ tại ngũ; c) Có đủ phẩm chất chính trị, đạo đức, sức khỏe để hoàn thành nhiệm vụ được giao; d) Có trình độ chuyên môn kỹ thuật, nghiệp vụ giỏi; tay nghề cao; chất lượng, hiệu quả công tác tốt.' - 'Thi hành quyết định cưỡng chế 1. Người ra quyết định cưỡng chế có trách nhiệm gửi ngay quyết định cưỡng chế cho các cá nhân, tổ chức liên quan và tổ chức thực hiện việc cưỡng chế thi hành quyết định xử phạt của mình và của cấp dưới. ..."' - 'Trình tự, thủ tục đăng ký tài khoản định danh điện tử đối với công dân Việt Nam 1. Đăng ký tài khoản định danh điện tử mức độ 1 qua ứng dụng VNelD đối với công dân đã có thẻ Căn cước công dân gắn chíp điện tử a) Công dân sử dụng thiết bị di động tải và cài đặt ứng dụng VNelD. b) Công dân sử dụng ứng dụng VNelD để nhập thông tin về số định danh cá nhân và số điện thoại hoặc địa chỉ thư điện tử; cung cấp các thông tin theo hướng dẫn trên ứng dụng VNelD; thu nhận ảnh chân dung bằng thiết bị di động và gửi yêu cầu đề nghị cấp tài khoản định danh điện tử tới cơ quan quản lý định danh và xác thực điện tử qua ứng dụng VNelD. c) Cơ quan quản lý định danh điện tử thông báo kết quả đăng ký tài khoản qua ứng dụng VNelD hoặc tin nhắn SMS hoặc địa chỉ thư điện tử. 2. Đăng ký tài khoản định danh điện tử mức độ 2 a) Đối với công dân đã được cấp thẻ Căn cước công dân gắn chíp điện tử: Công dân đến Công an xã, phường, thị trấn hoặc nơi làm thủ tục cấp thẻ Căn cước công dân để làm thủ tục cấp tài khoản định danh điện tử. Công dân xuất trình thẻ Căn cước công dân gắn chíp điện tử, cung cấp thông tin về số điện thoại hoặc địa chỉ thư điện tử và đề nghị bổ sung thông tin được tích hợp vào tài khoản định danh điện tử. 
Cán bộ tiếp nhận nhập thông tin công dân cung cấp vào hệ thống định danh và xác thực điện tử; chụp ảnh chân dung, thu nhận vân tay của công dân đến làm thủ tục để xác thực với Cơ sở dữ liệu căn cước công dân và khẳng định sự đồng ý đăng ký tạo lập tài khoản định danh điện tử. Cơ quan quản lý định danh điện tử thông báo kết quả đăng ký tài khoản qua ứng dụng VNelD hoặc tin nhắn SMS hoặc địa chỉ thư điện tử. b) Cơ quan Công an tiến hành cấp tài khoản định danh điện tử mức độ 2 cùng với cấp thẻ Căn cước công dân với trường hợp công dân chưa được cấp Căn cước công dân gắn chíp điện tử.' - source_sentence: Mức hưởng chế độ thai sản đối với lao động nam là người nước ngoài được pháp luật quy định như thế nào? sentences: - '"Điều 21. Thông báo kết quả và xác nhận nhập học 1. Cơ sở đào tạo gửi giấy báo trúng tuyển cho những thí sinh trúng tuyển, trong đó ghi rõ những thủ tục cần thiết đối với thí sinh khi nhập học và phương thức nhập học của thí sinh. 2. Thí sinh xác nhận nhập học bằng hình thức trực tuyến trên hệ thống, trước khi nhập học tại cơ sở đào tạo. 3. Đối với những thí sinh không xác nhận nhập học trong thời hạn quy định: a) Nếu không có lý do chính đáng thì coi như thí sinh từ chối nhập học và cơ sở đào tạo có quyền không tiếp nhận; b) Nếu do ốm đau, tai nạn, có giấy xác nhận của bệnh viện quận, huyện trở lên hoặc do thiên tai có xác nhận của UBND quận, huyện trở lên, cơ sở đào tạo xem xét quyết định tiếp nhận thí sinh vào học hoặc bảo lưu kết quả tuyển sinh để thí sinh vào học sau; c) Nếu do sai sót, nhầm lẫn của cán bộ thực hiện công tác tuyển sinh hoặc cá nhân thí sinh gây ra, cơ sở đào tạo chủ động phối hợp với các cá nhân, tổ chức liên quan xem xét các minh chứng và quyết định việc tiếp nhận thí sinh vào học hoặc bảo lưu kết quả tuyển sinh để thí sinh vào học sau. 4. Thí sinh đã xác nhận nhập học tại một cơ sở đào tạo không được tham gia xét tuyển ở nơi khác hoặc ở các đợt xét tuyển bổ sung, trừ trường hợp được cơ sở đào tạo cho phép."' - 'Tổ chức, nhiệm vụ, quyền hạn của Ban Chỉ huy ... 2. Nhiệm vụ, quyền hạn của Ban Chỉ huy: a) Chỉ đạo xây dựng, ban hành quy định về công tác bảo đảm an toàn PCCC và CNCH tại Trụ sở cơ quan Bộ Tư pháp. b) Hướng dẫn, phối hợp với các đơn vị thuộc Bộ và chỉ đạo Đội PCCC và CNCH cơ sở tổ chức tuyên truyền, bồi dưỡng nghiệp vụ PCCC và CNCH. c) Chỉ đạo Đội PCCC và CNCH cơ sở tại Trụ sở cơ quan Bộ Tư pháp xây dựng, trình cấp có thẩm quyền phê duyệt và tổ chức thực tập phương án PCCC, phương án CNCH. d) Chỉ đạo Đội PCCC và CNCH cơ sở tại Trụ sở cơ quan Bộ Tư pháp quản lý các trang thiết bị PCCC và CNCH. đ) Chỉ đạo chữa cháy, CNCH khi xảy ra cháy, sự cố, tai nạn tại Trụ sở cơ quan Bộ Tư pháp. e) Chỉ đạo việc tổ chức lập và lưu giữ hồ sơ quản lý, theo dõi hoạt động PCCC, CNCH tại Trụ sở cơ quan Bộ Tư pháp. g) Chỉ đạo việc sơ kết, tổng kết các hoạt động về PCCC và CNCH của cơ quan; kiểm tra, đôn đốc việc chấp hành các quy định về PCCC và CNCH. h) Đề xuất việc khen thưởng, kỷ luật các tập thể, cá nhân trong việc thực hiện công tác PCCC, CNCH. i) Chỉ đạo Đội PCCC và CNCH cơ sở dự trù kinh phí cho các hoạt động PCCC và CNCH tại Trụ sở cơ quan Bộ Tư pháp. k) Thực hiện các nhiệm vụ khác do Bộ trưởng giao và theo quy định của pháp luật.' - 'Mức hưởng chế độ thai sản ... b) Mức hưởng một ngày đối với trường hợp quy định tại Điều 32 và khoản 2 Điều 34 của Luật này được tính bằng mức hưởng chế độ thai sản theo tháng chia cho 24 ngày.' - source_sentence: Doanh nghiệp được áp dụng chế độ ưu tiên không cung cấp báo cáo kiểm toán đúng thời hạn bị phạt bao nhiêu tiền? 
sentences: - 'Thay đổi Thẩm phán, Hội thẩm 1. Thẩm phán, Hội thẩm phải từ chối tham gia xét xử hoặc bị thay đổi khi thuộc một trong các trường hợp: a) Trường hợp quy định tại Điều 49 của Bộ luật này; b) Họ cùng trong một Hội đồng xét xử và là người thân thích với nhau; c) Đã tham gia xét xử sơ thẩm hoặc phúc thẩm hoặc tiến hành tố tụng vụ án đó với tư cách là Điều tra viên, Cán bộ điều tra, Kiểm sát viên, Kiểm tra viên, Thẩm tra viên, Thư ký Tòa án. 2. Việc thay đổi Thẩm phán, Hội thẩm trước khi mở phiên tòa do Chánh án hoặc Phó Chánh án Tòa án được phân công giải quyết vụ án quyết định. Thẩm phán bị thay đổi là Chánh án Tòa án thì do Chánh án Tòa án trên một cấp quyết định. Việc thay đổi Thẩm phán, Hội thẩm tại phiên tòa do Hội đồng xét xử quyết định trước khi bắt đầu xét hỏi bằng cách biểu quyết tại phòng nghị án. Khi xem xét thay đổi thành viên nào thì thành viên đó được trình bày ý kiến của mình, Hội đồng quyết định theo đa số. Trường hợp phải thay đổi Thẩm phán, Hội thẩm tại phiên tòa thì Hội đồng xét xử ra quyết định hoãn phiên tòa.' - '“Điều 21. Chấm dứt hưởng trợ cấp thất nghiệp 1. Các trường hợp người lao động đang hưởng trợ cấp thất nghiệp bị chấm dứt hưởng trợ cấp thất nghiệp được quy định như sau: e) Trong thời gian hưởng trợ cấp thất nghiệp, 03 tháng liên tục không thực hiện thông báo hằng tháng về việc tìm kiếm việc làm với trung tâm dịch vụ việc làm theo quy định Ngày mà người lao động được xác định bị chấm dứt hưởng trợ cấp thất nghiệp là ngày kết thúc của thời hạn thông báo tìm kiếm việc làm của tháng thứ 3 liên tục mà người lao động không thực hiện thông báo hằng tháng về việc tìm kiếm việc làm."' - 'Vi phạm quy định về thời hạn làm thủ tục hải quan, nộp hồ sơ thuế ... 2. Phạt tiền từ 1.000.000 đồng đến 2.000.000 đồng đối với hành vi không thực hiện đúng thời hạn quy định thuộc một trong các trường hợp sau: a) Cung cấp báo cáo kiểm toán, báo cáo tài chính của doanh nghiệp được áp dụng chế độ ưu tiên; b) Thông báo cho cơ quan hải quan quyết định xử lý vi phạm pháp luật về quản lý thuế, kế toán đối với doanh nghiệp được áp dụng chế độ ưu tiên; c) Báo cáo về lượng hàng hóa nhập khẩu phục vụ xây dựng nhà xưởng, hàng hóa gửi kho bên ngoài của doanh nghiệp chế xuất; d) Báo cáo về lượng hàng hóa trung chuyển đưa vào, đưa ra, còn lưu tại cảng; đ) Báo cáo thống kê thông quan hàng bưu chính đưa vào Việt Nam để chuyển tiếp đi quốc tế. ...' - source_sentence: Tài chính của Hội Kiểm toán viên hành nghề Việt Nam được chi cho những khoản nào? sentences: - 'Giải thể và xử lý tài chính khi giải thể 1. Khi xét thấy hoạt động của Hội không có hiệu quả, không mang lại lợi ích cho Hội viên hoặc gây phiền hà, cản trở cho Hội viên thì BCH Hội quyết định triệu tập Đại hội để bàn biện pháp củng cố tổ chức hoặc giải thể Hội. Nếu giải thể Hội thì do Đại hội đại biểu hoặc Đại hội toàn quốc của Hội thông qua và đề nghị cơ quan Nhà nước có thẩm quyền xem xét, quyết định. 2. Khi Hội bị giải thể, Ban Thường trực và Ban Kiểm tra của Hội phải tiến hành kiểm kê tài sản, kiểm quỹ và báo cáo BCH Hội quyết định việc xử lý tài sản, tiền tồn quỹ và tiến hành thủ tục giải thể theo quy định của pháp luật.' - '"Điều 14. Miễn trừ đối với thỏa thuận hạn chế cạnh tranh bị cấm 1. 
Thỏa thuận hạn chế cạnh tranh quy định tại các khoản 1, 2, 3, 7, 8, 9, 10 và 11 Điều 11 bị cấm theo quy định tại Điều 12 của Luật này được miễn trừ có thời hạn nếu có lợi cho người tiêu dùng và đáp ứng một trong các điều kiện sau đây: a) Tác động thúc đẩy tiến bộ kỹ thuật, công nghệ, nâng cao chất lượng hàng hóa, dịch vụ; b) Tăng cường sức cạnh tranh của doanh nghiệp Việt Nam trên thị trường quốc tế; c) Thúc đẩy việc áp dụng thống nhất tiêu chuẩn chất lượng, định mức kỹ thuật của chủng loại sản phẩm; d) Thống nhất các điều kiện thực hiện hợp đồng, giao hàng, thanh toán nhưng không liên quan đến giá và các yếu tố của giá. 2. Thỏa thuận lao động, thỏa thuận hợp tác trong các ngành, lĩnh vực đặc thù được thực hiện theo quy định của luật khác thì thực hiện theo quy định của luật đó".' - '"Điều 2. Sửa đổi, bổ sung một số điều của Nghị định số 15/2019/NĐ-CP ngày 01 tháng 02 năm 2019 của Chính phủ quy định chi tiết một số điều và biện pháp thi hành Luật Giáo dục nghề nghiệp ... 12. Sửa đổi, bổ sung Điều 24 như sau: Điều 24. Thẩm quyền cấp giấy chứng nhận đăng ký hoạt động liên kết đào tạo với nước ngoài 1. Tổng cục Giáo dục nghề nghiệp cấp giấy chứng nhận đăng ký hoạt động liên kết đào tạo với nước ngoài đối với trường cao đẳng. 2. Sở Lao động - Thương binh và Xã hội nơi trường trung cấp, trung tâm giáo dục nghề nghiệp, trung tâm giáo dục nghề nghiệp - giáo dục thường xuyên và doanh nghiệp tổ chức hoạt động liên kết đào tạo với nước ngoài cấp giấy chứng nhận đăng ký hoạt động liên kết đào tạo với nước ngoài đối với trường trung cấp, trung tâm giáo dục nghề nghiệp, trung tâm giáo dục nghề nghiệp - giáo dục thường xuyên và doanh nghiệp."' - source_sentence: NLĐ ký nhiều hợp đồng lao động thì đóng BHYT như thế nào? sentences: - 'Hồ sơ, thủ tục xác định trường hợp được bồi thường [...] 3. Trong thời hạn 05 ngày làm việc, kể từ ngày nhận được đơn và các giấy tờ hợp lệ, nếu xác định yêu cầu thuộc trách nhiệm giải quyết của mình thì Sở Y tế phải thụ lý và thông báo bằng văn bản về việc thụ lý đơn cho người bị thiệt hại hoặc thân nhân của người bị thiệt hại (sau đây gọi tắt là người bị thiệt hại). Trường hợp hồ sơ không đầy đủ thì Sở Y tế có văn bản hướng dẫn người bị thiệt hại bổ sung. 4. Trong thời hạn 15 ngày, kể từ ngày nhận được đơn yêu cầu của người bị thiệt hại, Sở Y tế phải hoàn thành việc xác định nguyên nhân gây tai biến, mức độ tổn thương và thông báo bằng văn bản cho người yêu cầu đồng thời báo cáo Bộ Y tế.' - 'Chuyển nhượng quyền thăm dò khoáng sản 1. Tổ chức, cá nhân nhận chuyển nhượng quyền thăm dò khoáng sản phải có đủ điều kiện để được cấp Giấy phép thăm dò khoáng sản theo quy định của Luật này. 2. Việc chuyển nhượng quyền thăm dò khoáng sản phải được cơ quan quản lý nhà nước có thẩm quyền cấp Giấy phép thăm dò khoáng sản chấp thuận; trường hợp được chấp thuận, tổ chức, cá nhân nhận chuyển nhượng quyền thăm dò khoáng sản được cấp Giấy phép thăm dò khoáng sản mới. 3. Tổ chức, cá nhân chuyển nhượng quyền thăm dò khoáng sản đã thực hiện được ít nhất 50% dự toán của đề án thăm dò khoáng sản. 4. Chính phủ quy định chi tiết việc chuyển nhượng quyền thăm dò khoáng sản.' - '"Sửa đổi, bổ sung một số điều của Luật bảo hiểm y tế: ... 6. Sửa đổi, bổ sung Điều 12 như sau: “Điều 12. Đối tượng tham gia bảo hiểm y tế 1. 
Nhóm do người lao động và người sử dụng lao động đóng, bao gồm: a) Người lao động làm việc theo hợp đồng lao động không xác định thời hạn, hợp đồng lao động có thời hạn từ đủ 3 tháng trở lên; người lao động là người quản lý doanh nghiệp hưởng tiền lương; cán bộ, công chức, viên chức (sau đây gọi chung là người lao động); b) Người hoạt động không chuyên trách ở xã, phường, thị trấn theo quy định của pháp luật.= ... 4. Nhóm được ngân sách nhà nước hỗ trợ mức đóng, bao gồm: a) Người thuộc hộ gia đình cận nghèo; b) Học sinh, sinh viên. 5. Nhóm tham gia bảo hiểm y tế theo hộ gia đình gồm những người thuộc hộ gia đình, trừ đối tượng quy định tại các khoản 1, 2, 3 và 4 Điều này. 6. Chính phủ quy định các đối tượng khác ngoài các đối tượng quy định tại các khoản 3, 4 và 5 Điều này; quy định việc cấp thẻ bảo hiểm y tế đối với đối tượng do Bộ Quốc phòng, Bộ Công an quản lý và đối tượng quy định tại điểm 1 khoản 3 Điều này; quy định lộ trình thực hiện bảo hiểm y tế, phạm vi quyền lợi, mức hưởng bảo hiểm y tế, khám bệnh, chữa bệnh bảo hiểm y tế, quản lý, sử dụng phần kinh phí dành cho khám bệnh, chữa bệnh bảo hiểm y tế, giám định bảo hiểm y tế, thanh toán, quyết toán bảo hiểm y tế đối với các đối tượng quy định tại điểm a khoản 3 Điều này.”' --- # SentenceTransformer based on keepitreal/vietnamese-sbert This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [keepitreal/vietnamese-sbert](https://huggingface.co/keepitreal/vietnamese-sbert) on the csv dataset. It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more. ## Model Details ### Model Description - **Model Type:** Sentence Transformer - **Base model:** [keepitreal/vietnamese-sbert](https://huggingface.co/keepitreal/vietnamese-sbert) <!-- at revision a9467ef2ef47caa6448edeabfd8e5e5ce0fa2a23 --> - **Maximum Sequence Length:** 256 tokens - **Output Dimensionality:** 768 tokens - **Similarity Function:** Cosine Similarity - **Training Dataset:** - csv <!-- - **Language:** Unknown --> <!-- - **License:** Unknown --> ### Model Sources - **Documentation:** [Sentence Transformers Documentation](https://sbert.net) - **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers) - **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers) ### Full Model Architecture ``` SentenceTransformer( (0): Transformer({'max_seq_length': 256, 'do_lower_case': False}) with Transformer model: RobertaModel (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True}) ) ``` ## Usage ### Direct Usage (Sentence Transformers) First install the Sentence Transformers library: ```bash pip install -U sentence-transformers ``` Then you can load this model and run inference. ```python from sentence_transformers import SentenceTransformer # Download from the 🤗 Hub model = SentenceTransformer("Cloyne/vietnamese-embedding_finetuned") # Run inference sentences = [ 'NLĐ ký nhiều hợp đồng lao động thì đóng BHYT như thế nào?', '"Sửa đổi, bổ sung một số điều của Luật bảo hiểm y tế:\n...\n6. Sửa đổi, bổ sung Điều 12 như sau:\n“Điều 12. Đối tượng tham gia bảo hiểm y tế\n1. 
Nhóm do người lao động và người sử dụng lao động đóng, bao gồm:\na) Người lao động làm việc theo hợp đồng lao động không xác định thời hạn, hợp đồng lao động có thời hạn từ đủ 3 tháng trở lên; người lao động là người quản lý doanh nghiệp hưởng tiền lương; cán bộ, công chức, viên chức (sau đây gọi chung là người lao động);\nb) Người hoạt động không chuyên trách ở xã, phường, thị trấn theo quy định của pháp luật.=\n...\n4. Nhóm được ngân sách nhà nước hỗ trợ mức đóng, bao gồm:\na) Người thuộc hộ gia đình cận nghèo;\nb) Học sinh, sinh viên.\n5. Nhóm tham gia bảo hiểm y tế theo hộ gia đình gồm những người thuộc hộ gia đình, trừ đối tượng quy định tại các khoản 1, 2, 3 và 4 Điều này.\n6. Chính phủ quy định các đối tượng khác ngoài các đối tượng quy định tại các khoản 3, 4 và 5 Điều này; quy định việc cấp thẻ bảo hiểm y tế đối với đối tượng do Bộ Quốc phòng, Bộ Công an quản lý và đối tượng quy định tại điểm 1 khoản 3 Điều này; quy định lộ trình thực hiện bảo hiểm y tế, phạm vi quyền lợi, mức hưởng bảo hiểm y tế, khám bệnh, chữa bệnh bảo hiểm y tế, quản lý, sử dụng phần kinh phí dành cho khám bệnh, chữa bệnh bảo hiểm y tế, giám định bảo hiểm y tế, thanh toán, quyết toán bảo hiểm y tế đối với các đối tượng quy định tại điểm a khoản 3 Điều này.”', 'Hồ sơ, thủ tục xác định trường hợp được bồi thường\n[...]\n3. Trong thời hạn 05 ngày làm việc, kể từ ngày nhận được đơn và các giấy tờ hợp lệ, nếu xác định yêu cầu thuộc trách nhiệm giải quyết của mình thì Sở Y tế phải thụ lý và thông báo bằng văn bản về việc thụ lý đơn cho người bị thiệt hại hoặc thân nhân của người bị thiệt hại (sau đây gọi tắt là người bị thiệt hại). Trường hợp hồ sơ không đầy đủ thì Sở Y tế có văn bản hướng dẫn người bị thiệt hại bổ sung.\n4. Trong thời hạn 15 ngày, kể từ ngày nhận được đơn yêu cầu của người bị thiệt hại, Sở Y tế phải hoàn thành việc xác định nguyên nhân gây tai biến, mức độ tổn thương và thông báo bằng văn bản cho người yêu cầu đồng thời báo cáo Bộ Y tế.', ] embeddings = model.encode(sentences) print(embeddings.shape) # [3, 768] # Get the similarity scores for the embeddings similarities = model.similarity(embeddings, embeddings) print(similarities.shape) # [3, 3] ``` <!-- ### Direct Usage (Transformers) <details><summary>Click to see the direct usage in Transformers</summary> </details> --> <!-- ### Downstream Usage (Sentence Transformers) You can finetune this model on your own dataset. <details><summary>Click to expand</summary> </details> --> <!-- ### Out-of-Scope Use *List how the model may foreseeably be misused and address what users ought not to do with the model.* --> <!-- ## Bias, Risks and Limitations *What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.* --> <!-- ### Recommendations *What are recommendations with respect to the foreseeable issues? 
For example, filtering explicit content.* --> ## Training Details ### Training Dataset #### csv * Dataset: csv * Size: 120,210 training samples * Columns: <code>anchor</code> and <code>positive</code> * Approximate statistics based on the first 1000 samples: | | anchor | positive | |:--------|:----------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------| | type | string | string | | details | <ul><li>min: 8 tokens</li><li>mean: 25.08 tokens</li><li>max: 49 tokens</li></ul> | <ul><li>min: 21 tokens</li><li>mean: 206.98 tokens</li><li>max: 256 tokens</li></ul> | * Samples: | anchor | positive | |:--------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | <code>Nội dung lồng ghép vấn đề bình đẳng giới trong xây dựng văn bản quy phạm pháp luật được quy định thế nào?</code> | <code>Nội dung lồng ghép vấn đề bình đẳng giới trong xây dựng văn bản quy phạm pháp luật<br>Trong phạm vi điều chỉnh của văn bản quy phạm pháp luật:<br>1. Xác định nội dung liên quan đến vấn đề bình đẳng giới hoặc vấn đề bất bình đẳng giới, phân biệt đối xử về giới.<br>2. Quy định các biện pháp cần thiết để thực hiện bình đẳng giới hoặc để giải quyết vấn đề bất bình đẳng giới, phân biệt đối xử về giới; dự báo tác động của các quy định đó đối với nam và nữ sau khi được ban hành.<br>3. Xác định nguồn nhân lực, tài chính cần thiết để triển khai các biện pháp thực hiện bình đẳng giới hoặc để giải quyết vấn đề bất bình đẳng giới, phân biệt đối xử về giới.</code> | | <code>Điều kiện để giáo viên trong cơ sở giáo dục mầm non, tiểu học ngoài công lập bị ảnh hưởng bởi Covid-19 được hưởng chính sách hỗ trợ là gì?</code> | <code>Điều kiện được hưởng<br>Cán bộ quản lý, giáo viên, nhân viên được hưởng chính sách khi bảo đảm các điều kiện sau:<br>1. 
Là người đang làm việc tại cơ sở giáo dục ngoài công lập trước khi cơ sở phải tạm dừng hoạt động theo yêu cầu của cơ quan nhà nước có thẩm quyền để phòng, chống dịch COVID-19 tính từ ngày 01 tháng 5 năm 2021 đến hết ngày 31 tháng 12 năm 2021.<br>2. Nghỉ việc không hưởng lương từ 01 tháng trở lên tính từ ngày 01 tháng 5 năm 2021 đến hết ngày 31 tháng 12 năm 2021.<br>3. Chưa được hưởng chính sách hỗ trợ đối với người lao động tạm hoãn hợp đồng lao động, nghỉ việc không hưởng lương theo quy định tại khoản 4, khoản 5, khoản 6 Mục II Nghị quyết số 68/NQ-CP ngày 01 tháng 7 năm 2021 của Chính phủ về một số chính sách hỗ trợ người lao động và người sử dụng lao động gặp khó khăn do đại dịch COVID-19, Nghị quyết số 126/NQ-CP ngày 08 tháng 10 năm 2021 của Chính phủ sửa đổi, bổ sung Nghị quyết số 68/NQ-CP ngày 01 tháng 7 năm 2021 của Chính phủ về một số chính sách hỗ trợ người lao động và người sử dụng lao động gặp khó khăn do đại dịch COVID-19 (sau đây gọi tắt là Nghị quyết số 68/NQ-CP) do không tham gia Bảo hiểm xã hội bắt buộc.<br>4. Có xác nhận làm việc tại cơ sở giáo dục ngoài công lập ít nhất hết năm học 2021 - 2022 theo kế hoạch năm học của địa phương, bao gồm cơ sở giáo dục ngoài công lập đã làm việc trước đây hoặc cơ sở giáo dục ngoài công lập khác trong trường hợp cơ sở giáo dục ngoài công lập trước đây làm việc không hoạt động trở lại.</code> | | <code>Nguyên tắc áp dụng phụ cấp ưu đãi nghề y tế thế nào?</code> | <code>Nguyên tắc áp dụng<br>1. Trường hợp công chức, viên chức chuyên môn y tế thuộc đối tượng được hưởng các mức phụ cấp ưu đãi theo nghề khác nhau thì được hưởng một mức phụ cấp ưu đãi theo nghề cao nhất.<br>2. Công chức, viên chức đã hưởng phụ cấp ưu đãi theo nghề quy định tại Thông tư liên tịch số 06/2010/TTLT-BYT-BNV-BTC ngày 22/3/2010 của Bộ Y tế, Bộ Nội vụ, Bộ Tài chính hướng dẫn thực hiện Nghị định số 64/2009/NĐ-CP ngày 30/7/2009 của Chính phủ về chính sách đối với cán bộ, viên chức y tế công tác ở vùng có điều kiện kinh tế - xã hội đặc biệt khó khăn thì không hưởng phụ cấp ưu đãi theo nghề quy định tại Thông tư liên tịch này.</code> | * Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters: ```json { "scale": 20.0, "similarity_fct": "cos_sim" } ``` ### Evaluation Dataset #### train * Dataset: train * Size: 13,357 evaluation samples * Columns: <code>anchor</code> and <code>positive</code> * Approximate statistics based on the first 1000 samples: | | anchor | positive | |:--------|:----------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------| | type | string | string | | details | <ul><li>min: 7 tokens</li><li>mean: 24.61 tokens</li><li>max: 51 tokens</li></ul> | <ul><li>min: 17 tokens</li><li>mean: 202.71 tokens</li><li>max: 256 tokens</li></ul> | * Samples: | anchor | positive | 
|:-------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | <code>Toà án cấp nào có thẩm quyền giải quyết việc đòi tài sản đã cho người khác vay theo hợp đồng cho vay?</code> | <code>"Điều 35. Thẩm quyền của Tòa án nhân dân cấp huyện<br>1. Tòa án nhân dân cấp huyện có thẩm quyền giải quyết theo thủ tục sơ thẩm những tranh chấp sau đây:<br>a) Tranh chấp về dân sự, hôn nhân và gia đình quy định tại Điều 26 và Điều 28 của Bộ luật này, trừ tranh chấp quy định tại khoản 7 Điều 26 của Bộ luật này;<br>b) Tranh chấp về kinh doanh, thương mại quy định tại khoản 1 Điều 30 của Bộ luật này;<br>c) Tranh chấp về lao động quy định tại Điều 32 của Bộ luật này.<br>2. Tòa án nhân dân cấp huyện có thẩm quyền giải quyết những yêu cầu sau đây:<br>a) Yêu cầu về dân sự quy định tại các khoản 1, 2, 3, 4, 6, 7, 8, 9 và 10 Điều 27 của Bộ luật này;<br>b) Yêu cầu về hôn nhân và gia đình quy định tại các khoản 1, 2, 3, 4, 5, 6, 7, 8, 10 và 11 Điều 29 của Bộ luật này;<br>c) Yêu cầu về kinh doanh, thương mại quy định tại khoản 1 và khoản 6 Điều 31 của Bộ luật này;<br>d) Yêu cầu về lao động quy định tại khoản 1 và khoản 5 Điều 33 của Bộ luật này.<br>3. Những tranh chấp, yêu cầu quy định tại khoản 1 và khoản 2 Điều này mà có đương sự hoặc tài sản ở nước ngoài hoặc cần phải ủy thác tư pháp cho cơ quan đại diện nước Cộng hòa xã hội chủ nghĩa Việt Nam ở nước ngoài, cho Tòa án, cơ quan có thẩm quyền của nước ngoài không thuộc thẩm quyền giải quyết của Tòa án nhân dân cấp huyện, trừ trường hợp quy định tại khoản 4 Điều này.<br>4. 
Tòa án nhân dân cấp huyện nơi cư trú của công dân Việt Nam hủy việc kết hôn trái pháp luật, giải quyết việc ly hôn, các tranh chấp về quyền và nghĩa vụ của vợ chồng, cha mẹ và con, về nhận cha, mẹ, con, nuôi con nuôi và giám hộ giữa công dân Việt Nam cư trú ở khu vực biên giới với công dân của nước láng giềng cùng cư trú ở khu vực biên giới với Việt Nam theo quy định của Bộ luật này và các quy định khác của pháp luật Việt Nam."</code> | | <code>Những phiếu bầu nào được xem là không hợp lệ?</code> | <code>Phiếu bầu không hợp lệ<br>1. Những phiếu bầu sau đây là phiếu bầu không hợp lệ:<br>a) Phiếu không theo mẫu quy định do Tổ bầu cử phát ra;<br>b) Phiếu không có dấu của Tổ bầu cử;<br>c) Phiếu để số người được bầu nhiều hơn số lượng đại biểu được bầu đã ấn định cho đơn vị bầu cử;<br>d) Phiếu gạch xóa hết tên những người ứng cử;<br>đ) Phiếu ghi thêm tên người ngoài danh sách những người ứng cử hoặc phiếu có ghi thêm nội dung khác.<br>2. Trường hợp có phiếu bầu được cho là không hợp lệ thì Tổ trường Tổ bầu cử đưa ra để toàn Tổ xem xét, quyết định. Tổ bầu cử không được gạch xóa hoặc sửa các tên ghi trên phiếu bầu.</code> | | <code>Đề nghị tạm đình chỉ chấp hành quyết định áp dụng biện pháp đưa vào trường giáo dưỡng cho học sinh cần đảm bảo nguyên tắc gì?</code> | <code>Nguyên tắc xét duyệt, đề nghị giảm thời hạn, tạm đình chỉ chấp hành quyết định, miễn chấp hành phần thời gian còn lại cho học sinh trường giáo dưỡng, trại viên cơ sở giáo dục bắt buộc<br>1. Tuân thủ quy định của pháp luật về thi hành biện pháp xử lý hành chính đưa vào trường giáo dưỡng, cơ sở giáo dục bắt buộc, quy định tại Thông tư này và quy định của pháp luật có liên quan.<br>2. Bảo đảm khách quan, công khai, minh bạch, đúng trình tự, thủ tục, thẩm quyền; tôn trọng và bảo vệ quyền, lợi ích hợp pháp của học sinh trường giáo dưỡng, trại viên cơ sở giáo dục bắt buộc.</code> | * Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters: ```json { "scale": 20.0, "similarity_fct": "cos_sim" } ``` ### Training Hyperparameters #### Non-Default Hyperparameters - `eval_strategy`: steps - `per_device_train_batch_size`: 16 - `per_device_eval_batch_size`: 32 - `num_train_epochs`: 4 - `warmup_ratio`: 0.1 - `fp16`: True - `batch_sampler`: no_duplicates #### All Hyperparameters <details><summary>Click to expand</summary> - `overwrite_output_dir`: False - `do_predict`: False - `eval_strategy`: steps - `prediction_loss_only`: True - `per_device_train_batch_size`: 16 - `per_device_eval_batch_size`: 32 - `per_gpu_train_batch_size`: None - `per_gpu_eval_batch_size`: None - `gradient_accumulation_steps`: 1 - `eval_accumulation_steps`: None - `torch_empty_cache_steps`: None - `learning_rate`: 5e-05 - `weight_decay`: 0.0 - `adam_beta1`: 0.9 - `adam_beta2`: 0.999 - `adam_epsilon`: 1e-08 - `max_grad_norm`: 1.0 - `num_train_epochs`: 4 - `max_steps`: -1 - `lr_scheduler_type`: linear - `lr_scheduler_kwargs`: {} - `warmup_ratio`: 0.1 - `warmup_steps`: 0 - `log_level`: passive - `log_level_replica`: warning - `log_on_each_node`: True - `logging_nan_inf_filter`: True - `save_safetensors`: True - `save_on_each_node`: False - `save_only_model`: False - `restore_callback_states_from_checkpoint`: False - `no_cuda`: False - `use_cpu`: False - `use_mps_device`: False - `seed`: 42 - `data_seed`: None - `jit_mode_eval`: False - `use_ipex`: False - `bf16`: False - `fp16`: True - `fp16_opt_level`: O1 - `half_precision_backend`: auto - `bf16_full_eval`: 
False - `fp16_full_eval`: False - `tf32`: None - `local_rank`: 0 - `ddp_backend`: None - `tpu_num_cores`: None - `tpu_metrics_debug`: False - `debug`: [] - `dataloader_drop_last`: False - `dataloader_num_workers`: 0 - `dataloader_prefetch_factor`: None - `past_index`: -1 - `disable_tqdm`: False - `remove_unused_columns`: True - `label_names`: None - `load_best_model_at_end`: False - `ignore_data_skip`: False - `fsdp`: [] - `fsdp_min_num_params`: 0 - `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False} - `fsdp_transformer_layer_cls_to_wrap`: None - `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None} - `deepspeed`: None - `label_smoothing_factor`: 0.0 - `optim`: adamw_torch - `optim_args`: None - `adafactor`: False - `group_by_length`: False - `length_column_name`: length - `ddp_find_unused_parameters`: None - `ddp_bucket_cap_mb`: None - `ddp_broadcast_buffers`: False - `dataloader_pin_memory`: True - `dataloader_persistent_workers`: False - `skip_memory_metrics`: True - `use_legacy_prediction_loop`: False - `push_to_hub`: False - `resume_from_checkpoint`: None - `hub_model_id`: None - `hub_strategy`: every_save - `hub_private_repo`: False - `hub_always_push`: False - `gradient_checkpointing`: False - `gradient_checkpointing_kwargs`: None - `include_inputs_for_metrics`: False - `eval_do_concat_batches`: True - `fp16_backend`: auto - `push_to_hub_model_id`: None - `push_to_hub_organization`: None - `mp_parameters`: - `auto_find_batch_size`: False - `full_determinism`: False - `torchdynamo`: None - `ray_scope`: last - `ddp_timeout`: 1800 - `torch_compile`: False - `torch_compile_backend`: None - `torch_compile_mode`: None - `dispatch_batches`: None - `split_batches`: None - `include_tokens_per_second`: False - `include_num_input_tokens_seen`: False - `neftune_noise_alpha`: None - `optim_target_modules`: None - `batch_eval_metrics`: False - `eval_on_start`: False - `use_liger_kernel`: False - `eval_use_gather_object`: False - `batch_sampler`: no_duplicates - `multi_dataset_batch_sampler`: proportional </details> ### Training Logs | Epoch | Step | Training Loss | train loss | |:------:|:-----:|:-------------:|:----------:| | 0.1331 | 500 | 0.3247 | 0.2239 | | 0.2662 | 1000 | 0.1513 | 0.1605 | | 0.3993 | 1500 | 0.119 | 0.1664 | | 0.5323 | 2000 | 0.1047 | 0.1384 | | 0.6654 | 2500 | 0.0915 | 0.1269 | | 0.7985 | 3000 | 0.0861 | 0.1140 | | 0.9316 | 3500 | 0.0839 | 0.1091 | | 1.0647 | 4000 | 0.0693 | 0.0989 | | 1.1978 | 4500 | 0.0582 | 0.0931 | | 1.3308 | 5000 | 0.0457 | 0.0953 | | 1.4639 | 5500 | 0.0284 | 0.0826 | | 1.5970 | 6000 | 0.0233 | 0.0848 | | 1.7301 | 6500 | 0.0256 | 0.0785 | | 1.8632 | 7000 | 0.0236 | 0.0829 | | 1.9963 | 7500 | 0.0203 | 0.0827 | | 2.1294 | 8000 | 0.0182 | 0.0730 | | 2.2624 | 8500 | 0.0143 | 0.0718 | | 2.3955 | 9000 | 0.0103 | 0.0720 | | 2.5286 | 9500 | 0.0086 | 0.0720 | | 2.6617 | 10000 | 0.0058 | 0.0706 | | 2.7948 | 10500 | 0.0074 | 0.0675 | | 2.9279 | 11000 | 0.0073 | 0.0650 | | 3.0610 | 11500 | 0.0054 | 0.0651 | | 3.1940 | 12000 | 0.0043 | 0.0639 | | 3.3271 | 12500 | 0.004 | 0.0626 | | 3.4602 | 13000 | 0.0035 | 0.0617 | | 3.5933 | 13500 | 0.0022 | 0.0614 | | 3.7264 | 14000 | 0.003 | 0.0624 | | 3.8595 | 14500 | 0.0022 | 0.0616 | | 3.9925 | 15000 | 0.0028 | 0.0606 | ### Framework Versions - Python: 3.10.14 - Sentence Transformers: 3.2.1 - Transformers: 4.45.1 - PyTorch: 2.4.0 - Accelerate: 
0.34.2 - Datasets: 3.0.1 - Tokenizers: 0.20.0 ## Citation ### BibTeX #### Sentence Transformers ```bibtex @inproceedings{reimers-2019-sentence-bert, title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks", author = "Reimers, Nils and Gurevych, Iryna", booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing", month = "11", year = "2019", publisher = "Association for Computational Linguistics", url = "https://arxiv.org/abs/1908.10084", } ``` #### MultipleNegativesRankingLoss ```bibtex @misc{henderson2017efficient, title={Efficient Natural Language Response Suggestion for Smart Reply}, author={Matthew Henderson and Rami Al-Rfou and Brian Strope and Yun-hsuan Sung and Laszlo Lukacs and Ruiqi Guo and Sanjiv Kumar and Balint Miklos and Ray Kurzweil}, year={2017}, eprint={1705.00652}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` <!-- ## Glossary *Clearly define terms in order to be accessible across audiences.* --> <!-- ## Model Card Authors *Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.* --> <!-- ## Model Card Contact *Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.* -->
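The training recipe described above (MultipleNegativesRankingLoss over (anchor, positive) CSV pairs, 4 epochs, batch size 16, warmup ratio 0.1, fp16, no-duplicates batch sampling) can be approximated with the Sentence Transformers v3 trainer. The sketch below is illustrative only: the CSV file names are assumptions, since the card does not publish the underlying dataset files.

```python
from datasets import load_dataset
from sentence_transformers import (
    SentenceTransformer,
    SentenceTransformerTrainer,
    SentenceTransformerTrainingArguments,
)
from sentence_transformers.losses import MultipleNegativesRankingLoss
from sentence_transformers.training_args import BatchSamplers

# Hypothetical file names -- the card only states the data is a CSV of (anchor, positive) pairs.
dataset = load_dataset("csv", data_files={"train": "train.csv", "eval": "eval.csv"})

model = SentenceTransformer("keepitreal/vietnamese-sbert")
loss = MultipleNegativesRankingLoss(model)  # scale=20.0 and cosine similarity are the defaults

args = SentenceTransformerTrainingArguments(
    output_dir="vietnamese-embedding_finetuned",
    num_train_epochs=4,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=32,
    warmup_ratio=0.1,
    fp16=True,
    eval_strategy="steps",
    batch_sampler=BatchSamplers.NO_DUPLICATES,  # avoid in-batch false negatives from duplicate texts
)

trainer = SentenceTransformerTrainer(
    model=model,
    args=args,
    train_dataset=dataset["train"],
    eval_dataset=dataset["eval"],
    loss=loss,
)
trainer.train()
```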
[ "TEXT_CLASSIFICATION" ]
[ "CHIA" ]
mini1013/master_cate_sl4
mini1013
text-classification
[ "setfit", "safetensors", "roberta", "sentence-transformers", "text-classification", "generated_from_setfit_trainer", "arxiv:2209.11055", "base_model:mini1013/master_domain", "base_model:finetune:mini1013/master_domain", "model-index", "region:us" ]
2025-01-21T07:00:42
2025-01-21T07:01:05
157
0
--- base_model: mini1013/master_domain library_name: setfit metrics: - accuracy pipeline_tag: text-classification tags: - setfit - sentence-transformers - text-classification - generated_from_setfit_trainer widget: - text: 가퍼 스포츠 낚시 벨트 어깨 하 해상 스탠드업 물고기 싸움 로드 홀더 스포츠/레저>낚시>낚시의류/잡화>힙커버/힙가드 - text: 낚시 태클박스 36리터 세트8 초경량 멀티 테이블 의자 받침대 루어 민물 바다 케리어 BSS158-3 스포츠/레저>낚시>낚시용품>태클박스 - text: 메이저 크래프트 자이언트 킬링 Major Craft GK5SJ-B663 스포츠/레저>낚시>루어낚시>루어낚시세트 - text: 갸프 낚싯대 용골 핸들 땀 흡수 스트랩 미끄럼 방지 절연 라켓 손잡이 커버 스포츠/레저>낚시>낚시용품>가프 - text: 송어베이스 루어 세트 스푼 미끼 스피너 보빈 인공 스포츠/레저>낚시>루어낚시>루어낚시세트 inference: true model-index: - name: SetFit with mini1013/master_domain results: - task: type: text-classification name: Text Classification dataset: name: Unknown type: unknown split: test metrics: - type: accuracy value: 1.0 name: Accuracy --- # SetFit with mini1013/master_domain This is a [SetFit](https://github.com/huggingface/setfit) model that can be used for Text Classification. This SetFit model uses [mini1013/master_domain](https://huggingface.co/mini1013/master_domain) as the Sentence Transformer embedding model. A [LogisticRegression](https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.LogisticRegression.html) instance is used for classification. The model has been trained using an efficient few-shot learning technique that involves: 1. Fine-tuning a [Sentence Transformer](https://www.sbert.net) with contrastive learning. 2. Training a classification head with features from the fine-tuned Sentence Transformer. ## Model Details ### Model Description - **Model Type:** SetFit - **Sentence Transformer body:** [mini1013/master_domain](https://huggingface.co/mini1013/master_domain) - **Classification head:** a [LogisticRegression](https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.LogisticRegression.html) instance - **Maximum Sequence Length:** 512 tokens - **Number of Classes:** 8 classes <!-- - **Training Dataset:** [Unknown](https://huggingface.co/datasets/unknown) --> <!-- - **Language:** Unknown --> <!-- - **License:** Unknown --> ### Model Sources - **Repository:** [SetFit on GitHub](https://github.com/huggingface/setfit) - **Paper:** [Efficient Few-Shot Learning Without Prompts](https://arxiv.org/abs/2209.11055) - **Blogpost:** [SetFit: Efficient Few-Shot Learning Without Prompts](https://huggingface.co/blog/setfit) ### Model Labels | Label | Examples | |:------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 7.0 | <ul><li>'다이와 DAIWA 한국다이와정공 소품케이스 클리어 파우치 S C 스포츠/레저>낚시>바다낚시>찌케이스'</li><li>'갓포스 고급 루어 낚시가방 루어대 원투대 하드 로드케이스 낚시대수납 단품 112CM-157CM 스포츠/레저>낚시>바다낚시>바다낚시가방'</li><li>'다이와 포터블 휴대용 로드케이스 B 140R 스포츠/레저>낚시>바다낚시>바다낚시가방'</li></ul> | | 3.0 | <ul><li>'이공조구 원 포인트 바다루어낚싯대 S180 스포츠/레저>낚시>낚싯대>바다루어낚싯대'</li><li>'엔에스 블랙 매직아이 슬로우피치 바다루어낚싯대 B-592H3MF 스포츠/레저>낚시>낚싯대>바다루어낚싯대'</li><li>'은성 실스타 DHC 명파S 민물낚싯대 30칸 스포츠/레저>낚시>낚싯대>민물낚싯대'</li></ul> | | 1.0 | <ul><li>'메이호 태클박스 루어케이스 도구통 지그통 VS-388DD 스포츠/레저>낚시>낚시용품>태클박스'</li><li>'다이와 쿨라인 알파 3 펄 TS2000 스포츠/레저>낚시>낚시용품>쿨백'</li><li>'슬라이드 낚시 쪽가위 라인커터기 합사가위 T74464474 스포츠/레저>낚시>낚시공구>가위/라인커터/핀온릴'</li></ul> | | 5.0 | <ul><li>'다미끼 맘바2 러버지그-배스 루어 민물루어 1 2oz 스포츠/레저>낚시>루어낚시>하드베이트'</li><li>'루어 낚시 가물치 배스 5pcs 개구리 세트 프로그 스포츠/레저>낚시>루어낚시>루어낚시세트'</li><li>'KFP 미노우 KS01 하드베이트 싱킹타입 루어 포퍼 웜 크랭크 프로팅 싱킹 배스 미끼 농어 베이트 스포츠/레저>낚시>루어낚시>하드베이트'</li></ul> | | 0.0 | <ul><li>'다이와 레브로스 스피닝릴 LT2500D-XH 
스포츠/레저>낚시>낚시릴>스피닝릴'</li><li>'바낙스 LJ100x 장구통릴 티탄 스포츠/레저>낚시>낚시릴>베이트릴'</li><li>'시마노 FX 1000 스피닝릴 스포츠/레저>낚시>낚시릴>스피닝릴'</li></ul> | | 4.0 | <ul><li>'가마라 쇼크리더 카본 목줄 50m 6호 GFLUORO506 스포츠/레저>낚시>낚싯줄>카본라인'</li><li>'선라인 토네이도 마츠다 스페셜 블랙 스트림 낚싯줄 70m 1.75호 스포츠/레저>낚시>낚싯줄>카본라인'</li><li>'선라인 슈터 FC 스나이퍼 100m 4.5LB 스포츠/레저>낚시>낚싯줄>카본라인'</li></ul> | | 2.0 | <ul><li>'다이와 낚시화 부츠 운동화 스파이크 슈즈 DAIWA 일본직구 DS-2150CD 스포츠/레저>낚시>낚시의류/잡화>낚시신발'</li><li>'HDF 해동 피나투라 올컷 방한 덮개장갑 낚시장갑 스포츠/레저>낚시>낚시의류/잡화>낚시장갑'</li><li>'가마가츠 낚시 코듀라 힙가드 로우백 타입 단일사이즈 GM3727 스포츠/레저>낚시>낚시의류/잡화>힙커버/힙가드'</li></ul> | | 6.0 | <ul><li>'루웍스 빙어 초릿대 23cm 스포츠/레저>낚시>민물낚시>얼음낚시'</li><li>'바다 민물 고기 낚시대 보관 수납 가방 하드케이스 스포츠/레저>낚시>민물낚시>민물낚시가방'</li><li>'고급 내림찌케이스 대형찌보관함 플로팅 보관박스 스포츠/레저>낚시>민물낚시>찌케이스'</li></ul> | ## Evaluation ### Metrics | Label | Accuracy | |:--------|:---------| | **all** | 1.0 | ## Uses ### Direct Use for Inference First install the SetFit library: ```bash pip install setfit ``` Then you can load this model and run inference. ```python from setfit import SetFitModel # Download from the 🤗 Hub model = SetFitModel.from_pretrained("mini1013/master_cate_sl4") # Run inference preds = model("송어베이스 루어 세트 스푼 미끼 스피너 보빈 인공 스포츠/레저>낚시>루어낚시>루어낚시세트") ``` <!-- ### Downstream Use *List how someone could finetune this model on their own dataset.* --> <!-- ### Out-of-Scope Use *List how the model may foreseeably be misused and address what users ought not to do with the model.* --> <!-- ## Bias, Risks and Limitations *What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.* --> <!-- ### Recommendations *What are recommendations with respect to the foreseeable issues? For example, filtering explicit content.* --> ## Training Details ### Training Set Metrics | Training set | Min | Median | Max | |:-------------|:----|:-------|:----| | Word count | 2 | 7.8018 | 19 | | Label | Training Sample Count | |:------|:----------------------| | 0.0 | 70 | | 1.0 | 70 | | 2.0 | 70 | | 3.0 | 70 | | 4.0 | 70 | | 5.0 | 70 | | 6.0 | 70 | | 7.0 | 70 | ### Training Hyperparameters - batch_size: (256, 256) - num_epochs: (30, 30) - max_steps: -1 - sampling_strategy: oversampling - num_iterations: 50 - body_learning_rate: (2e-05, 1e-05) - head_learning_rate: 0.01 - loss: CosineSimilarityLoss - distance_metric: cosine_distance - margin: 0.25 - end_to_end: False - use_amp: False - warmup_proportion: 0.1 - l2_weight: 0.01 - seed: 42 - eval_max_steps: -1 - load_best_model_at_end: False ### Training Results | Epoch | Step | Training Loss | Validation Loss | |:-------:|:----:|:-------------:|:---------------:| | 0.0091 | 1 | 0.4946 | - | | 0.4545 | 50 | 0.5017 | - | | 0.9091 | 100 | 0.2322 | - | | 1.3636 | 150 | 0.0559 | - | | 1.8182 | 200 | 0.0182 | - | | 2.2727 | 250 | 0.0165 | - | | 2.7273 | 300 | 0.0018 | - | | 3.1818 | 350 | 0.0001 | - | | 3.6364 | 400 | 0.0001 | - | | 4.0909 | 450 | 0.0001 | - | | 4.5455 | 500 | 0.0 | - | | 5.0 | 550 | 0.0 | - | | 5.4545 | 600 | 0.0 | - | | 5.9091 | 650 | 0.0 | - | | 6.3636 | 700 | 0.0 | - | | 6.8182 | 750 | 0.0 | - | | 7.2727 | 800 | 0.0 | - | | 7.7273 | 850 | 0.0 | - | | 8.1818 | 900 | 0.0 | - | | 8.6364 | 950 | 0.0 | - | | 9.0909 | 1000 | 0.0 | - | | 9.5455 | 1050 | 0.0 | - | | 10.0 | 1100 | 0.0 | - | | 10.4545 | 1150 | 0.0 | - | | 10.9091 | 1200 | 0.0 | - | | 11.3636 | 1250 | 0.0 | - | | 11.8182 | 1300 | 0.0 | - | | 12.2727 | 1350 | 0.0 | - | | 12.7273 | 1400 | 0.0 | - | | 13.1818 | 1450 | 0.0 | - | | 13.6364 | 1500 | 0.0 | - | | 14.0909 | 1550 | 0.0 | - | | 14.5455 | 1600 
| 0.0 | - | | 15.0 | 1650 | 0.0 | - | | 15.4545 | 1700 | 0.0 | - | | 15.9091 | 1750 | 0.0 | - | | 16.3636 | 1800 | 0.0 | - | | 16.8182 | 1850 | 0.0 | - | | 17.2727 | 1900 | 0.0 | - | | 17.7273 | 1950 | 0.0 | - | | 18.1818 | 2000 | 0.0 | - | | 18.6364 | 2050 | 0.0 | - | | 19.0909 | 2100 | 0.0 | - | | 19.5455 | 2150 | 0.0 | - | | 20.0 | 2200 | 0.0 | - | | 20.4545 | 2250 | 0.0 | - | | 20.9091 | 2300 | 0.0 | - | | 21.3636 | 2350 | 0.0 | - | | 21.8182 | 2400 | 0.0 | - | | 22.2727 | 2450 | 0.0 | - | | 22.7273 | 2500 | 0.0 | - | | 23.1818 | 2550 | 0.0 | - | | 23.6364 | 2600 | 0.0 | - | | 24.0909 | 2650 | 0.0 | - | | 24.5455 | 2700 | 0.0 | - | | 25.0 | 2750 | 0.0 | - | | 25.4545 | 2800 | 0.0 | - | | 25.9091 | 2850 | 0.0 | - | | 26.3636 | 2900 | 0.0 | - | | 26.8182 | 2950 | 0.0 | - | | 27.2727 | 3000 | 0.0 | - | | 27.7273 | 3050 | 0.0 | - | | 28.1818 | 3100 | 0.0 | - | | 28.6364 | 3150 | 0.0 | - | | 29.0909 | 3200 | 0.0 | - | | 29.5455 | 3250 | 0.0 | - | | 30.0 | 3300 | 0.0 | - | ### Framework Versions - Python: 3.10.12 - SetFit: 1.1.0 - Sentence Transformers: 3.3.1 - Transformers: 4.44.2 - PyTorch: 2.2.0a0+81ea7a4 - Datasets: 3.2.0 - Tokenizers: 0.19.1 ## Citation ### BibTeX ```bibtex @article{https://doi.org/10.48550/arxiv.2209.11055, doi = {10.48550/ARXIV.2209.11055}, url = {https://arxiv.org/abs/2209.11055}, author = {Tunstall, Lewis and Reimers, Nils and Jo, Unso Eun Seo and Bates, Luke and Korat, Daniel and Wasserblat, Moshe and Pereg, Oren}, keywords = {Computation and Language (cs.CL), FOS: Computer and information sciences, FOS: Computer and information sciences}, title = {Efficient Few-Shot Learning Without Prompts}, publisher = {arXiv}, year = {2022}, copyright = {Creative Commons Attribution 4.0 International} } ``` <!-- ## Glossary *Clearly define terms in order to be accessible across audiences.* --> <!-- ## Model Card Authors *Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.* --> <!-- ## Model Card Contact *Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.* -->
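The SetFit procedure summarised above (contrastive fine-tuning of the Sentence Transformer body, then a LogisticRegression head) can be reproduced with the `setfit` trainer. The snippet below is a minimal sketch using a toy two-example dataset in place of the real training set (70 product titles per each of the 8 numeric labels); the hyperparameter values are taken from the card.

```python
from datasets import Dataset
from setfit import SetFitModel, Trainer, TrainingArguments

# Toy stand-in data -- the actual training set has 70 samples per class for 8 classes.
train_dataset = Dataset.from_dict({
    "text": [
        "시마노 FX 1000 스피닝릴 스포츠/레저>낚시>낚시릴>스피닝릴",
        "메이호 태클박스 루어케이스 도구통 지그통 VS-388DD 스포츠/레저>낚시>낚시용품>태클박스",
    ],
    "label": [0.0, 1.0],
})

model = SetFitModel.from_pretrained("mini1013/master_domain")

args = TrainingArguments(
    batch_size=256,
    num_epochs=30,
    body_learning_rate=2e-05,
    head_learning_rate=0.01,
)

trainer = Trainer(model=model, args=args, train_dataset=train_dataset)
trainer.train()

# The trained model predicts the numeric category label of a product title.
preds = trainer.model.predict(["송어베이스 루어 세트 스푼 미끼 스피너 보빈 인공"])
print(preds)
```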
[ "TEXT_CLASSIFICATION" ]
[ "CRAFT" ]
BSC-LT/roberta-base-biomedical-clinical-es
BSC-LT
fill-mask
[ "transformers", "pytorch", "roberta", "fill-mask", "biomedical", "clinical", "spanish", "es", "arxiv:2109.03570", "arxiv:2109.07765", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2022-03-02T23:29:04
2021-10-21T10:28:12
156
7
--- language: - es license: apache-2.0 metrics: - ppl tags: - biomedical - clinical - spanish widget: - text: El único antecedente personal a reseñar era la <mask> arterial. - text: Las radiologías óseas de cuerpo entero no detectan alteraciones <mask>, ni alteraciones vertebrales. - text: En el <mask> toraco-abdómino-pélvico no se encontraron hallazgos patológicos de interés. --- **⚠️NOTICE⚠️: THIS MODEL HAS BEEN MOVED TO THE FOLLOWING URL AND WILL SOON BE REMOVED:** https://huggingface.co/PlanTL-GOB-ES/roberta-base-biomedical-clinical-es # Biomedical-clinical language model for Spanish Biomedical pretrained language model for Spanish. For more details about the corpus, the pretraining and the evaluation, check the official [repository](https://github.com/PlanTL-SANIDAD/lm-biomedical-clinical-es) and read our [preprint](https://arxiv.org/abs/2109.03570) "_Carrino, C. P., Armengol-Estapé, J., Gutiérrez-Fandiño, A., Llop-Palao, J., Pàmies, M., Gonzalez-Agirre, A., & Villegas, M. (2021). Biomedical and Clinical Language Models for Spanish: On the Benefits of Domain-Specific Pretraining in a Mid-Resource Scenario._". ## Tokenization and model pretraining This model is a [RoBERTa-based](https://github.com/pytorch/fairseq/tree/master/examples/roberta) model trained on a **biomedical-clinical** corpus in Spanish collected from several sources (see next section). The training corpus has been tokenized using a byte version of [Byte-Pair Encoding (BPE)](https://github.com/openai/gpt-2) used in the original [RoBERTA](https://github.com/pytorch/fairseq/tree/master/examples/roberta) model with a vocabulary size of 52,000 tokens. The pretraining consists of a masked language model training at the subword level following the approach employed for the RoBERTa base model with the same hyperparameters as in the original work. The training lasted a total of 48 hours with 16 NVIDIA V100 GPUs of 16GB DDRAM, using Adam optimizer with a peak learning rate of 0.0005 and an effective batch size of 2,048 sentences. ## Training corpora and preprocessing The training corpus is composed of several biomedical corpora in Spanish, collected from publicly available corpora and crawlers, and a real-world clinical corpus collected from more than 278K clinical documents and notes. To obtain a high-quality training corpus while retaining the idiosyncrasies of the clinical language, a cleaning pipeline has been applied only to the biomedical corpora, keeping the clinical corpus uncleaned. Essentially, the cleaning operations used are: - data parsing in different formats - sentence splitting - language detection - filtering of ill-formed sentences - deduplication of repetitive contents - keep the original document boundaries Then, the biomedical corpora are concatenated and further global deduplication among the biomedical corpora have been applied. Eventually, the clinical corpus is concatenated to the cleaned biomedical corpus resulting in a medium-size biomedical-clinical corpus for Spanish composed of more than 1B tokens. The table below shows some basic statistics of the individual cleaned corpora: | Name | No. 
tokens | Description | |-----------------------------------------------------------------------------------------|-------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | [Medical crawler](https://zenodo.org/record/4561970) | 745,705,946 | Crawler of more than 3,000 URLs belonging to Spanish biomedical and health domains. | | Clinical cases misc. | 102,855,267 | A miscellany of medical content, essentially clinical cases. Note that a clinical case report is a scientific publication where medical practitioners share patient cases and it is different from a clinical note or document. | | Clinical notes/documents | 91,250,080 | Collection of more than 278K clinical documents, including discharge reports, clinical course notes and X-ray reports, for a total of 91M tokens. | | [Scielo](https://github.com/PlanTL-SANIDAD/SciELO-Spain-Crawler) | 60,007,289 | Publications written in Spanish crawled from the Spanish SciELO server in 2017. | | [BARR2_background](https://temu.bsc.es/BARR2/downloads/background_set.raw_text.tar.bz2) | 24,516,442 | Biomedical Abbreviation Recognition and Resolution (BARR2) containing Spanish clinical case study sections from a variety of clinical disciplines. | | Wikipedia_life_sciences | 13,890,501 | Wikipedia articles crawled 04/01/2021 with the [Wikipedia API python library](https://pypi.org/project/Wikipedia-API/) starting from the "Ciencias\_de\_la\_vida" category up to a maximum of 5 subcategories. Multiple links to the same articles are then discarded to avoid repeating content. | | Patents | 13,463,387 | Google Patent in Medical Domain for Spain (Spanish). The accepted codes (Medical Domain) for Json files of patents are: "A61B", "A61C","A61F", "A61H", "A61K", "A61L","A61M", "A61B", "A61P". | | [EMEA](http://opus.nlpl.eu/download.php?f=EMEA/v3/moses/en-es.txt.zip) | 5,377,448 | Spanish-side documents extracted from parallel corpora made out of PDF documents from the European Medicines Agency. | | [mespen_Medline](https://zenodo.org/record/3562536#.YTt1fH2xXbR) | 4,166,077 | Spanish-side articles extracted from a collection of Spanish-English parallel corpus consisting of biomedical scientific literature. The collection of parallel resources are aggregated from the MedlinePlus source. | | PubMed | 1,858,966 | Open-access articles from the PubMed repository crawled in 2017. | ## Evaluation and results The model has been evaluated on the Named Entity Recognition (NER) using the following datasets: - [PharmaCoNER](https://zenodo.org/record/4270158): is a track on chemical and drug mention recognition from Spanish medical texts (for more info see: https://temu.bsc.es/pharmaconer/). - [CANTEMIST](https://zenodo.org/record/3978041#.YTt5qH2xXbQ): is a shared task specifically focusing on named entity recognition of tumor morphology, in Spanish (for more info see: https://zenodo.org/record/3978041#.YTt5qH2xXbQ). - ICTUSnet: consists of 1,006 hospital discharge reports of patients admitted for stroke from 18 different Spanish hospitals. It contains more than 79,000 annotations for 51 different kinds of variables. 
The evaluation results are compared against the [mBERT](https://huggingface.co/bert-base-multilingual-cased) and [BETO](https://huggingface.co/dccuchile/bert-base-spanish-wwm-cased) models: | F1 - Precision - Recall | roberta-base-biomedical-clinical-es | mBERT | BETO | |---------------------------|----------------------------|-------------------------------|-------------------------| | PharmaCoNER | **90.04** - **88.92** - **91.18** | 87.46 - 86.50 - 88.46 | 88.18 - 87.12 - 89.28 | | CANTEMIST | **83.34** - **81.48** - **85.30** | 82.61 - 81.12 - 84.15 | 82.42 - 80.91 - 84.00 | | ICTUSnet | **88.08** - **84.92** - **91.50** | 86.75 - 83.53 - 90.23 | 85.95 - 83.10 - 89.02 | ## Intended uses & limitations The model is ready-to-use only for masked language modelling to perform the Fill Mask task (try the inference API or read the next section) However, the is intended to be fine-tuned on downstream tasks such as Named Entity Recognition or Text Classification. ## Cite If you use our models, please cite our latest preprint: ```bibtex @misc{carrino2021biomedical, title={Biomedical and Clinical Language Models for Spanish: On the Benefits of Domain-Specific Pretraining in a Mid-Resource Scenario}, author={Casimiro Pio Carrino and Jordi Armengol-Estapé and Asier Gutiérrez-Fandiño and Joan Llop-Palao and Marc Pàmies and Aitor Gonzalez-Agirre and Marta Villegas}, year={2021}, eprint={2109.03570}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` If you use our Medical Crawler corpus, please cite the preprint: ```bibtex @misc{carrino2021spanish, title={Spanish Biomedical Crawled Corpus: A Large, Diverse Dataset for Spanish Biomedical Language Models}, author={Casimiro Pio Carrino and Jordi Armengol-Estapé and Ona de Gibert Bonet and Asier Gutiérrez-Fandiño and Aitor Gonzalez-Agirre and Martin Krallinger and Marta Villegas}, year={2021}, eprint={2109.07765}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` --- --- ## How to use ```python from transformers import AutoTokenizer, AutoModelForMaskedLM tokenizer = AutoTokenizer.from_pretrained("BSC-TeMU/roberta-base-biomedical-es") model = AutoModelForMaskedLM.from_pretrained("BSC-TeMU/roberta-base-biomedical-es") from transformers import pipeline unmasker = pipeline('fill-mask', model="BSC-TeMU/roberta-base-biomedical-es") unmasker("El único antecedente personal a reseñar era la <mask> arterial.") ``` ``` # Output [ { "sequence": " El único antecedente personal a reseñar era la hipertensión arterial.", "score": 0.9855039715766907, "token": 3529, "token_str": " hipertensión" }, { "sequence": " El único antecedente personal a reseñar era la diabetes arterial.", "score": 0.0039140828885138035, "token": 1945, "token_str": " diabetes" }, { "sequence": " El único antecedente personal a reseñar era la hipotensión arterial.", "score": 0.002484665485098958, "token": 11483, "token_str": " hipotensión" }, { "sequence": " El único antecedente personal a reseñar era la Hipertensión arterial.", "score": 0.0023484621196985245, "token": 12238, "token_str": " Hipertensión" }, { "sequence": " El único antecedente personal a reseñar era la presión arterial.", "score": 0.0008009297889657319, "token": 2267, "token_str": " presión" } ] ```
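Since the card states the checkpoint is intended to be fine-tuned on downstream tasks such as NER or text classification, a minimal sketch of attaching a token-classification head is shown below. The BIO label set is purely illustrative (PharmaCoNER, CANTEMIST and ICTUSnet each define their own tag inventories), and the repository name follows the relocation notice at the top of the card.

```python
from transformers import AutoTokenizer, AutoModelForTokenClassification

# Illustrative BIO label set -- real benchmarks define their own entity tags.
labels = ["O", "B-ENT", "I-ENT"]

model_name = "PlanTL-GOB-ES/roberta-base-biomedical-clinical-es"  # new location per the notice above
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForTokenClassification.from_pretrained(
    model_name,
    num_labels=len(labels),
    id2label=dict(enumerate(labels)),
    label2id={label: i for i, label in enumerate(labels)},
)
# From here, fine-tune with the standard Trainer on token-level BIO annotations.
```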
[ "NAMED_ENTITY_RECOGNITION", "TEXT_CLASSIFICATION" ]
[ "CANTEMIST", "PHARMACONER", "SCIELO" ]
sschet/scibert_scivocab_uncased-finetuned-ner
sschet
token-classification
[ "transformers", "pytorch", "bert", "token-classification", "Named Entity Recognition", "SciBERT", "Adverse Effect", "Drug", "Medical", "en", "dataset:ade_corpus_v2", "dataset:tner/bc5cdr", "dataset:commanderstrife/jnlpba", "dataset:bc2gm_corpus", "dataset:drAbreu/bc4chemd_ner", "dataset:linnaeus", "dataset:chintagunta85/ncbi_disease", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-02-01T02:41:03
2023-02-01T03:44:18
152
0
--- datasets: - ade_corpus_v2 - tner/bc5cdr - commanderstrife/jnlpba - bc2gm_corpus - drAbreu/bc4chemd_ner - linnaeus - chintagunta85/ncbi_disease language: - en tags: - Named Entity Recognition - SciBERT - Adverse Effect - Drug - Medical widget: - text: Abortion, miscarriage or uterine hemorrhage associated with misoprostol (Cytotec), a labor-inducing drug. example_title: Abortion, miscarriage, ... - text: Addiction to many sedatives and analgesics, such as diazepam, morphine, etc. example_title: Addiction to many... - text: Birth defects associated with thalidomide example_title: Birth defects associated... - text: Bleeding of the intestine associated with aspirin therapy example_title: Bleeding of the intestine... - text: Cardiovascular disease associated with COX-2 inhibitors (i.e. Vioxx) example_title: Cardiovascular disease... --- This is a SciBERT-based model fine-tuned to perform Named Entity Recognition for drug names and adverse drug effects. ![model image](https://raw.githubusercontent.com/jsylee/personal-projects/master/Hugging%20Face%20ADR%20Fine-Tuning/hf_adr.png) This model classifies input tokens into one of five classes: - `B-DRUG`: beginning of a drug entity - `I-DRUG`: within a drug entity - `B-EFFECT`: beginning of an AE entity - `I-EFFECT`: within an AE entity - `O`: outside either of the above entities To get started using this model for inference, simply set up an NER `pipeline` like below: ```python from transformers import (AutoModelForTokenClassification, AutoTokenizer, pipeline, ) model_checkpoint = "jsylee/scibert_scivocab_uncased-finetuned-ner" model = AutoModelForTokenClassification.from_pretrained(model_checkpoint, num_labels=5, id2label={0: 'O', 1: 'B-DRUG', 2: 'I-DRUG', 3: 'B-EFFECT', 4: 'I-EFFECT'} ) tokenizer = AutoTokenizer.from_pretrained(model_checkpoint) model_pipeline = pipeline(task="ner", model=model, tokenizer=tokenizer) print( model_pipeline ("Abortion, miscarriage or uterine hemorrhage associated with misoprostol (Cytotec), a labor-inducing drug.")) ``` SciBERT: https://huggingface.co/allenai/scibert_scivocab_uncased Dataset: https://huggingface.co/datasets/ade_corpus_v2
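As a small convenience on top of the snippet above, the token-classification pipeline can merge sub-word pieces into whole entity spans. This uses standard `transformers` pipeline behaviour (`aggregation_strategy`), not anything specific to this checkpoint.

```python
from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

model_checkpoint = "jsylee/scibert_scivocab_uncased-finetuned-ner"
model = AutoModelForTokenClassification.from_pretrained(
    model_checkpoint,
    num_labels=5,
    id2label={0: "O", 1: "B-DRUG", 2: "I-DRUG", 3: "B-EFFECT", 4: "I-EFFECT"},
)
tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)

# aggregation_strategy="simple" groups consecutive sub-tokens into full DRUG / EFFECT spans.
ner = pipeline(task="ner", model=model, tokenizer=tokenizer, aggregation_strategy="simple")
print(ner("Bleeding of the intestine associated with aspirin therapy"))
```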
[ "NAMED_ENTITY_RECOGNITION" ]
[ "BC5CDR", "JNLPBA", "LINNAEUS", "NCBI DISEASE" ]
RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf
RichardErkhov
null
[ "gguf", "arxiv:2204.06745", "arxiv:2101.00027", "arxiv:2201.07311", "arxiv:2104.09864", "endpoints_compatible", "region:us" ]
2024-10-27T08:53:00
2024-10-27T14:25:45
152
0
--- {} --- Quantization made by Richard Erkhov. [Github](https://github.com/RichardErkhov) [Discord](https://discord.gg/pvy7H8DZMG) [Request more models](https://github.com/RichardErkhov/quant_request) gpt-neox-20b-embeddings - GGUF - Model creator: https://huggingface.co/Upword/ - Original model: https://huggingface.co/Upword/gpt-neox-20b-embeddings/ | Name | Quant method | Size | | ---- | ---- | ---- | | [gpt-neox-20b-embeddings.Q2_K.gguf](https://huggingface.co/RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf/blob/main/gpt-neox-20b-embeddings.Q2_K.gguf) | Q2_K | 7.22GB | | [gpt-neox-20b-embeddings.Q3_K_S.gguf](https://huggingface.co/RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf/blob/main/gpt-neox-20b-embeddings.Q3_K_S.gguf) | Q3_K_S | 8.35GB | | [gpt-neox-20b-embeddings.Q3_K.gguf](https://huggingface.co/RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf/blob/main/gpt-neox-20b-embeddings.Q3_K.gguf) | Q3_K | 10.03GB | | [gpt-neox-20b-embeddings.Q3_K_M.gguf](https://huggingface.co/RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf/blob/main/gpt-neox-20b-embeddings.Q3_K_M.gguf) | Q3_K_M | 10.03GB | | [gpt-neox-20b-embeddings.Q3_K_L.gguf](https://huggingface.co/RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf/blob/main/gpt-neox-20b-embeddings.Q3_K_L.gguf) | Q3_K_L | 10.96GB | | [gpt-neox-20b-embeddings.IQ4_XS.gguf](https://huggingface.co/RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf/blob/main/gpt-neox-20b-embeddings.IQ4_XS.gguf) | IQ4_XS | 10.38GB | | [gpt-neox-20b-embeddings.Q4_0.gguf](https://huggingface.co/RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf/blob/main/gpt-neox-20b-embeddings.Q4_0.gguf) | Q4_0 | 10.86GB | | [gpt-neox-20b-embeddings.IQ4_NL.gguf](https://huggingface.co/RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf/blob/main/gpt-neox-20b-embeddings.IQ4_NL.gguf) | IQ4_NL | 10.94GB | | [gpt-neox-20b-embeddings.Q4_K_S.gguf](https://huggingface.co/RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf/blob/main/gpt-neox-20b-embeddings.Q4_K_S.gguf) | Q4_K_S | 10.94GB | | [gpt-neox-20b-embeddings.Q4_K.gguf](https://huggingface.co/RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf/blob/main/gpt-neox-20b-embeddings.Q4_K.gguf) | Q4_K | 12.23GB | | [gpt-neox-20b-embeddings.Q4_K_M.gguf](https://huggingface.co/RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf/blob/main/gpt-neox-20b-embeddings.Q4_K_M.gguf) | Q4_K_M | 12.23GB | | [gpt-neox-20b-embeddings.Q4_1.gguf](https://huggingface.co/RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf/blob/main/gpt-neox-20b-embeddings.Q4_1.gguf) | Q4_1 | 12.03GB | | [gpt-neox-20b-embeddings.Q5_0.gguf](https://huggingface.co/RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf/blob/main/gpt-neox-20b-embeddings.Q5_0.gguf) | Q5_0 | 13.21GB | | [gpt-neox-20b-embeddings.Q5_K_S.gguf](https://huggingface.co/RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf/blob/main/gpt-neox-20b-embeddings.Q5_K_S.gguf) | Q5_K_S | 13.21GB | | [gpt-neox-20b-embeddings.Q5_K.gguf](https://huggingface.co/RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf/blob/main/gpt-neox-20b-embeddings.Q5_K.gguf) | Q5_K | 14.24GB | | [gpt-neox-20b-embeddings.Q5_K_M.gguf](https://huggingface.co/RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf/blob/main/gpt-neox-20b-embeddings.Q5_K_M.gguf) | Q5_K_M | 14.24GB | | [gpt-neox-20b-embeddings.Q5_1.gguf](https://huggingface.co/RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf/blob/main/gpt-neox-20b-embeddings.Q5_1.gguf) | Q5_1 | 14.39GB | | 
[gpt-neox-20b-embeddings.Q6_K.gguf](https://huggingface.co/RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf/blob/main/gpt-neox-20b-embeddings.Q6_K.gguf) | Q6_K | 15.72GB | | [gpt-neox-20b-embeddings.Q8_0.gguf](https://huggingface.co/RichardErkhov/Upword_-_gpt-neox-20b-embeddings-gguf/blob/main/gpt-neox-20b-embeddings.Q8_0.gguf) | Q8_0 | 20.35GB | Original model description: --- language: - en tags: - pytorch - causal-lm license: apache-2.0 datasets: - the_pile duplicated_from: EleutherAI/gpt-neox-20b --- GPT-NeoX-20B is a 20 billion parameter autoregressive language model trained on [the Pile](https://pile.eleuther.ai/) using the [GPT-NeoX library](https://github.com/EleutherAI/gpt-neox). Its architecture intentionally resembles that of GPT-3, and is almost identical to that of [GPT-J- 6B](https://huggingface.co/EleutherAI/gpt-j-6B). Its training dataset contains a multitude of English-language texts, reflecting the general-purpose nature of this model. See the [accompanying paper](https://arxiv.org/abs/2204.06745) for details about model architecture (including how it differs from GPT-3), training procedure, and additional evaluations. ### Model details - Developed by: [EleutherAI](http://eleuther.ai) - Model type: Transformer-based Language Model - Language: English - Learn more: [GPT-NeoX-20B: An Open-Source Autoregressive Language Model](https://arxiv.org/abs/2204.06745). For details about the training dataset, see [the Pile paper](https://arxiv.org/abs/2101.00027), and [its data sheet](https://arxiv.org/abs/2201.07311). - License: Apache 2.0 - Contact: to ask questions about this model, join the [EleutherAI Discord](https://discord.gg/zBGx3azzUn), and post them in `#release-discussion`. Please read the existing GPT-NeoX-20B documentation before asking about the model on Discord. For general correspondence: [contact@eleuther. ai](mailto:[email protected]). <figure style="width:30em"> | Hyperparameter | Value | | ---------------------- | ----------- | | n<sub>parameters</sub> | 20554567680 | | n<sub>layers</sub> | 44 | | d<sub>model</sub> | 6144 | | n<sub>heads</sub> | 64 | | d<sub>head</sub> | 96 | | n<sub>vocab</sub> | 50257 | | Sequence Length | 2048 | | Learning Rate | 0.97 x 10<sup>-5</sup> | | Positional Encoding | [Rotary Position Embedding (RoPE)](https://arxiv.org/abs/2104.09864) | </figure> ### Uses and limitations #### Intended use GPT-NeoX-20B was developed primarily for research purposes. It learns an inner representation of the English language that can be used to extract features useful for downstream tasks. In addition to scientific uses, you may also further fine-tune and adapt GPT-NeoX-20B for deployment, as long as your use is in accordance with the Apache 2.0 license. This model works with the [Transformers Library](https://huggingface.co/docs/transformers/index). If you decide to use pre-trained GPT-NeoX-20B as a basis for your fine-tuned model, please note that you need to conduct your own risk and bias assessment. #### Out-of-scope use GPT-NeoX-20B is **not** intended for deployment as-is. It is not a product and cannot be used for human-facing interactions without supervision. GPT-NeoX-20B has not been fine-tuned for downstream tasks for which language models are commonly deployed, such as writing genre prose, or commercial chatbots. This means GPT-NeoX-20B will likely **not** respond to a given prompt the way products such as ChatGPT do. 
### Uses and limitations

#### Intended use

GPT-NeoX-20B was developed primarily for research purposes. It learns an inner representation of the English language that can be used to extract features useful for downstream tasks.

In addition to scientific uses, you may also further fine-tune and adapt GPT-NeoX-20B for deployment, as long as your use is in accordance with the Apache 2.0 license. This model works with the [Transformers Library](https://huggingface.co/docs/transformers/index). If you decide to use pre-trained GPT-NeoX-20B as a basis for your fine-tuned model, please note that you need to conduct your own risk and bias assessment.

#### Out-of-scope use

GPT-NeoX-20B is **not** intended for deployment as-is. It is not a product and cannot be used for human-facing interactions without supervision.

GPT-NeoX-20B has not been fine-tuned for downstream tasks for which language models are commonly deployed, such as writing genre prose, or commercial chatbots. This means GPT-NeoX-20B will likely **not** respond to a given prompt the way products such as ChatGPT do. This is because, unlike GPT-NeoX-20B, ChatGPT was fine-tuned using methods such as Reinforcement Learning from Human Feedback (RLHF) to better “understand” human instructions and dialogue.

This model is English-language only, and thus cannot be used for translation or generating text in other languages.

#### Limitations and biases

The core functionality of GPT-NeoX-20B is to take a string of text and predict the next token. Remember that the statistically most likely next token need not result in the most “accurate” text. Never rely on GPT-NeoX-20B to produce factually accurate output.

This model was trained on [the Pile](https://pile.eleuther.ai/), a dataset known to contain profanity and texts that are lewd or otherwise offensive. See [Section 6 of the Pile paper](https://arxiv.org/abs/2101.00027) for a discussion of documented biases with regards to gender, religion, and race. GPT-NeoX-20B may produce socially unacceptable or undesirable text, *even if* the prompt itself does not include anything explicitly offensive.

We recommend curating the outputs of this model before presenting it to a human reader. Please inform your audience that you are using artificially generated text.

#### How to use

If you simply want to try out some prompts, check out [this playground](https://20b.eleuther.ai/).

GPT-NeoX-20B can be loaded using the `AutoModelForCausalLM` functionality:

```python
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neox-20b")
model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-neox-20b")
```
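The card stops at loading the checkpoint. As a small illustrative follow-up (not from the original card), the loaded model can be used for plain next-token generation through the standard `generate` API; note that the full-precision 20B checkpoint needs roughly 40 GB of memory in fp16, and the sampling settings below are arbitrary:

```python
# Sketch: generate a short completion with the checkpoint loaded as shown above.
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neox-20b")
model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-neox-20b")

inputs = tokenizer("GPT-NeoX-20B is a language model that", return_tensors="pt")
with torch.no_grad():
    output_ids = model.generate(**inputs, max_new_tokens=40, do_sample=True, temperature=0.8)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```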
### Training

#### Training dataset

The Pile is an 825GiB general-purpose dataset in English. It was created by EleutherAI specifically for training large language models. It contains texts from 22 diverse sources, roughly broken down into five categories: academic writing (e.g. arXiv), internet (e.g. CommonCrawl), prose (e.g. Project Gutenberg), dialogue (e.g. YouTube subtitles), and miscellaneous (e.g. GitHub, Enron Emails). See [the Pile paper](https://arxiv.org/abs/2101.00027) for a breakdown of all data sources, methodology, and a discussion of ethical implications. Consult [the datasheet](https://arxiv.org/abs/2201.07311) for more detailed documentation about the Pile and its component datasets. The Pile can be downloaded from the [official website](https://pile.eleuther.ai/), or from a [community mirror](https://the-eye.eu/public/AI/pile/).

The Pile was **not** deduplicated before being used to train GPT-NeoX-20B.

#### Training procedure

GPT-NeoX-20B was trained with a batch size of approximately 3.15M tokens (1538 sequences of 2048 tokens each), for a total of 150,000 steps. Tensor parallelism and pipeline parallelism were used to distribute the model across GPUs. Additional details about the training procedure are in [Section 3 of the accompanying paper](https://arxiv.org/abs/2204.06745).

### Evaluations

<figure style="width:55em">

| Model         | OpenAI’s LAMBADA | SciQ          | PIQA          | TriviaQA      | ARC (Challenge) |
| ------------- | :--------------: | :-----------: | :-----------: | :-----------: | :-------------: |
| GPT-J-6B      | 0.683 ± 0.006    | 0.910 ± 0.009 | 0.752 ± 0.010 | 0.170 ± 0.004 | 0.340 ± 0.014   |
| FairSeq 6.7B  | 0.673 ± 0.007    | 0.895 ± 0.010 | 0.762 ± 0.010 | 0.221 ± 0.004 | 0.329 ± 0.014   |
| GPT-3 Curie   | 0.693 ± 0.006    | 0.918 ± 0.009 | 0.767 ± 0.010 | 0.196 ± 0.004 | 0.334 ± 0.014   |
| FairSeq 13B   | 0.709 ± 0.006    | 0.910 ± 0.009 | 0.769 ± 0.010 | 0.270 ± 0.004 | 0.345 ± 0.014   |
| GPT-NeoX-20B  | 0.720 ± 0.006    | 0.928 ± 0.008 | 0.779 ± 0.010 | 0.259 ± 0.004 | 0.380 ± 0.014   |
| GPT-3 DaVinci | 0.752 ± 0.006    | 0.949 ± 0.007 | 0.791 ± 0.009 | 0.409 ± 0.005 | 0.435 ± 0.014   |

<figcaption>Zero-shot performance on selected natural language tasks.</figcaption>
</figure>

This is a heavily abridged version of the evaluation results. Appendix D of the [GPT-NeoX-20B paper](https://arxiv.org/abs/2204.06745) compares more model sizes, and contains additional evaluations, including on: zero and five-shot natural language tasks, zero and five-shot Basic Arithmetic and MATH, and zero-shot Hendrycks tasks.

### BibTeX

To cite the GPT-NeoX-20B paper:

```
@misc{https://doi.org/10.48550/arxiv.2204.06745,
  doi = {10.48550/ARXIV.2204.06745},
  url = {https://arxiv.org/abs/2204.06745},
  author = {Black, Sid and Biderman, Stella and Hallahan, Eric and Anthony, Quentin and Gao, Leo and Golding, Laurence and He, Horace and Leahy, Connor and McDonell, Kyle and Phang, Jason and Pieler, Michael and Prashanth, USVSN Sai and Purohit, Shivanshu and Reynolds, Laria and Tow, Jonathan and Wang, Ben and Weinbach, Samuel},
  keywords = {Computation and Language (cs.CL), FOS: Computer and information sciences, FOS: Computer and information sciences},
  title = {GPT-NeoX-20B: An Open-Source Autoregressive Language Model},
  publisher = {arXiv},
  year = {2022},
  copyright = {Creative Commons Attribution 4.0 International}
}
```
[ "TRANSLATION" ]
[ "SCIQ" ]
izhx/udever-bloom-560m
izhx
feature-extraction
[ "transformers", "pytorch", "bloom", "feature-extraction", "mteb", "ak", "ar", "as", "bm", "bn", "ca", "code", "en", "es", "eu", "fon", "fr", "gu", "hi", "id", "ig", "ki", "kn", "lg", "ln", "ml", "mr", "ne", "nso", "ny", "or", "pa", "pt", "rn", "rw", "sn", "st", "sw", "ta", "te", "tn", "ts", "tum", "tw", "ur", "vi", "wo", "xh", "yo", "zh", "zhs", "zht", "zu", "arxiv:2310.08232", "license:bigscience-bloom-rail-1.0", "model-index", "text-generation-inference", "endpoints_compatible", "region:us" ]
2023-10-24T10:49:45
2023-11-07T06:57:25
150
1
--- language: - ak - ar - as - bm - bn - ca - code - en - es - eu - fon - fr - gu - hi - id - ig - ki - kn - lg - ln - ml - mr - ne - nso - ny - or - pa - pt - rn - rw - sn - st - sw - ta - te - tn - ts - tum - tw - ur - vi - wo - xh - yo - zh - zhs - zht - zu license: bigscience-bloom-rail-1.0 tags: - mteb model-index: - name: udever-bloom-560m results: - task: type: STS dataset: name: MTEB AFQMC type: C-MTEB/AFQMC config: default split: validation revision: None metrics: - type: cos_sim_pearson value: 25.170024237678657 - type: cos_sim_spearman value: 25.32025098111752 - type: euclidean_pearson value: 25.34284673812859 - type: euclidean_spearman value: 25.52812937004611 - type: manhattan_pearson value: 25.734179522960822 - type: manhattan_spearman value: 25.92247507041032 - task: type: STS dataset: name: MTEB ATEC type: C-MTEB/ATEC config: default split: test revision: None metrics: - type: cos_sim_pearson value: 32.3359541791282 - type: cos_sim_spearman value: 33.45815274836323 - type: euclidean_pearson value: 35.14748229440635 - type: euclidean_spearman value: 33.377829932851334 - type: manhattan_pearson value: 35.359130773295625 - type: manhattan_spearman value: 33.524469762932426 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 72.35820895522389 - type: ap value: 35.45566303125099 - type: f1 value: 66.49474786522534 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (de) type: mteb/amazon_counterfactual config: de split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 66.423982869379 - type: ap value: 78.32781372746805 - type: f1 value: 64.24959400774807 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en-ext) type: mteb/amazon_counterfactual config: en-ext split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 73.65817091454274 - type: ap value: 21.73416645163647 - type: f1 value: 60.52120070712094 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (ja) type: mteb/amazon_counterfactual config: ja split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 56.86295503211991 - type: ap value: 12.906256075113513 - type: f1 value: 46.68625513679152 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 83.8095 - type: ap value: 78.5195717101614 - type: f1 value: 83.74169093676316 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 38.97 - type: f1 value: 38.57853211177342 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (de) type: mteb/amazon_reviews_multi config: de split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 26.846000000000004 - type: f1 value: 26.473886891677306 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (es) type: mteb/amazon_reviews_multi config: es split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 38.974 - 
type: f1 value: 38.31719230291287 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 38.38799999999999 - type: f1 value: 37.53319978613875 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (ja) type: mteb/amazon_reviews_multi config: ja split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 28.311999999999998 - type: f1 value: 27.988313617729755 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 35.704 - type: f1 value: 34.863182924437254 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 21.053 - type: map_at_10 value: 35.811 - type: map_at_100 value: 37.035000000000004 - type: map_at_1000 value: 37.055 - type: map_at_3 value: 30.666 - type: map_at_5 value: 33.525 - type: mrr_at_1 value: 21.266 - type: mrr_at_10 value: 35.906 - type: mrr_at_100 value: 37.122 - type: mrr_at_1000 value: 37.141999999999996 - type: mrr_at_3 value: 30.714000000000002 - type: mrr_at_5 value: 33.576 - type: ndcg_at_1 value: 21.053 - type: ndcg_at_10 value: 44.545 - type: ndcg_at_100 value: 49.844 - type: ndcg_at_1000 value: 50.298 - type: ndcg_at_3 value: 33.889 - type: ndcg_at_5 value: 39.059 - type: precision_at_1 value: 21.053 - type: precision_at_10 value: 7.269 - type: precision_at_100 value: 0.96 - type: precision_at_1000 value: 0.099 - type: precision_at_3 value: 14.414 - type: precision_at_5 value: 11.166 - type: recall_at_1 value: 21.053 - type: recall_at_10 value: 72.688 - type: recall_at_100 value: 96.017 - type: recall_at_1000 value: 99.431 - type: recall_at_3 value: 43.242999999999995 - type: recall_at_5 value: 55.832 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 40.26646269393896 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 32.00218289816601 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 57.381567373603424 - type: mrr value: 70.09431473420392 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 87.14803223261677 - type: cos_sim_spearman value: 84.43626128689064 - type: euclidean_pearson value: 85.03130036472703 - type: euclidean_spearman value: 84.05974668365359 - type: manhattan_pearson value: 85.59339889467545 - type: manhattan_spearman value: 83.86938090025696 - task: type: STS dataset: name: MTEB BQ type: C-MTEB/BQ config: default split: test revision: None metrics: - type: cos_sim_pearson value: 44.19468290937555 - type: cos_sim_spearman value: 43.93025426799595 - type: euclidean_pearson value: 45.273900549350735 - type: euclidean_spearman value: 45.07419415738924 - type: 
manhattan_pearson value: 45.469211385235376 - type: manhattan_spearman value: 45.27440191151001 - task: type: BitextMining dataset: name: MTEB BUCC (de-en) type: mteb/bucc-bitext-mining config: de-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 11.440501043841337 - type: f1 value: 11.295895880968951 - type: precision value: 11.237446950317073 - type: recall value: 11.440501043841337 - task: type: BitextMining dataset: name: MTEB BUCC (fr-en) type: mteb/bucc-bitext-mining config: fr-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 96.53312788906008 - type: f1 value: 96.18093770636143 - type: precision value: 96.00667693888035 - type: recall value: 96.53312788906008 - task: type: BitextMining dataset: name: MTEB BUCC (ru-en) type: mteb/bucc-bitext-mining config: ru-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 1.6972635954277795 - type: f1 value: 1.5885146938143124 - type: precision value: 1.5581125970067466 - type: recall value: 1.6972635954277795 - task: type: BitextMining dataset: name: MTEB BUCC (zh-en) type: mteb/bucc-bitext-mining config: zh-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 96.31384939441811 - type: f1 value: 96.15587151132175 - type: precision value: 96.07688256977357 - type: recall value: 96.31384939441811 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 80.97402597402598 - type: f1 value: 80.88177660652944 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 33.266950159712465 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 28.65092446021672 - task: type: Clustering dataset: name: MTEB CLSClusteringP2P type: C-MTEB/CLSClusteringP2P config: default split: test revision: None metrics: - type: v_measure value: 35.21075820650184 - task: type: Clustering dataset: name: MTEB CLSClusteringS2S type: C-MTEB/CLSClusteringS2S config: default split: test revision: None metrics: - type: v_measure value: 35.121931960714484 - task: type: Reranking dataset: name: MTEB CMedQAv1 type: C-MTEB/CMedQAv1-reranking config: default split: test revision: None metrics: - type: map value: 63.41256934884578 - type: mrr value: 68.6492857142857 - task: type: Reranking dataset: name: MTEB CMedQAv2 type: C-MTEB/CMedQAv2-reranking config: default split: test revision: None metrics: - type: map value: 63.663067375541104 - type: mrr value: 68.92075396825396 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 24.997 - type: map_at_10 value: 35.477 - type: map_at_100 value: 36.722 - type: map_at_1000 value: 36.849 - type: map_at_3 value: 32.083 - type: map_at_5 value: 33.884 - type: mrr_at_1 value: 32.046 - type: mrr_at_10 value: 41.455999999999996 - type: mrr_at_100 value: 42.214 - type: mrr_at_1000 value: 42.268 - type: mrr_at_3 value: 38.722 - type: mrr_at_5 value: 40.266999999999996 - type: ndcg_at_1 value: 
32.046 - type: ndcg_at_10 value: 41.705999999999996 - type: ndcg_at_100 value: 46.695 - type: ndcg_at_1000 value: 49.128 - type: ndcg_at_3 value: 36.6 - type: ndcg_at_5 value: 38.725 - type: precision_at_1 value: 32.046 - type: precision_at_10 value: 8.197000000000001 - type: precision_at_100 value: 1.323 - type: precision_at_1000 value: 0.183 - type: precision_at_3 value: 18.073 - type: precision_at_5 value: 13.047 - type: recall_at_1 value: 24.997 - type: recall_at_10 value: 54.013 - type: recall_at_100 value: 75.29400000000001 - type: recall_at_1000 value: 91.611 - type: recall_at_3 value: 38.627 - type: recall_at_5 value: 45.019999999999996 - type: map_at_1 value: 23.194 - type: map_at_10 value: 30.076000000000004 - type: map_at_100 value: 31.0 - type: map_at_1000 value: 31.125999999999998 - type: map_at_3 value: 28.137 - type: map_at_5 value: 29.206 - type: mrr_at_1 value: 28.535 - type: mrr_at_10 value: 34.833999999999996 - type: mrr_at_100 value: 35.504999999999995 - type: mrr_at_1000 value: 35.57 - type: mrr_at_3 value: 33.089 - type: mrr_at_5 value: 34.115 - type: ndcg_at_1 value: 28.535 - type: ndcg_at_10 value: 34.285 - type: ndcg_at_100 value: 38.286 - type: ndcg_at_1000 value: 41.007 - type: ndcg_at_3 value: 31.395 - type: ndcg_at_5 value: 32.687 - type: precision_at_1 value: 28.535 - type: precision_at_10 value: 6.166 - type: precision_at_100 value: 1.042 - type: precision_at_1000 value: 0.155 - type: precision_at_3 value: 14.862 - type: precision_at_5 value: 10.331 - type: recall_at_1 value: 23.194 - type: recall_at_10 value: 41.648 - type: recall_at_100 value: 58.999 - type: recall_at_1000 value: 77.46300000000001 - type: recall_at_3 value: 32.931 - type: recall_at_5 value: 36.736999999999995 - type: map_at_1 value: 31.899 - type: map_at_10 value: 42.657000000000004 - type: map_at_100 value: 43.717 - type: map_at_1000 value: 43.79 - type: map_at_3 value: 39.635 - type: map_at_5 value: 41.538000000000004 - type: mrr_at_1 value: 36.864999999999995 - type: mrr_at_10 value: 46.137 - type: mrr_at_100 value: 46.946 - type: mrr_at_1000 value: 46.986 - type: mrr_at_3 value: 43.469 - type: mrr_at_5 value: 45.262 - type: ndcg_at_1 value: 36.864999999999995 - type: ndcg_at_10 value: 48.164 - type: ndcg_at_100 value: 52.769999999999996 - type: ndcg_at_1000 value: 54.393 - type: ndcg_at_3 value: 42.887 - type: ndcg_at_5 value: 45.871 - type: precision_at_1 value: 36.864999999999995 - type: precision_at_10 value: 7.843 - type: precision_at_100 value: 1.102 - type: precision_at_1000 value: 0.13 - type: precision_at_3 value: 19.352 - type: precision_at_5 value: 13.618 - type: recall_at_1 value: 31.899 - type: recall_at_10 value: 61.131 - type: recall_at_100 value: 81.504 - type: recall_at_1000 value: 93.146 - type: recall_at_3 value: 46.971000000000004 - type: recall_at_5 value: 54.42399999999999 - type: map_at_1 value: 17.621000000000002 - type: map_at_10 value: 23.621 - type: map_at_100 value: 24.636 - type: map_at_1000 value: 24.739 - type: map_at_3 value: 21.623 - type: map_at_5 value: 22.511 - type: mrr_at_1 value: 19.096 - type: mrr_at_10 value: 25.288 - type: mrr_at_100 value: 26.238 - type: mrr_at_1000 value: 26.314 - type: mrr_at_3 value: 23.202 - type: mrr_at_5 value: 24.213 - type: ndcg_at_1 value: 19.096 - type: ndcg_at_10 value: 27.529999999999998 - type: ndcg_at_100 value: 32.763 - type: ndcg_at_1000 value: 35.538 - type: ndcg_at_3 value: 23.362 - type: ndcg_at_5 value: 24.961 - type: precision_at_1 value: 19.096 - type: precision_at_10 value: 4.417999999999999 - type: 
precision_at_100 value: 0.739 - type: precision_at_1000 value: 0.10300000000000001 - type: precision_at_3 value: 9.981 - type: precision_at_5 value: 6.959999999999999 - type: recall_at_1 value: 17.621000000000002 - type: recall_at_10 value: 38.079 - type: recall_at_100 value: 62.499 - type: recall_at_1000 value: 83.783 - type: recall_at_3 value: 26.687 - type: recall_at_5 value: 30.459000000000003 - type: map_at_1 value: 11.019 - type: map_at_10 value: 15.869 - type: map_at_100 value: 17.078 - type: map_at_1000 value: 17.205000000000002 - type: map_at_3 value: 13.794 - type: map_at_5 value: 14.814 - type: mrr_at_1 value: 13.930000000000001 - type: mrr_at_10 value: 19.172 - type: mrr_at_100 value: 20.325 - type: mrr_at_1000 value: 20.415 - type: mrr_at_3 value: 17.122999999999998 - type: mrr_at_5 value: 18.124000000000002 - type: ndcg_at_1 value: 13.930000000000001 - type: ndcg_at_10 value: 19.646 - type: ndcg_at_100 value: 25.684 - type: ndcg_at_1000 value: 29.14 - type: ndcg_at_3 value: 15.614 - type: ndcg_at_5 value: 17.247 - type: precision_at_1 value: 13.930000000000001 - type: precision_at_10 value: 3.868 - type: precision_at_100 value: 0.8 - type: precision_at_1000 value: 0.125 - type: precision_at_3 value: 7.420999999999999 - type: precision_at_5 value: 5.672 - type: recall_at_1 value: 11.019 - type: recall_at_10 value: 28.116000000000003 - type: recall_at_100 value: 54.794 - type: recall_at_1000 value: 79.838 - type: recall_at_3 value: 17.124 - type: recall_at_5 value: 21.086 - type: map_at_1 value: 24.791 - type: map_at_10 value: 33.442 - type: map_at_100 value: 34.719 - type: map_at_1000 value: 34.849000000000004 - type: map_at_3 value: 30.885 - type: map_at_5 value: 32.245000000000005 - type: mrr_at_1 value: 30.606 - type: mrr_at_10 value: 38.922000000000004 - type: mrr_at_100 value: 39.822 - type: mrr_at_1000 value: 39.881 - type: mrr_at_3 value: 36.622 - type: mrr_at_5 value: 37.907000000000004 - type: ndcg_at_1 value: 30.606 - type: ndcg_at_10 value: 38.867000000000004 - type: ndcg_at_100 value: 44.364 - type: ndcg_at_1000 value: 47.073 - type: ndcg_at_3 value: 34.63 - type: ndcg_at_5 value: 36.479 - type: precision_at_1 value: 30.606 - type: precision_at_10 value: 7.0360000000000005 - type: precision_at_100 value: 1.174 - type: precision_at_1000 value: 0.16 - type: precision_at_3 value: 16.522000000000002 - type: precision_at_5 value: 11.588 - type: recall_at_1 value: 24.791 - type: recall_at_10 value: 49.736000000000004 - type: recall_at_100 value: 72.67099999999999 - type: recall_at_1000 value: 91.29599999999999 - type: recall_at_3 value: 37.345 - type: recall_at_5 value: 42.400999999999996 - type: map_at_1 value: 20.669999999999998 - type: map_at_10 value: 28.605000000000004 - type: map_at_100 value: 29.769000000000002 - type: map_at_1000 value: 29.881999999999998 - type: map_at_3 value: 25.886 - type: map_at_5 value: 27.317999999999998 - type: mrr_at_1 value: 25.457 - type: mrr_at_10 value: 33.423 - type: mrr_at_100 value: 34.269 - type: mrr_at_1000 value: 34.336 - type: mrr_at_3 value: 30.974 - type: mrr_at_5 value: 32.23 - type: ndcg_at_1 value: 25.457 - type: ndcg_at_10 value: 33.785 - type: ndcg_at_100 value: 39.145 - type: ndcg_at_1000 value: 41.772 - type: ndcg_at_3 value: 29.014 - type: ndcg_at_5 value: 31.019999999999996 - type: precision_at_1 value: 25.457 - type: precision_at_10 value: 6.2330000000000005 - type: precision_at_100 value: 1.045 - type: precision_at_1000 value: 0.145 - type: precision_at_3 value: 13.813 - type: precision_at_5 value: 9.863 - type: 
recall_at_1 value: 20.669999999999998 - type: recall_at_10 value: 44.651 - type: recall_at_100 value: 68.037 - type: recall_at_1000 value: 86.282 - type: recall_at_3 value: 31.381999999999998 - type: recall_at_5 value: 36.778 - type: map_at_1 value: 19.796583333333338 - type: map_at_10 value: 26.900166666666664 - type: map_at_100 value: 27.956583333333334 - type: map_at_1000 value: 28.08083333333333 - type: map_at_3 value: 24.598416666666665 - type: map_at_5 value: 25.81791666666667 - type: mrr_at_1 value: 23.68591666666667 - type: mrr_at_10 value: 30.65558333333333 - type: mrr_at_100 value: 31.503583333333335 - type: mrr_at_1000 value: 31.576083333333333 - type: mrr_at_3 value: 28.50525 - type: mrr_at_5 value: 29.690666666666665 - type: ndcg_at_1 value: 23.68591666666667 - type: ndcg_at_10 value: 31.425000000000004 - type: ndcg_at_100 value: 36.34316666666666 - type: ndcg_at_1000 value: 39.164249999999996 - type: ndcg_at_3 value: 27.330083333333338 - type: ndcg_at_5 value: 29.14408333333333 - type: precision_at_1 value: 23.68591666666667 - type: precision_at_10 value: 5.5862500000000015 - type: precision_at_100 value: 0.9571666666666666 - type: precision_at_1000 value: 0.13866666666666666 - type: precision_at_3 value: 12.663499999999999 - type: precision_at_5 value: 9.035333333333332 - type: recall_at_1 value: 19.796583333333338 - type: recall_at_10 value: 41.289416666666675 - type: recall_at_100 value: 63.251250000000006 - type: recall_at_1000 value: 83.4515 - type: recall_at_3 value: 29.727916666666665 - type: recall_at_5 value: 34.45824999999999 - type: map_at_1 value: 16.121 - type: map_at_10 value: 22.104 - type: map_at_100 value: 23.003 - type: map_at_1000 value: 23.108 - type: map_at_3 value: 20.233 - type: map_at_5 value: 21.186 - type: mrr_at_1 value: 18.865000000000002 - type: mrr_at_10 value: 24.951 - type: mrr_at_100 value: 25.779000000000003 - type: mrr_at_1000 value: 25.863999999999997 - type: mrr_at_3 value: 23.083000000000002 - type: mrr_at_5 value: 24.049 - type: ndcg_at_1 value: 18.865000000000002 - type: ndcg_at_10 value: 26.031 - type: ndcg_at_100 value: 30.589 - type: ndcg_at_1000 value: 33.565 - type: ndcg_at_3 value: 22.369 - type: ndcg_at_5 value: 23.932000000000002 - type: precision_at_1 value: 18.865000000000002 - type: precision_at_10 value: 4.324999999999999 - type: precision_at_100 value: 0.722 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 10.072000000000001 - type: precision_at_5 value: 7.086 - type: recall_at_1 value: 16.121 - type: recall_at_10 value: 35.577 - type: recall_at_100 value: 56.298 - type: recall_at_1000 value: 79.089 - type: recall_at_3 value: 25.239 - type: recall_at_5 value: 29.242 - type: map_at_1 value: 10.968 - type: map_at_10 value: 15.639 - type: map_at_100 value: 16.459 - type: map_at_1000 value: 16.584 - type: map_at_3 value: 14.127 - type: map_at_5 value: 14.911 - type: mrr_at_1 value: 13.73 - type: mrr_at_10 value: 18.822 - type: mrr_at_100 value: 19.592000000000002 - type: mrr_at_1000 value: 19.683999999999997 - type: mrr_at_3 value: 17.223 - type: mrr_at_5 value: 18.082 - type: ndcg_at_1 value: 13.73 - type: ndcg_at_10 value: 18.881999999999998 - type: ndcg_at_100 value: 23.182 - type: ndcg_at_1000 value: 26.479000000000003 - type: ndcg_at_3 value: 16.067999999999998 - type: ndcg_at_5 value: 17.265 - type: precision_at_1 value: 13.73 - type: precision_at_10 value: 3.544 - type: precision_at_100 value: 0.679 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 7.674 - type: 
precision_at_5 value: 5.561 - type: recall_at_1 value: 10.968 - type: recall_at_10 value: 25.596000000000004 - type: recall_at_100 value: 45.411 - type: recall_at_1000 value: 69.555 - type: recall_at_3 value: 17.582 - type: recall_at_5 value: 20.785 - type: map_at_1 value: 20.886 - type: map_at_10 value: 27.029999999999998 - type: map_at_100 value: 27.968 - type: map_at_1000 value: 28.108 - type: map_at_3 value: 25.001 - type: map_at_5 value: 26.185000000000002 - type: mrr_at_1 value: 24.067 - type: mrr_at_10 value: 30.756 - type: mrr_at_100 value: 31.593 - type: mrr_at_1000 value: 31.685999999999996 - type: mrr_at_3 value: 28.793999999999997 - type: mrr_at_5 value: 29.997 - type: ndcg_at_1 value: 24.067 - type: ndcg_at_10 value: 31.095 - type: ndcg_at_100 value: 35.893 - type: ndcg_at_1000 value: 39.158 - type: ndcg_at_3 value: 27.321 - type: ndcg_at_5 value: 29.247 - type: precision_at_1 value: 24.067 - type: precision_at_10 value: 5.103 - type: precision_at_100 value: 0.8460000000000001 - type: precision_at_1000 value: 0.125 - type: precision_at_3 value: 12.065 - type: precision_at_5 value: 8.601 - type: recall_at_1 value: 20.886 - type: recall_at_10 value: 39.797 - type: recall_at_100 value: 61.399 - type: recall_at_1000 value: 84.555 - type: recall_at_3 value: 29.721999999999998 - type: recall_at_5 value: 34.455999999999996 - type: map_at_1 value: 21.394 - type: map_at_10 value: 28.303 - type: map_at_100 value: 29.726000000000003 - type: map_at_1000 value: 29.955 - type: map_at_3 value: 25.705 - type: map_at_5 value: 26.989 - type: mrr_at_1 value: 25.691999999999997 - type: mrr_at_10 value: 32.495000000000005 - type: mrr_at_100 value: 33.461999999999996 - type: mrr_at_1000 value: 33.534000000000006 - type: mrr_at_3 value: 30.137999999999998 - type: mrr_at_5 value: 31.383 - type: ndcg_at_1 value: 25.691999999999997 - type: ndcg_at_10 value: 33.300000000000004 - type: ndcg_at_100 value: 39.062000000000005 - type: ndcg_at_1000 value: 42.176 - type: ndcg_at_3 value: 28.859 - type: ndcg_at_5 value: 30.805 - type: precision_at_1 value: 25.691999999999997 - type: precision_at_10 value: 6.383 - type: precision_at_100 value: 1.387 - type: precision_at_1000 value: 0.22899999999999998 - type: precision_at_3 value: 13.439 - type: precision_at_5 value: 9.959999999999999 - type: recall_at_1 value: 21.394 - type: recall_at_10 value: 42.853 - type: recall_at_100 value: 69.284 - type: recall_at_1000 value: 89.646 - type: recall_at_3 value: 29.786 - type: recall_at_5 value: 34.797 - type: map_at_1 value: 13.999 - type: map_at_10 value: 19.979 - type: map_at_100 value: 20.682000000000002 - type: map_at_1000 value: 20.775 - type: map_at_3 value: 18.072 - type: map_at_5 value: 19.028 - type: mrr_at_1 value: 15.342 - type: mrr_at_10 value: 21.611 - type: mrr_at_100 value: 22.298000000000002 - type: mrr_at_1000 value: 22.375 - type: mrr_at_3 value: 19.624 - type: mrr_at_5 value: 20.659 - type: ndcg_at_1 value: 15.342 - type: ndcg_at_10 value: 23.809 - type: ndcg_at_100 value: 27.685 - type: ndcg_at_1000 value: 30.542 - type: ndcg_at_3 value: 19.842000000000002 - type: ndcg_at_5 value: 21.490000000000002 - type: precision_at_1 value: 15.342 - type: precision_at_10 value: 3.9190000000000005 - type: precision_at_100 value: 0.627 - type: precision_at_1000 value: 0.093 - type: precision_at_3 value: 8.688 - type: precision_at_5 value: 6.1370000000000005 - type: recall_at_1 value: 13.999 - type: recall_at_10 value: 34.276 - type: recall_at_100 value: 52.825 - type: recall_at_1000 value: 75.154 - type: recall_at_3 
value: 23.339 - type: recall_at_5 value: 27.314 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 8.27 - type: map_at_10 value: 14.161999999999999 - type: map_at_100 value: 15.775 - type: map_at_1000 value: 15.947 - type: map_at_3 value: 11.701 - type: map_at_5 value: 12.952 - type: mrr_at_1 value: 18.632 - type: mrr_at_10 value: 28.871000000000002 - type: mrr_at_100 value: 29.985 - type: mrr_at_1000 value: 30.037999999999997 - type: mrr_at_3 value: 25.451 - type: mrr_at_5 value: 27.366 - type: ndcg_at_1 value: 18.632 - type: ndcg_at_10 value: 21.017 - type: ndcg_at_100 value: 28.022999999999996 - type: ndcg_at_1000 value: 31.518 - type: ndcg_at_3 value: 16.611 - type: ndcg_at_5 value: 18.149 - type: precision_at_1 value: 18.632 - type: precision_at_10 value: 6.736000000000001 - type: precision_at_100 value: 1.414 - type: precision_at_1000 value: 0.20600000000000002 - type: precision_at_3 value: 12.313 - type: precision_at_5 value: 9.759 - type: recall_at_1 value: 8.27 - type: recall_at_10 value: 26.218999999999998 - type: recall_at_100 value: 50.77 - type: recall_at_1000 value: 70.8 - type: recall_at_3 value: 15.526000000000002 - type: recall_at_5 value: 19.724 - task: type: Retrieval dataset: name: MTEB CmedqaRetrieval type: C-MTEB/CmedqaRetrieval config: default split: dev revision: None metrics: - type: map_at_1 value: 10.598 - type: map_at_10 value: 15.869 - type: map_at_100 value: 17.081 - type: map_at_1000 value: 17.267 - type: map_at_3 value: 13.877 - type: map_at_5 value: 14.884 - type: mrr_at_1 value: 17.279 - type: mrr_at_10 value: 22.554 - type: mrr_at_100 value: 23.521 - type: mrr_at_1000 value: 23.619 - type: mrr_at_3 value: 20.647 - type: mrr_at_5 value: 21.625 - type: ndcg_at_1 value: 17.279 - type: ndcg_at_10 value: 20.029 - type: ndcg_at_100 value: 25.968000000000004 - type: ndcg_at_1000 value: 30.158 - type: ndcg_at_3 value: 16.947000000000003 - type: ndcg_at_5 value: 18.069 - type: precision_at_1 value: 17.279 - type: precision_at_10 value: 4.704 - type: precision_at_100 value: 0.9690000000000001 - type: precision_at_1000 value: 0.152 - type: precision_at_3 value: 9.777 - type: precision_at_5 value: 7.207 - type: recall_at_1 value: 10.598 - type: recall_at_10 value: 26.034000000000002 - type: recall_at_100 value: 51.385999999999996 - type: recall_at_1000 value: 80.49 - type: recall_at_3 value: 16.834 - type: recall_at_5 value: 20.317 - task: type: PairClassification dataset: name: MTEB Cmnli type: C-MTEB/CMNLI config: default split: validation revision: None metrics: - type: cos_sim_accuracy value: 70.40288634996993 - type: cos_sim_ap value: 78.43387766087626 - type: cos_sim_f1 value: 73.09982840415867 - type: cos_sim_precision value: 64.31616341030195 - type: cos_sim_recall value: 84.66214636427402 - type: dot_accuracy value: 65.52014431749849 - type: dot_ap value: 70.89507344960353 - type: dot_f1 value: 70.7030509759333 - type: dot_precision value: 59.43922255854708 - type: dot_recall value: 87.2340425531915 - type: euclidean_accuracy value: 69.84966927239927 - type: euclidean_ap value: 78.08825177727368 - type: euclidean_f1 value: 72.68394399761692 - type: euclidean_precision value: 63.16879530548844 - type: euclidean_recall value: 85.57400046761748 - type: manhattan_accuracy value: 69.9579073962718 - type: manhattan_ap value: 78.38355697667261 - type: manhattan_f1 value: 73.06507508663844 - type: manhattan_precision value: 62.10112911143839 - type: manhattan_recall 
value: 88.73041851765257 - type: max_accuracy value: 70.40288634996993 - type: max_ap value: 78.43387766087626 - type: max_f1 value: 73.09982840415867 - task: type: Retrieval dataset: name: MTEB CovidRetrieval type: C-MTEB/CovidRetrieval config: default split: dev revision: None metrics: - type: map_at_1 value: 23.973 - type: map_at_10 value: 30.074 - type: map_at_100 value: 31.05 - type: map_at_1000 value: 31.147000000000002 - type: map_at_3 value: 27.977 - type: map_at_5 value: 29.247 - type: mrr_at_1 value: 24.025 - type: mrr_at_10 value: 30.093999999999998 - type: mrr_at_100 value: 31.068 - type: mrr_at_1000 value: 31.165 - type: mrr_at_3 value: 27.994000000000003 - type: mrr_at_5 value: 29.243000000000002 - type: ndcg_at_1 value: 24.025 - type: ndcg_at_10 value: 33.566 - type: ndcg_at_100 value: 38.818999999999996 - type: ndcg_at_1000 value: 41.477000000000004 - type: ndcg_at_3 value: 29.293000000000003 - type: ndcg_at_5 value: 31.564999999999998 - type: precision_at_1 value: 24.025 - type: precision_at_10 value: 4.489 - type: precision_at_100 value: 0.709 - type: precision_at_1000 value: 0.092 - type: precision_at_3 value: 11.064 - type: precision_at_5 value: 7.734000000000001 - type: recall_at_1 value: 23.973 - type: recall_at_10 value: 44.731 - type: recall_at_100 value: 70.52199999999999 - type: recall_at_1000 value: 91.491 - type: recall_at_3 value: 33.087 - type: recall_at_5 value: 38.567 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 6.950000000000001 - type: map_at_10 value: 13.236999999999998 - type: map_at_100 value: 16.137 - type: map_at_1000 value: 16.785 - type: map_at_3 value: 10.378 - type: map_at_5 value: 11.62 - type: mrr_at_1 value: 54.0 - type: mrr_at_10 value: 61.861 - type: mrr_at_100 value: 62.436 - type: mrr_at_1000 value: 62.456 - type: mrr_at_3 value: 60.458 - type: mrr_at_5 value: 61.208 - type: ndcg_at_1 value: 43.75 - type: ndcg_at_10 value: 28.224 - type: ndcg_at_100 value: 29.244999999999997 - type: ndcg_at_1000 value: 34.410000000000004 - type: ndcg_at_3 value: 33.955 - type: ndcg_at_5 value: 30.597 - type: precision_at_1 value: 54.0 - type: precision_at_10 value: 20.825 - type: precision_at_100 value: 5.462 - type: precision_at_1000 value: 1.1320000000000001 - type: precision_at_3 value: 37.0 - type: precision_at_5 value: 28.849999999999998 - type: recall_at_1 value: 6.950000000000001 - type: recall_at_10 value: 17.159 - type: recall_at_100 value: 31.657999999999998 - type: recall_at_1000 value: 49.155 - type: recall_at_3 value: 11.393 - type: recall_at_5 value: 13.568 - task: type: Retrieval dataset: name: MTEB DuRetrieval type: C-MTEB/DuRetrieval config: default split: dev revision: None metrics: - type: map_at_1 value: 16.333000000000002 - type: map_at_10 value: 44.080999999999996 - type: map_at_100 value: 47.958 - type: map_at_1000 value: 48.183 - type: map_at_3 value: 31.468 - type: map_at_5 value: 38.213 - type: mrr_at_1 value: 63.0 - type: mrr_at_10 value: 72.006 - type: mrr_at_100 value: 72.299 - type: mrr_at_1000 value: 72.313 - type: mrr_at_3 value: 70.375 - type: mrr_at_5 value: 71.33 - type: ndcg_at_1 value: 63.0 - type: ndcg_at_10 value: 56.044000000000004 - type: ndcg_at_100 value: 63.629999999999995 - type: ndcg_at_1000 value: 66.156 - type: ndcg_at_3 value: 55.85 - type: ndcg_at_5 value: 53.559 - type: precision_at_1 value: 63.0 - type: precision_at_10 value: 27.279999999999998 - type: precision_at_100 value: 4.005 - type: 
precision_at_1000 value: 0.462 - type: precision_at_3 value: 49.633 - type: precision_at_5 value: 40.6 - type: recall_at_1 value: 16.333000000000002 - type: recall_at_10 value: 57.152 - type: recall_at_100 value: 80.231 - type: recall_at_1000 value: 92.95400000000001 - type: recall_at_3 value: 34.793 - type: recall_at_5 value: 44.989000000000004 - task: type: Retrieval dataset: name: MTEB EcomRetrieval type: C-MTEB/EcomRetrieval config: default split: dev revision: None metrics: - type: map_at_1 value: 33.7 - type: map_at_10 value: 42.327999999999996 - type: map_at_100 value: 43.230000000000004 - type: map_at_1000 value: 43.274 - type: map_at_3 value: 39.883 - type: map_at_5 value: 41.178 - type: mrr_at_1 value: 33.7 - type: mrr_at_10 value: 42.327999999999996 - type: mrr_at_100 value: 43.230000000000004 - type: mrr_at_1000 value: 43.274 - type: mrr_at_3 value: 39.883 - type: mrr_at_5 value: 41.178 - type: ndcg_at_1 value: 33.7 - type: ndcg_at_10 value: 46.996 - type: ndcg_at_100 value: 51.629000000000005 - type: ndcg_at_1000 value: 52.823 - type: ndcg_at_3 value: 41.891 - type: ndcg_at_5 value: 44.232 - type: precision_at_1 value: 33.7 - type: precision_at_10 value: 6.1899999999999995 - type: precision_at_100 value: 0.8410000000000001 - type: precision_at_1000 value: 0.094 - type: precision_at_3 value: 15.9 - type: precision_at_5 value: 10.68 - type: recall_at_1 value: 33.7 - type: recall_at_10 value: 61.9 - type: recall_at_100 value: 84.1 - type: recall_at_1000 value: 93.60000000000001 - type: recall_at_3 value: 47.699999999999996 - type: recall_at_5 value: 53.400000000000006 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 44.76500000000001 - type: f1 value: 40.46330006682868 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 45.078 - type: map_at_10 value: 55.443 - type: map_at_100 value: 56.03900000000001 - type: map_at_1000 value: 56.067 - type: map_at_3 value: 53.174 - type: map_at_5 value: 54.510999999999996 - type: mrr_at_1 value: 48.575 - type: mrr_at_10 value: 59.194 - type: mrr_at_100 value: 59.760999999999996 - type: mrr_at_1000 value: 59.784000000000006 - type: mrr_at_3 value: 56.896 - type: mrr_at_5 value: 58.282000000000004 - type: ndcg_at_1 value: 48.575 - type: ndcg_at_10 value: 61.096 - type: ndcg_at_100 value: 63.94800000000001 - type: ndcg_at_1000 value: 64.68199999999999 - type: ndcg_at_3 value: 56.58 - type: ndcg_at_5 value: 58.928000000000004 - type: precision_at_1 value: 48.575 - type: precision_at_10 value: 8.18 - type: precision_at_100 value: 0.968 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 22.662 - type: precision_at_5 value: 14.881 - type: recall_at_1 value: 45.078 - type: recall_at_10 value: 75.057 - type: recall_at_100 value: 88.05199999999999 - type: recall_at_1000 value: 93.58999999999999 - type: recall_at_3 value: 62.77700000000001 - type: recall_at_5 value: 68.50699999999999 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 11.097999999999999 - type: map_at_10 value: 18.288 - type: map_at_100 value: 19.903000000000002 - type: map_at_1000 value: 20.108 - type: map_at_3 value: 15.576 - type: map_at_5 value: 16.997999999999998 - type: mrr_at_1 value: 23.302 - type: mrr_at_10 value: 30.978 - type: mrr_at_100 
value: 32.072 - type: mrr_at_1000 value: 32.15 - type: mrr_at_3 value: 28.549000000000003 - type: mrr_at_5 value: 29.931 - type: ndcg_at_1 value: 23.302 - type: ndcg_at_10 value: 24.488 - type: ndcg_at_100 value: 31.052999999999997 - type: ndcg_at_1000 value: 35.124 - type: ndcg_at_3 value: 21.215999999999998 - type: ndcg_at_5 value: 22.314999999999998 - type: precision_at_1 value: 23.302 - type: precision_at_10 value: 7.13 - type: precision_at_100 value: 1.3559999999999999 - type: precision_at_1000 value: 0.20600000000000002 - type: precision_at_3 value: 14.198 - type: precision_at_5 value: 10.895000000000001 - type: recall_at_1 value: 11.097999999999999 - type: recall_at_10 value: 30.352 - type: recall_at_100 value: 54.937999999999995 - type: recall_at_1000 value: 79.586 - type: recall_at_3 value: 19.486 - type: recall_at_5 value: 23.860999999999997 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 28.325 - type: map_at_10 value: 37.305 - type: map_at_100 value: 38.0 - type: map_at_1000 value: 38.065 - type: map_at_3 value: 35.219 - type: map_at_5 value: 36.466 - type: mrr_at_1 value: 56.650999999999996 - type: mrr_at_10 value: 63.574 - type: mrr_at_100 value: 63.966 - type: mrr_at_1000 value: 63.992000000000004 - type: mrr_at_3 value: 62.107 - type: mrr_at_5 value: 62.976 - type: ndcg_at_1 value: 56.650999999999996 - type: ndcg_at_10 value: 46.046 - type: ndcg_at_100 value: 48.916 - type: ndcg_at_1000 value: 50.410999999999994 - type: ndcg_at_3 value: 42.516999999999996 - type: ndcg_at_5 value: 44.374 - type: precision_at_1 value: 56.650999999999996 - type: precision_at_10 value: 9.392 - type: precision_at_100 value: 1.166 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_3 value: 26.068 - type: precision_at_5 value: 17.11 - type: recall_at_1 value: 28.325 - type: recall_at_10 value: 46.961999999999996 - type: recall_at_100 value: 58.318999999999996 - type: recall_at_1000 value: 68.298 - type: recall_at_3 value: 39.102 - type: recall_at_5 value: 42.775 - task: type: Classification dataset: name: MTEB IFlyTek type: C-MTEB/IFlyTek-classification config: default split: validation revision: None metrics: - type: accuracy value: 40.461716044632546 - type: f1 value: 33.890745966734315 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 72.21000000000001 - type: ap value: 66.59963731769069 - type: f1 value: 71.97616824840041 - task: type: Classification dataset: name: MTEB JDReview type: C-MTEB/JDReview-classification config: default split: test revision: None metrics: - type: accuracy value: 78.25515947467167 - type: ap value: 38.265118237185064 - type: f1 value: 70.73962826410575 - task: type: STS dataset: name: MTEB LCQMC type: C-MTEB/LCQMC config: default split: test revision: None metrics: - type: cos_sim_pearson value: 63.98362797180168 - type: cos_sim_spearman value: 71.97575564053473 - type: euclidean_pearson value: 70.56052438394708 - type: euclidean_spearman value: 72.48267176371337 - type: manhattan_pearson value: 70.7156268448442 - type: manhattan_spearman value: 72.61065396802094 - task: type: Retrieval dataset: name: MTEB MMarcoRetrieval type: C-MTEB/MMarcoRetrieval config: default split: dev revision: None metrics: - type: map_at_1 value: 55.775 - type: map_at_10 value: 65.074 - type: map_at_100 value: 65.596 - type: 
map_at_1000 value: 65.618 - type: map_at_3 value: 62.92 - type: map_at_5 value: 64.277 - type: mrr_at_1 value: 57.708000000000006 - type: mrr_at_10 value: 65.824 - type: mrr_at_100 value: 66.286 - type: mrr_at_1000 value: 66.306 - type: mrr_at_3 value: 63.871 - type: mrr_at_5 value: 65.093 - type: ndcg_at_1 value: 57.708000000000006 - type: ndcg_at_10 value: 69.309 - type: ndcg_at_100 value: 71.723 - type: ndcg_at_1000 value: 72.313 - type: ndcg_at_3 value: 65.134 - type: ndcg_at_5 value: 67.476 - type: precision_at_1 value: 57.708000000000006 - type: precision_at_10 value: 8.668 - type: precision_at_100 value: 0.989 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 24.837999999999997 - type: precision_at_5 value: 16.128999999999998 - type: recall_at_1 value: 55.775 - type: recall_at_10 value: 81.702 - type: recall_at_100 value: 92.785 - type: recall_at_1000 value: 97.425 - type: recall_at_3 value: 70.587 - type: recall_at_5 value: 76.199 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 17.771 - type: map_at_10 value: 28.16 - type: map_at_100 value: 29.363 - type: map_at_1000 value: 29.431 - type: map_at_3 value: 24.767 - type: map_at_5 value: 26.706999999999997 - type: mrr_at_1 value: 18.252 - type: mrr_at_10 value: 28.666000000000004 - type: mrr_at_100 value: 29.837000000000003 - type: mrr_at_1000 value: 29.898999999999997 - type: mrr_at_3 value: 25.308000000000003 - type: mrr_at_5 value: 27.226 - type: ndcg_at_1 value: 18.252 - type: ndcg_at_10 value: 34.176 - type: ndcg_at_100 value: 40.138 - type: ndcg_at_1000 value: 41.923 - type: ndcg_at_3 value: 27.214 - type: ndcg_at_5 value: 30.695 - type: precision_at_1 value: 18.252 - type: precision_at_10 value: 5.503 - type: precision_at_100 value: 0.8500000000000001 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 11.667 - type: precision_at_5 value: 8.754000000000001 - type: recall_at_1 value: 17.771 - type: recall_at_10 value: 52.781 - type: recall_at_100 value: 80.638 - type: recall_at_1000 value: 94.46 - type: recall_at_3 value: 33.767 - type: recall_at_5 value: 42.172 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 89.93388052895577 - type: f1 value: 89.55553145791954 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (de) type: mteb/mtop_domain config: de split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 68.42490842490842 - type: f1 value: 67.01398674117826 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (es) type: mteb/mtop_domain config: es split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 88.2121414276184 - type: f1 value: 87.61981627763988 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 85.49013466958974 - type: f1 value: 85.09758510104221 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (hi) type: mteb/mtop_domain config: hi split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 84.22732162065257 - type: f1 value: 83.24580378090367 - task: type: Classification dataset: name: MTEB 
MTOPDomainClassification (th) type: mteb/mtop_domain config: th split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 53.171790235081374 - type: f1 value: 51.93028909966765 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 66.5640674874601 - type: f1 value: 49.856876973153966 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (de) type: mteb/mtop_intent config: de split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 49.171597633136095 - type: f1 value: 32.166022205347545 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (es) type: mteb/mtop_intent config: es split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 65.71714476317545 - type: f1 value: 45.748971341625136 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 62.65267773253993 - type: f1 value: 45.904472624086026 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (hi) type: mteb/mtop_intent config: hi split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 61.8752240946576 - type: f1 value: 40.7359613185448 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (th) type: mteb/mtop_intent config: th split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 41.67088607594936 - type: f1 value: 28.12210726419673 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (af) type: mteb/amazon_massive_intent config: af split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 43.29186281102892 - type: f1 value: 41.83461350696014 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (am) type: mteb/amazon_massive_intent config: am split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 23.214525891055814 - type: f1 value: 22.364131190189962 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ar) type: mteb/amazon_massive_intent config: ar split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 53.38264963012777 - type: f1 value: 50.74546702709091 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (az) type: mteb/amazon_massive_intent config: az split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 39.55951580363147 - type: f1 value: 39.07769075741216 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (bn) type: mteb/amazon_massive_intent config: bn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 56.73839946200403 - type: f1 value: 54.36728741542025 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (cy) type: mteb/amazon_massive_intent config: cy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 39.99663752521857 - type: f1 value: 38.709817953652596 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (da) type: 
mteb/amazon_massive_intent config: da split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 46.933422999327504 - type: f1 value: 45.32022679895763 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (de) type: mteb/amazon_massive_intent config: de split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 45.820443846671154 - type: f1 value: 42.853155158197886 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (el) type: mteb/amazon_massive_intent config: el split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 37.874915938130464 - type: f1 value: 35.9849010888881 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.08944182918628 - type: f1 value: 64.5039080809391 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (es) type: mteb/amazon_massive_intent config: es split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 61.17350369872226 - type: f1 value: 60.0792530132073 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fa) type: mteb/amazon_massive_intent config: fa split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 45.652320107599195 - type: f1 value: 44.28182554287625 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fi) type: mteb/amazon_massive_intent config: fi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 40.282447881640884 - type: f1 value: 38.79927524886836 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.60591795561533 - type: f1 value: 61.01451309609411 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (he) type: mteb/amazon_massive_intent config: he split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 32.225958305312716 - type: f1 value: 30.903299940417906 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hi) type: mteb/amazon_massive_intent config: hi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.46200403496974 - type: f1 value: 57.34556231956785 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hu) type: mteb/amazon_massive_intent config: hu split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 40.907868190988566 - type: f1 value: 39.74702259997524 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hy) type: mteb/amazon_massive_intent config: hy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 29.939475453934094 - type: f1 value: 28.462353413371353 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (id) type: mteb/amazon_massive_intent config: id split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.14256893073302 - type: f1 value: 57.24600767871435 - task: type: Classification dataset: name: 
MTEB MassiveIntentClassification (is) type: mteb/amazon_massive_intent config: is split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 39.620040349697376 - type: f1 value: 38.414866180464735 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (it) type: mteb/amazon_massive_intent config: it split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 51.772024209818426 - type: f1 value: 51.05050942366993 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ja) type: mteb/amazon_massive_intent config: ja split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 53.749159381304636 - type: f1 value: 52.04563008527909 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (jv) type: mteb/amazon_massive_intent config: jv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 46.29455279085406 - type: f1 value: 43.84047527739209 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ka) type: mteb/amazon_massive_intent config: ka split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 25.107599193006045 - type: f1 value: 24.58731463875415 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (km) type: mteb/amazon_massive_intent config: km split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 27.21923335574984 - type: f1 value: 25.964338481976796 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (kn) type: mteb/amazon_massive_intent config: kn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 47.96906523201077 - type: f1 value: 45.32239408435578 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ko) type: mteb/amazon_massive_intent config: ko split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 40.53799596503026 - type: f1 value: 39.15655510771227 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (lv) type: mteb/amazon_massive_intent config: lv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 43.140551445864155 - type: f1 value: 42.12232733095163 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ml) type: mteb/amazon_massive_intent config: ml split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 53.69199731002017 - type: f1 value: 50.67085509122796 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (mn) type: mteb/amazon_massive_intent config: mn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 33.37256220578346 - type: f1 value: 33.39335560955231 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ms) type: mteb/amazon_massive_intent config: ms split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 51.94014794889038 - type: f1 value: 50.6207021226521 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (my) type: mteb/amazon_massive_intent config: my split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 25.322797579018157 - type: f1 value: 23.94164121951907 - 
task: type: Classification dataset: name: MTEB MassiveIntentClassification (nb) type: mteb/amazon_massive_intent config: nb split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 44.11903160726294 - type: f1 value: 43.016752983579536 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nl) type: mteb/amazon_massive_intent config: nl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 44.03496973772697 - type: f1 value: 42.322828283176754 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 41.63080026899798 - type: f1 value: 39.58824644978166 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pt) type: mteb/amazon_massive_intent config: pt split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 61.7350369872226 - type: f1 value: 59.956752206079386 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ro) type: mteb/amazon_massive_intent config: ro split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 45.72629455279086 - type: f1 value: 44.731249269647826 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ru) type: mteb/amazon_massive_intent config: ru split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 47.61264290517822 - type: f1 value: 45.5280995218491 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sl) type: mteb/amazon_massive_intent config: sl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 42.82784129119032 - type: f1 value: 41.37165985220223 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sq) type: mteb/amazon_massive_intent config: sq split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 43.61466039004707 - type: f1 value: 43.164498227815535 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sv) type: mteb/amazon_massive_intent config: sv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 44.64021519838602 - type: f1 value: 43.04775030948548 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sw) type: mteb/amazon_massive_intent config: sw split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 45.54808338937458 - type: f1 value: 44.011677633779975 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ta) type: mteb/amazon_massive_intent config: ta split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 51.2441156691325 - type: f1 value: 48.73592932403811 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (te) type: mteb/amazon_massive_intent config: te split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 47.43443174176195 - type: f1 value: 45.08686598891457 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (th) type: mteb/amazon_massive_intent config: th split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 36.87962340282448 - 
type: f1 value: 36.50540864756967 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tl) type: mteb/amazon_massive_intent config: tl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 45.9280430396772 - type: f1 value: 44.57216865343283 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tr) type: mteb/amazon_massive_intent config: tr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 38.591123066577 - type: f1 value: 37.886312373767446 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ur) type: mteb/amazon_massive_intent config: ur split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 51.85272360457296 - type: f1 value: 49.43461566216979 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (vi) type: mteb/amazon_massive_intent config: vi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.72225958305313 - type: f1 value: 56.95500715299434 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.74915938130464 - type: f1 value: 62.35543158488615 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-TW) type: mteb/amazon_massive_intent config: zh-TW split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.95292535305985 - type: f1 value: 59.73499569346673 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (af) type: mteb/amazon_massive_scenario config: af split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 47.42098184263618 - type: f1 value: 45.22541854557743 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (am) type: mteb/amazon_massive_scenario config: am split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 24.707464694014796 - type: f1 value: 24.033506081882468 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ar) type: mteb/amazon_massive_scenario config: ar split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.09145931405515 - type: f1 value: 62.22048940230962 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (az) type: mteb/amazon_massive_scenario config: az split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 39.25016812373907 - type: f1 value: 38.35431952425269 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (bn) type: mteb/amazon_massive_scenario config: bn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.37256220578345 - type: f1 value: 63.12728180326932 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (cy) type: mteb/amazon_massive_scenario config: cy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 39.172831203765966 - type: f1 value: 37.078841372640234 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (da) type: mteb/amazon_massive_scenario config: da split: test revision: 
7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 49.11230665770006 - type: f1 value: 46.489580286547245 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (de) type: mteb/amazon_massive_scenario config: de split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 50.7128446536651 - type: f1 value: 48.27782602378952 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (el) type: mteb/amazon_massive_scenario config: el split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 39.46536650975118 - type: f1 value: 37.4365280056047 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.26160053799597 - type: f1 value: 73.4478249967817 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (es) type: mteb/amazon_massive_scenario config: es split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.31203765971756 - type: f1 value: 68.70554437788068 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fa) type: mteb/amazon_massive_scenario config: fa split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 45.652320107599195 - type: f1 value: 44.55357745265521 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fi) type: mteb/amazon_massive_scenario config: fi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 38.94754539340955 - type: f1 value: 36.48927336173062 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.69872225958305 - type: f1 value: 68.81347966311543 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (he) type: mteb/amazon_massive_scenario config: he split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 32.131809011432416 - type: f1 value: 30.212230946937474 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hi) type: mteb/amazon_massive_scenario config: hi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 65.57498318762609 - type: f1 value: 65.16084751135229 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hu) type: mteb/amazon_massive_scenario config: hu split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 42.965702757229316 - type: f1 value: 40.575896627739105 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hy) type: mteb/amazon_massive_scenario config: hy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 32.125084061869536 - type: f1 value: 30.708056882129476 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (id) type: mteb/amazon_massive_scenario config: id split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 65.10759919300607 - type: f1 value: 64.5007800119315 - task: type: Classification dataset: name: MTEB 
MassiveScenarioClassification (is) type: mteb/amazon_massive_scenario config: is split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 40.83725622057834 - type: f1 value: 37.855774705520886 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (it) type: mteb/amazon_massive_scenario config: it split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 54.55279085406859 - type: f1 value: 52.73318944173822 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ja) type: mteb/amazon_massive_scenario config: ja split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 57.14525891055817 - type: f1 value: 55.96714177558203 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (jv) type: mteb/amazon_massive_scenario config: jv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 49.30060524546065 - type: f1 value: 47.82999154670342 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ka) type: mteb/amazon_massive_scenario config: ka split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 25.85743106926698 - type: f1 value: 24.974946990729716 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (km) type: mteb/amazon_massive_scenario config: km split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 31.180228648285137 - type: f1 value: 28.22387838219335 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (kn) type: mteb/amazon_massive_scenario config: kn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 53.00941492938802 - type: f1 value: 52.39610045092559 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ko) type: mteb/amazon_massive_scenario config: ko split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 40.24546065904505 - type: f1 value: 38.99779773215032 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (lv) type: mteb/amazon_massive_scenario config: lv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 41.88298587760592 - type: f1 value: 39.53867071594289 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ml) type: mteb/amazon_massive_scenario config: ml split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 59.078681909885674 - type: f1 value: 58.47368723772022 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (mn) type: mteb/amazon_massive_scenario config: mn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 33.33893745796907 - type: f1 value: 32.113466354321226 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ms) type: mteb/amazon_massive_scenario config: ms split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 57.454606590450574 - type: f1 value: 56.13075383338251 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (my) type: mteb/amazon_massive_scenario config: my split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 
27.19569603227976 - type: f1 value: 26.300773160344015 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nb) type: mteb/amazon_massive_scenario config: nb split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 46.78547410894418 - type: f1 value: 44.233771335183015 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nl) type: mteb/amazon_massive_scenario config: nl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 48.4196368527236 - type: f1 value: 45.55838648206857 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 41.63080026899798 - type: f1 value: 40.77775839499525 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pt) type: mteb/amazon_massive_scenario config: pt split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.408876933423 - type: f1 value: 66.7358693871042 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ro) type: mteb/amazon_massive_scenario config: ro split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 50.077336919973106 - type: f1 value: 48.572749739090014 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ru) type: mteb/amazon_massive_scenario config: ru split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 49.942837928715534 - type: f1 value: 49.34771836662566 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sl) type: mteb/amazon_massive_scenario config: sl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 43.43308675184936 - type: f1 value: 41.818008297000986 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sq) type: mteb/amazon_massive_scenario config: sq split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 44.082044384667114 - type: f1 value: 43.25002746432129 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sv) type: mteb/amazon_massive_scenario config: sv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 46.45258910558171 - type: f1 value: 44.00958237591922 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sw) type: mteb/amazon_massive_scenario config: sw split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 49.53261600537996 - type: f1 value: 48.01969699634672 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ta) type: mteb/amazon_massive_scenario config: ta split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 56.792199058507066 - type: f1 value: 56.54421925671813 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (te) type: mteb/amazon_massive_scenario config: te split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 54.0114324142569 - type: f1 value: 52.29830350891558 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (th) type: mteb/amazon_massive_scenario config: th split: 
test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 38.584398117014125 - type: f1 value: 36.551426239639575 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tl) type: mteb/amazon_massive_scenario config: tl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 48.07330195023538 - type: f1 value: 46.463553675519975 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tr) type: mteb/amazon_massive_scenario config: tr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 40.645595158036315 - type: f1 value: 40.21280676607986 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ur) type: mteb/amazon_massive_scenario config: ur split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 57.74714189643577 - type: f1 value: 56.8673027258351 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (vi) type: mteb/amazon_massive_scenario config: vi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 65.83389374579693 - type: f1 value: 66.11273939782248 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.38735709482181 - type: f1 value: 72.89481650271512 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-TW) type: mteb/amazon_massive_scenario config: zh-TW split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.63685272360458 - type: f1 value: 70.72285841806938 - task: type: Retrieval dataset: name: MTEB MedicalRetrieval type: C-MTEB/MedicalRetrieval config: default split: dev revision: None metrics: - type: map_at_1 value: 30.8 - type: map_at_10 value: 34.782000000000004 - type: map_at_100 value: 35.333999999999996 - type: map_at_1000 value: 35.405 - type: map_at_3 value: 34.0 - type: map_at_5 value: 34.345 - type: mrr_at_1 value: 30.8 - type: mrr_at_10 value: 34.782000000000004 - type: mrr_at_100 value: 35.333999999999996 - type: mrr_at_1000 value: 35.405 - type: mrr_at_3 value: 34.0 - type: mrr_at_5 value: 34.345 - type: ndcg_at_1 value: 30.8 - type: ndcg_at_10 value: 36.675000000000004 - type: ndcg_at_100 value: 39.633 - type: ndcg_at_1000 value: 41.904 - type: ndcg_at_3 value: 35.028 - type: ndcg_at_5 value: 35.648 - type: precision_at_1 value: 30.8 - type: precision_at_10 value: 4.26 - type: precision_at_100 value: 0.571 - type: precision_at_1000 value: 0.076 - type: precision_at_3 value: 12.667 - type: precision_at_5 value: 7.9 - type: recall_at_1 value: 30.8 - type: recall_at_10 value: 42.6 - type: recall_at_100 value: 57.099999999999994 - type: recall_at_1000 value: 75.8 - type: recall_at_3 value: 38.0 - type: recall_at_5 value: 39.5 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 27.84536559870361 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 27.714921841841605 - task: type: Reranking dataset: name: MTEB 
MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 30.52145905910035 - type: mrr value: 31.551577344311845 - task: type: Reranking dataset: name: MTEB MMarcoReranking type: C-MTEB/Mmarco-reranking config: default split: dev revision: None metrics: - type: map value: 23.6853605350459 - type: mrr value: 22.341269841269842 - task: type: Classification dataset: name: MTEB MultilingualSentiment type: C-MTEB/MultilingualSentiment-classification config: default split: validation revision: None metrics: - type: accuracy value: 63.16666666666666 - type: f1 value: 63.09453591106835 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 3.7060000000000004 - type: map_at_10 value: 9.032 - type: map_at_100 value: 11.395 - type: map_at_1000 value: 12.713 - type: map_at_3 value: 6.502 - type: map_at_5 value: 7.8100000000000005 - type: mrr_at_1 value: 37.461 - type: mrr_at_10 value: 45.839999999999996 - type: mrr_at_100 value: 46.513 - type: mrr_at_1000 value: 46.571 - type: mrr_at_3 value: 43.55 - type: mrr_at_5 value: 44.773 - type: ndcg_at_1 value: 35.913000000000004 - type: ndcg_at_10 value: 27.340999999999998 - type: ndcg_at_100 value: 25.197000000000003 - type: ndcg_at_1000 value: 34.632000000000005 - type: ndcg_at_3 value: 31.952 - type: ndcg_at_5 value: 30.244 - type: precision_at_1 value: 37.461 - type: precision_at_10 value: 20.495 - type: precision_at_100 value: 6.551 - type: precision_at_1000 value: 1.966 - type: precision_at_3 value: 30.753000000000004 - type: precision_at_5 value: 26.935 - type: recall_at_1 value: 3.7060000000000004 - type: recall_at_10 value: 12.958 - type: recall_at_100 value: 26.582 - type: recall_at_1000 value: 59.724 - type: recall_at_3 value: 7.503 - type: recall_at_5 value: 9.808 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 22.201999999999998 - type: map_at_10 value: 33.76 - type: map_at_100 value: 34.867 - type: map_at_1000 value: 34.92 - type: map_at_3 value: 30.233999999999998 - type: map_at_5 value: 32.291 - type: mrr_at_1 value: 25.232 - type: mrr_at_10 value: 36.239 - type: mrr_at_100 value: 37.119 - type: mrr_at_1000 value: 37.162 - type: mrr_at_3 value: 33.213 - type: mrr_at_5 value: 35.02 - type: ndcg_at_1 value: 25.232 - type: ndcg_at_10 value: 40.046 - type: ndcg_at_100 value: 45.025 - type: ndcg_at_1000 value: 46.459 - type: ndcg_at_3 value: 33.343 - type: ndcg_at_5 value: 36.801 - type: precision_at_1 value: 25.232 - type: precision_at_10 value: 6.796 - type: precision_at_100 value: 0.959 - type: precision_at_1000 value: 0.109 - type: precision_at_3 value: 15.276 - type: precision_at_5 value: 11.17 - type: recall_at_1 value: 22.201999999999998 - type: recall_at_10 value: 56.733 - type: recall_at_100 value: 79.041 - type: recall_at_1000 value: 90.08500000000001 - type: recall_at_3 value: 39.412000000000006 - type: recall_at_5 value: 47.352 - task: type: PairClassification dataset: name: MTEB Ocnli type: C-MTEB/OCNLI config: default split: validation revision: None metrics: - type: cos_sim_accuracy value: 62.53383865728208 - type: cos_sim_ap value: 66.3197921045625 - type: cos_sim_f1 value: 69.3385214007782 - type: cos_sim_precision value: 54.89833641404805 - type: cos_sim_recall value: 94.08658922914466 - type: dot_accuracy value: 59.7184623714131 - type: dot_ap value: 61.53586693000539 - 
type: dot_f1 value: 68.26923076923077 - type: dot_precision value: 52.53272623790552 - type: dot_recall value: 97.46568109820485 - type: euclidean_accuracy value: 62.912831618841366 - type: euclidean_ap value: 67.15479155849464 - type: euclidean_f1 value: 70.64071370640713 - type: euclidean_precision value: 57.34035549703752 - type: euclidean_recall value: 91.97465681098205 - type: manhattan_accuracy value: 63.50839198700595 - type: manhattan_ap value: 67.55807251483273 - type: manhattan_f1 value: 70.58356490670901 - type: manhattan_precision value: 56.55216284987278 - type: manhattan_recall value: 93.8753959873284 - type: max_accuracy value: 63.50839198700595 - type: max_ap value: 67.55807251483273 - type: max_f1 value: 70.64071370640713 - task: type: Classification dataset: name: MTEB OnlineShopping type: C-MTEB/OnlineShopping-classification config: default split: test revision: None metrics: - type: accuracy value: 87.11 - type: ap value: 84.20351278644551 - type: f1 value: 87.10043002123766 - task: type: STS dataset: name: MTEB PAWSX type: C-MTEB/PAWSX config: default split: test revision: None metrics: - type: cos_sim_pearson value: 13.050279647770473 - type: cos_sim_spearman value: 14.227909232579874 - type: euclidean_pearson value: 16.372629300358096 - type: euclidean_spearman value: 14.68140021547196 - type: manhattan_pearson value: 16.266960163157336 - type: manhattan_spearman value: 14.627750758965616 - task: type: STS dataset: name: MTEB QBQTC type: C-MTEB/QBQTC config: default split: test revision: None metrics: - type: cos_sim_pearson value: 30.56036276943463 - type: cos_sim_spearman value: 32.918859292204 - type: euclidean_pearson value: 31.679745438037195 - type: euclidean_spearman value: 33.68461814972644 - type: manhattan_pearson value: 31.994557954084563 - type: manhattan_spearman value: 33.97758185204816 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 68.327 - type: map_at_10 value: 81.938 - type: map_at_100 value: 82.581 - type: map_at_1000 value: 82.60300000000001 - type: map_at_3 value: 78.89399999999999 - type: map_at_5 value: 80.816 - type: mrr_at_1 value: 78.75 - type: mrr_at_10 value: 85.302 - type: mrr_at_100 value: 85.432 - type: mrr_at_1000 value: 85.434 - type: mrr_at_3 value: 84.128 - type: mrr_at_5 value: 84.91199999999999 - type: ndcg_at_1 value: 78.74 - type: ndcg_at_10 value: 86.042 - type: ndcg_at_100 value: 87.468 - type: ndcg_at_1000 value: 87.641 - type: ndcg_at_3 value: 82.799 - type: ndcg_at_5 value: 84.603 - type: precision_at_1 value: 78.74 - type: precision_at_10 value: 13.071 - type: precision_at_100 value: 1.508 - type: precision_at_1000 value: 0.156 - type: precision_at_3 value: 36.08 - type: precision_at_5 value: 23.87 - type: recall_at_1 value: 68.327 - type: recall_at_10 value: 93.962 - type: recall_at_100 value: 99.054 - type: recall_at_1000 value: 99.9 - type: recall_at_3 value: 84.788 - type: recall_at_5 value: 89.73 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 41.337989152483956 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 51.2046136625677 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default 
split: test revision: None metrics: - type: map_at_1 value: 3.763 - type: map_at_10 value: 8.785 - type: map_at_100 value: 10.266 - type: map_at_1000 value: 10.506 - type: map_at_3 value: 6.551 - type: map_at_5 value: 7.670000000000001 - type: mrr_at_1 value: 18.5 - type: mrr_at_10 value: 27.771 - type: mrr_at_100 value: 28.842000000000002 - type: mrr_at_1000 value: 28.913 - type: mrr_at_3 value: 24.767 - type: mrr_at_5 value: 26.457000000000004 - type: ndcg_at_1 value: 18.5 - type: ndcg_at_10 value: 15.312000000000001 - type: ndcg_at_100 value: 21.599 - type: ndcg_at_1000 value: 26.473999999999997 - type: ndcg_at_3 value: 14.821000000000002 - type: ndcg_at_5 value: 12.836 - type: precision_at_1 value: 18.5 - type: precision_at_10 value: 7.779999999999999 - type: precision_at_100 value: 1.69 - type: precision_at_1000 value: 0.28700000000000003 - type: precision_at_3 value: 13.667000000000002 - type: precision_at_5 value: 11.08 - type: recall_at_1 value: 3.763 - type: recall_at_10 value: 15.798000000000002 - type: recall_at_100 value: 34.313 - type: recall_at_1000 value: 58.318000000000005 - type: recall_at_3 value: 8.312999999999999 - type: recall_at_5 value: 11.238 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 84.33402689861924 - type: cos_sim_spearman value: 78.52738315932625 - type: euclidean_pearson value: 80.800678573052 - type: euclidean_spearman value: 77.86666946799137 - type: manhattan_pearson value: 81.03106755866989 - type: manhattan_spearman value: 78.0676393879487 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 81.86998503723257 - type: cos_sim_spearman value: 74.07437934108376 - type: euclidean_pearson value: 80.91626452869946 - type: euclidean_spearman value: 76.88419802521403 - type: manhattan_pearson value: 81.50196980117957 - type: manhattan_spearman value: 77.2456891009073 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 81.19616084290932 - type: cos_sim_spearman value: 81.80834431353927 - type: euclidean_pearson value: 81.25429737195789 - type: euclidean_spearman value: 82.00934127307355 - type: manhattan_pearson value: 81.67403556759655 - type: manhattan_spearman value: 82.42359818976753 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 81.50884725941148 - type: cos_sim_spearman value: 77.0493522248929 - type: euclidean_pearson value: 79.15856111178543 - type: euclidean_spearman value: 77.24292975474096 - type: manhattan_pearson value: 79.22641788874807 - type: manhattan_spearman value: 77.37101663798234 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 83.75652767224308 - type: cos_sim_spearman value: 84.61113973428688 - type: euclidean_pearson value: 83.73646379542737 - type: euclidean_spearman value: 84.47126779405652 - type: manhattan_pearson value: 83.89617307570857 - type: manhattan_spearman value: 84.6073703393468 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default 
split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 81.16302763567215 - type: cos_sim_spearman value: 83.08923353997561 - type: euclidean_pearson value: 80.08338016232464 - type: euclidean_spearman value: 80.40181608724076 - type: manhattan_pearson value: 80.02358856208708 - type: manhattan_spearman value: 80.30032329982274 - task: type: STS dataset: name: MTEB STS17 (ko-ko) type: mteb/sts17-crosslingual-sts config: ko-ko split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 56.45965932801117 - type: cos_sim_spearman value: 57.28270045199294 - type: euclidean_pearson value: 57.3615782157595 - type: euclidean_spearman value: 56.94348399074146 - type: manhattan_pearson value: 57.9426531718209 - type: manhattan_spearman value: 57.61844831263504 - task: type: STS dataset: name: MTEB STS17 (ar-ar) type: mteb/sts17-crosslingual-sts config: ar-ar split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 80.2973366536596 - type: cos_sim_spearman value: 80.60259304741632 - type: euclidean_pearson value: 78.30266089843892 - type: euclidean_spearman value: 78.06065126709282 - type: manhattan_pearson value: 78.61370380599344 - type: manhattan_spearman value: 78.45738598619143 - task: type: STS dataset: name: MTEB STS17 (en-ar) type: mteb/sts17-crosslingual-sts config: en-ar split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 72.35020162217042 - type: cos_sim_spearman value: 72.59857902847162 - type: euclidean_pearson value: 65.03547299350457 - type: euclidean_spearman value: 64.16617373109685 - type: manhattan_pearson value: 65.68996569454929 - type: manhattan_spearman value: 64.88542254595046 - task: type: STS dataset: name: MTEB STS17 (en-de) type: mteb/sts17-crosslingual-sts config: en-de split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 39.766484883595425 - type: cos_sim_spearman value: 40.3429946300341 - type: euclidean_pearson value: 39.47427150040957 - type: euclidean_spearman value: 39.072525589079696 - type: manhattan_pearson value: 40.56345338078474 - type: manhattan_spearman value: 40.444629078138036 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 88.83798941013089 - type: cos_sim_spearman value: 89.15159294402415 - type: euclidean_pearson value: 87.9810618414505 - type: euclidean_spearman value: 87.90818542026535 - type: manhattan_pearson value: 88.06116863048229 - type: manhattan_spearman value: 88.00182442010694 - task: type: STS dataset: name: MTEB STS17 (en-tr) type: mteb/sts17-crosslingual-sts config: en-tr split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 7.416028059666332 - type: cos_sim_spearman value: 6.792945857606915 - type: euclidean_pearson value: 11.485332917116061 - type: euclidean_spearman value: 9.793932873423419 - type: manhattan_pearson value: 9.148469412558393 - type: manhattan_spearman value: 7.803450524017845 - task: type: STS dataset: name: MTEB STS17 (es-en) type: mteb/sts17-crosslingual-sts config: es-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 80.16381852152489 - type: cos_sim_spearman value: 81.80324089694928 - type: euclidean_pearson value: 
76.41433274302783 - type: euclidean_spearman value: 77.15238726996526 - type: manhattan_pearson value: 77.08610108551368 - type: manhattan_spearman value: 77.99971298324311 - task: type: STS dataset: name: MTEB STS17 (es-es) type: mteb/sts17-crosslingual-sts config: es-es split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 85.11032272383456 - type: cos_sim_spearman value: 85.64528002839239 - type: euclidean_pearson value: 85.54301672487198 - type: euclidean_spearman value: 84.21727806530393 - type: manhattan_pearson value: 85.57145576255618 - type: manhattan_spearman value: 84.07127479487694 - task: type: STS dataset: name: MTEB STS17 (fr-en) type: mteb/sts17-crosslingual-sts config: fr-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 79.73703272230806 - type: cos_sim_spearman value: 79.9424510113259 - type: euclidean_pearson value: 77.64485173960838 - type: euclidean_spearman value: 77.54693014468836 - type: manhattan_pearson value: 77.96911553781774 - type: manhattan_spearman value: 77.87266778206842 - task: type: STS dataset: name: MTEB STS17 (it-en) type: mteb/sts17-crosslingual-sts config: it-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 37.260672179617515 - type: cos_sim_spearman value: 34.80434004457536 - type: euclidean_pearson value: 38.55806751295782 - type: euclidean_spearman value: 36.129700913023115 - type: manhattan_pearson value: 40.74316244582763 - type: manhattan_spearman value: 38.60667540883322 - task: type: STS dataset: name: MTEB STS17 (nl-en) type: mteb/sts17-crosslingual-sts config: nl-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 38.038311386574456 - type: cos_sim_spearman value: 33.576193063894195 - type: euclidean_pearson value: 33.712663568034316 - type: euclidean_spearman value: 32.560617375956916 - type: manhattan_pearson value: 35.60457167895616 - type: manhattan_spearman value: 34.63036216555931 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 61.01583638162472 - type: cos_sim_spearman value: 62.92281428893316 - type: euclidean_pearson value: 62.939630289711815 - type: euclidean_spearman value: 64.15209661725994 - type: manhattan_pearson value: 64.24261705090608 - type: manhattan_spearman value: 64.78283158164017 - task: type: STS dataset: name: MTEB STS22 (de) type: mteb/sts22-crosslingual-sts config: de split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 21.529440799555704 - type: cos_sim_spearman value: 26.62727800620091 - type: euclidean_pearson value: 16.837244578590123 - type: euclidean_spearman value: 25.012107525591425 - type: manhattan_pearson value: 18.445531476179454 - type: manhattan_spearman value: 27.070240480795153 - task: type: STS dataset: name: MTEB STS22 (es) type: mteb/sts22-crosslingual-sts config: es split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 49.655500043363624 - type: cos_sim_spearman value: 56.31248457847469 - type: euclidean_pearson value: 48.787154598246616 - type: euclidean_spearman value: 52.90454409579225 - type: manhattan_pearson value: 55.392327232639836 - type: manhattan_spearman value: 57.3726886727899 - task: type: STS dataset: name: MTEB STS22 
(pl) type: mteb/sts22-crosslingual-sts config: pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 2.9137753115190304 - type: cos_sim_spearman value: 15.062114976486532 - type: euclidean_pearson value: -2.034404984782681 - type: euclidean_spearman value: 14.683481835467338 - type: manhattan_pearson value: -0.22204468354050833 - type: manhattan_spearman value: 15.526420635759743 - task: type: STS dataset: name: MTEB STS22 (tr) type: mteb/sts22-crosslingual-sts config: tr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 4.3616620418459915 - type: cos_sim_spearman value: 22.11078316878173 - type: euclidean_pearson value: 15.111514877123403 - type: euclidean_spearman value: 21.232869644925973 - type: manhattan_pearson value: 19.71276925909529 - type: manhattan_spearman value: 25.704469862313466 - task: type: STS dataset: name: MTEB STS22 (ar) type: mteb/sts22-crosslingual-sts config: ar split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 44.25888840250496 - type: cos_sim_spearman value: 54.82352971568842 - type: euclidean_pearson value: 48.00261414068268 - type: euclidean_spearman value: 53.3721608428832 - type: manhattan_pearson value: 50.6442021864215 - type: manhattan_spearman value: 55.352339945631954 - task: type: STS dataset: name: MTEB STS22 (ru) type: mteb/sts22-crosslingual-sts config: ru split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 0.08233514100531068 - type: cos_sim_spearman value: 28.771721168834276 - type: euclidean_pearson value: 10.783524938899138 - type: euclidean_spearman value: 24.67831010432439 - type: manhattan_pearson value: 16.98415610436092 - type: manhattan_spearman value: 25.81670115913176 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 36.86678706245425 - type: cos_sim_spearman value: 40.9736918674032 - type: euclidean_pearson value: 26.42365971768556 - type: euclidean_spearman value: 30.479818788692054 - type: manhattan_pearson value: 41.08694658968258 - type: manhattan_spearman value: 45.080877435751084 - task: type: STS dataset: name: MTEB STS22 (fr) type: mteb/sts22-crosslingual-sts config: fr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 75.98114217777062 - type: cos_sim_spearman value: 78.7295845730892 - type: euclidean_pearson value: 76.99433076522276 - type: euclidean_spearman value: 79.71421663258973 - type: manhattan_pearson value: 78.65656344143478 - type: manhattan_spearman value: 80.60968909615123 - task: type: STS dataset: name: MTEB STS22 (de-en) type: mteb/sts22-crosslingual-sts config: de-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 47.33261398683554 - type: cos_sim_spearman value: 49.547954534754666 - type: euclidean_pearson value: 48.23362592012922 - type: euclidean_spearman value: 49.17277986369927 - type: manhattan_pearson value: 49.06792311033889 - type: manhattan_spearman value: 51.27529282708198 - task: type: STS dataset: name: MTEB STS22 (es-en) type: mteb/sts22-crosslingual-sts config: es-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 66.10070360470756 - type: cos_sim_spearman value: 71.03150249855938 - type: 
euclidean_pearson value: 67.05372897033872 - type: euclidean_spearman value: 69.73291838049877 - type: manhattan_pearson value: 70.34740916239467 - type: manhattan_spearman value: 72.40053406658815 - task: type: STS dataset: name: MTEB STS22 (it) type: mteb/sts22-crosslingual-sts config: it split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 56.581317404418904 - type: cos_sim_spearman value: 62.61318021096797 - type: euclidean_pearson value: 57.4403074342031 - type: euclidean_spearman value: 60.04897783631694 - type: manhattan_pearson value: 58.441729285803014 - type: manhattan_spearman value: 60.70510326005463 - task: type: STS dataset: name: MTEB STS22 (pl-en) type: mteb/sts22-crosslingual-sts config: pl-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 47.064414464023905 - type: cos_sim_spearman value: 43.716659075869465 - type: euclidean_pearson value: 43.81699490724336 - type: euclidean_spearman value: 43.784380306563726 - type: manhattan_pearson value: 53.664583329563264 - type: manhattan_spearman value: 45.399271192350135 - task: type: STS dataset: name: MTEB STS22 (zh-en) type: mteb/sts22-crosslingual-sts config: zh-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 63.585903017365055 - type: cos_sim_spearman value: 63.90147651068459 - type: euclidean_pearson value: 50.21918146173064 - type: euclidean_spearman value: 53.02530618040754 - type: manhattan_pearson value: 62.7472089813117 - type: manhattan_spearman value: 63.90440606248973 - task: type: STS dataset: name: MTEB STS22 (es-it) type: mteb/sts22-crosslingual-sts config: es-it split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 59.06715980430013 - type: cos_sim_spearman value: 61.2993294424547 - type: euclidean_pearson value: 53.67335552456426 - type: euclidean_spearman value: 55.32940583953816 - type: manhattan_pearson value: 58.08097600675386 - type: manhattan_spearman value: 57.1966250850173 - task: type: STS dataset: name: MTEB STS22 (de-fr) type: mteb/sts22-crosslingual-sts config: de-fr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 18.94271219818519 - type: cos_sim_spearman value: 22.355519793818935 - type: euclidean_pearson value: 14.336479135636187 - type: euclidean_spearman value: 18.862751864788684 - type: manhattan_pearson value: 14.481730447681057 - type: manhattan_spearman value: 17.572142526671563 - task: type: STS dataset: name: MTEB STS22 (de-pl) type: mteb/sts22-crosslingual-sts config: de-pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 20.644357537446464 - type: cos_sim_spearman value: 35.32083671407284 - type: euclidean_pearson value: 28.24720906134992 - type: euclidean_spearman value: 46.437508077438395 - type: manhattan_pearson value: 42.09834718968137 - type: manhattan_spearman value: 53.02744622635869 - task: type: STS dataset: name: MTEB STS22 (fr-pl) type: mteb/sts22-crosslingual-sts config: fr-pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 71.84986730523782 - type: cos_sim_spearman value: 73.24670207647144 - type: euclidean_pearson value: 62.450055500805604 - type: euclidean_spearman value: 61.97797868009122 - type: manhattan_pearson value: 56.32083882980946 - type: manhattan_spearman value: 39.440531887330785 - task: 
type: STS dataset: name: MTEB STSB type: C-MTEB/STSB config: default split: test revision: None metrics: - type: cos_sim_pearson value: 78.11479317838469 - type: cos_sim_spearman value: 77.7709743500025 - type: euclidean_pearson value: 78.83834281752932 - type: euclidean_spearman value: 78.21978829646487 - type: manhattan_pearson value: 79.36075578990533 - type: manhattan_spearman value: 78.72958965446072 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 82.92539499228975 - type: cos_sim_spearman value: 83.63025944536395 - type: euclidean_pearson value: 81.54744230098872 - type: euclidean_spearman value: 81.08707735758752 - type: manhattan_pearson value: 81.50252353111375 - type: manhattan_spearman value: 81.00641210322735 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 75.12690809334019 - type: mrr value: 92.28846951886169 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 47.15 - type: map_at_10 value: 56.748 - type: map_at_100 value: 57.528999999999996 - type: map_at_1000 value: 57.56400000000001 - type: map_at_3 value: 53.691 - type: map_at_5 value: 55.656000000000006 - type: mrr_at_1 value: 49.667 - type: mrr_at_10 value: 58.24700000000001 - type: mrr_at_100 value: 58.855000000000004 - type: mrr_at_1000 value: 58.888 - type: mrr_at_3 value: 55.72200000000001 - type: mrr_at_5 value: 57.272 - type: ndcg_at_1 value: 49.667 - type: ndcg_at_10 value: 61.739 - type: ndcg_at_100 value: 65.17399999999999 - type: ndcg_at_1000 value: 66.122 - type: ndcg_at_3 value: 56.266000000000005 - type: ndcg_at_5 value: 59.357000000000006 - type: precision_at_1 value: 49.667 - type: precision_at_10 value: 8.5 - type: precision_at_100 value: 1.04 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 22.111 - type: precision_at_5 value: 15.133 - type: recall_at_1 value: 47.15 - type: recall_at_10 value: 75.52799999999999 - type: recall_at_100 value: 91.167 - type: recall_at_1000 value: 98.667 - type: recall_at_3 value: 60.978 - type: recall_at_5 value: 68.839 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.71188118811881 - type: cos_sim_ap value: 92.0858173884619 - type: cos_sim_f1 value: 85.48864758144126 - type: cos_sim_precision value: 84.40545808966861 - type: cos_sim_recall value: 86.6 - type: dot_accuracy value: 99.57722772277228 - type: dot_ap value: 83.92226742515372 - type: dot_f1 value: 78.85091629519565 - type: dot_precision value: 78.11579980372915 - type: dot_recall value: 79.60000000000001 - type: euclidean_accuracy value: 99.6970297029703 - type: euclidean_ap value: 91.69378964699095 - type: euclidean_f1 value: 85.08771929824562 - type: euclidean_precision value: 82.98479087452472 - type: euclidean_recall value: 87.3 - type: manhattan_accuracy value: 99.7019801980198 - type: manhattan_ap value: 92.00969741996086 - type: manhattan_f1 value: 84.95752123938031 - type: manhattan_precision value: 84.91508491508492 - type: manhattan_recall value: 85.0 - type: max_accuracy value: 
99.71188118811881 - type: max_ap value: 92.0858173884619 - type: max_f1 value: 85.48864758144126 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 54.50675991473899 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 31.12415042272221 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 47.37961638353922 - type: mrr value: 48.04425558102029 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 31.358583236464177 - type: cos_sim_spearman value: 32.06044850511017 - type: dot_pearson value: 30.36343303587471 - type: dot_spearman value: 30.303932242144704 - task: type: Reranking dataset: name: MTEB T2Reranking type: C-MTEB/T2Reranking config: default split: dev revision: None metrics: - type: map value: 63.73951666189072 - type: mrr value: 73.54706021429108 - task: type: Retrieval dataset: name: MTEB T2Retrieval type: C-MTEB/T2Retrieval config: default split: dev revision: None metrics: - type: map_at_1 value: 16.892 - type: map_at_10 value: 40.215 - type: map_at_100 value: 43.9 - type: map_at_1000 value: 44.185 - type: map_at_3 value: 30.008000000000003 - type: map_at_5 value: 35.465 - type: mrr_at_1 value: 63.931000000000004 - type: mrr_at_10 value: 70.35 - type: mrr_at_100 value: 70.762 - type: mrr_at_1000 value: 70.784 - type: mrr_at_3 value: 68.863 - type: mrr_at_5 value: 69.758 - type: ndcg_at_1 value: 63.931000000000004 - type: ndcg_at_10 value: 51.573 - type: ndcg_at_100 value: 59.067 - type: ndcg_at_1000 value: 62.388 - type: ndcg_at_3 value: 55.422000000000004 - type: ndcg_at_5 value: 52.322 - type: precision_at_1 value: 63.931000000000004 - type: precision_at_10 value: 25.373 - type: precision_at_100 value: 3.894 - type: precision_at_1000 value: 0.47400000000000003 - type: precision_at_3 value: 48.083 - type: precision_at_5 value: 38.513 - type: recall_at_1 value: 16.892 - type: recall_at_10 value: 49.945 - type: recall_at_100 value: 73.41499999999999 - type: recall_at_1000 value: 89.776 - type: recall_at_3 value: 32.544000000000004 - type: recall_at_5 value: 40.501 - task: type: Classification dataset: name: MTEB TNews type: C-MTEB/TNews-classification config: default split: validation revision: None metrics: - type: accuracy value: 44.153999999999996 - type: f1 value: 42.69123774230511 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.22300000000000003 - type: map_at_10 value: 1.7999999999999998 - type: map_at_100 value: 9.098 - type: map_at_1000 value: 20.59 - type: map_at_3 value: 0.6459999999999999 - type: map_at_5 value: 1.006 - type: mrr_at_1 value: 84.0 - type: mrr_at_10 value: 91.5 - type: mrr_at_100 value: 91.5 - type: mrr_at_1000 value: 91.5 - type: mrr_at_3 value: 91.0 - type: mrr_at_5 value: 91.5 - type: ndcg_at_1 value: 80.0 - type: ndcg_at_10 value: 72.992 - type: ndcg_at_100 value: 51.778999999999996 - type: ndcg_at_1000 value: 
44.473 - type: ndcg_at_3 value: 77.531 - type: ndcg_at_5 value: 74.685 - type: precision_at_1 value: 84.0 - type: precision_at_10 value: 78.60000000000001 - type: precision_at_100 value: 52.800000000000004 - type: precision_at_1000 value: 19.736 - type: precision_at_3 value: 83.333 - type: precision_at_5 value: 80.0 - type: recall_at_1 value: 0.22300000000000003 - type: recall_at_10 value: 2.016 - type: recall_at_100 value: 12.21 - type: recall_at_1000 value: 41.427 - type: recall_at_3 value: 0.6839999999999999 - type: recall_at_5 value: 1.083 - task: type: BitextMining dataset: name: MTEB Tatoeba (sqi-eng) type: mteb/tatoeba-bitext-mining config: sqi-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 11.0 - type: f1 value: 8.487309997179562 - type: precision value: 7.935185890268856 - type: recall value: 11.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (fry-eng) type: mteb/tatoeba-bitext-mining config: fry-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 23.699421965317917 - type: f1 value: 18.09982567208001 - type: precision value: 16.582017825552963 - type: recall value: 23.699421965317917 - task: type: BitextMining dataset: name: MTEB Tatoeba (kur-eng) type: mteb/tatoeba-bitext-mining config: kur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.780487804878048 - type: f1 value: 6.484836753129436 - type: precision value: 5.916220801747723 - type: recall value: 8.780487804878048 - task: type: BitextMining dataset: name: MTEB Tatoeba (tur-eng) type: mteb/tatoeba-bitext-mining config: tur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.0 - type: f1 value: 3.493223480735001 - type: precision value: 3.1492116349139385 - type: recall value: 5.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (deu-eng) type: mteb/tatoeba-bitext-mining config: deu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 33.6 - type: f1 value: 29.339340352229065 - type: precision value: 27.997920626374693 - type: recall value: 33.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (nld-eng) type: mteb/tatoeba-bitext-mining config: nld-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 20.200000000000003 - type: f1 value: 16.330981736231458 - type: precision value: 15.250949969794044 - type: recall value: 20.200000000000003 - task: type: BitextMining dataset: name: MTEB Tatoeba (ron-eng) type: mteb/tatoeba-bitext-mining config: ron-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 19.6 - type: f1 value: 14.951120083366323 - type: precision value: 13.617335362707001 - type: recall value: 19.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (ang-eng) type: mteb/tatoeba-bitext-mining config: ang-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 20.149253731343283 - type: f1 value: 13.312899786780385 - type: precision value: 11.979388770433545 - type: recall value: 20.149253731343283 - task: type: BitextMining dataset: name: MTEB Tatoeba (ido-eng) type: mteb/tatoeba-bitext-mining config: ido-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 31.4 - type: f1 value: 26.21323201417634 - type: precision value: 24.607830064672168 - type: recall value: 
31.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (jav-eng) type: mteb/tatoeba-bitext-mining config: jav-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 18.048780487804876 - type: f1 value: 14.347798542920492 - type: precision value: 13.301672920575362 - type: recall value: 18.048780487804876 - task: type: BitextMining dataset: name: MTEB Tatoeba (isl-eng) type: mteb/tatoeba-bitext-mining config: isl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.2 - type: f1 value: 3.2713297295122503 - type: precision value: 2.978548911585725 - type: recall value: 5.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (slv-eng) type: mteb/tatoeba-bitext-mining config: slv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 7.411907654921021 - type: f1 value: 5.412915976323278 - type: precision value: 4.975402373122839 - type: recall value: 7.411907654921021 - task: type: BitextMining dataset: name: MTEB Tatoeba (cym-eng) type: mteb/tatoeba-bitext-mining config: cym-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.521739130434783 - type: f1 value: 5.871393789897329 - type: precision value: 5.350472658912557 - type: recall value: 8.521739130434783 - task: type: BitextMining dataset: name: MTEB Tatoeba (kaz-eng) type: mteb/tatoeba-bitext-mining config: kaz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 1.565217391304348 - type: f1 value: 0.7422394530145001 - type: precision value: 0.7201734373569025 - type: recall value: 1.565217391304348 - task: type: BitextMining dataset: name: MTEB Tatoeba (est-eng) type: mteb/tatoeba-bitext-mining config: est-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.3 - type: f1 value: 3.0838354401589694 - type: precision value: 2.709942839090994 - type: recall value: 5.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (heb-eng) type: mteb/tatoeba-bitext-mining config: heb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.8 - type: f1 value: 0.24583802742178057 - type: precision value: 0.18710578268453032 - type: recall value: 0.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (gla-eng) type: mteb/tatoeba-bitext-mining config: gla-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 4.945717732207479 - type: f1 value: 2.7266734043909437 - type: precision value: 2.3247505400014186 - type: recall value: 4.945717732207479 - task: type: BitextMining dataset: name: MTEB Tatoeba (mar-eng) type: mteb/tatoeba-bitext-mining config: mar-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 54.2 - type: f1 value: 47.22780366692132 - type: precision value: 44.740178571428565 - type: recall value: 54.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (lat-eng) type: mteb/tatoeba-bitext-mining config: lat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 25.8 - type: f1 value: 19.547406382656526 - type: precision value: 17.80766233766234 - type: recall value: 25.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (bel-eng) type: mteb/tatoeba-bitext-mining config: bel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: 
accuracy value: 4.9 - type: f1 value: 3.283031457969928 - type: precision value: 3.0361515007649467 - type: recall value: 4.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (pms-eng) type: mteb/tatoeba-bitext-mining config: pms-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 22.476190476190478 - type: f1 value: 17.494204011570957 - type: precision value: 16.16236240785113 - type: recall value: 22.476190476190478 - task: type: BitextMining dataset: name: MTEB Tatoeba (gle-eng) type: mteb/tatoeba-bitext-mining config: gle-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 6.3 - type: f1 value: 3.461898170471662 - type: precision value: 2.975546957350575 - type: recall value: 6.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (pes-eng) type: mteb/tatoeba-bitext-mining config: pes-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.6 - type: f1 value: 5.874235156578609 - type: precision value: 5.201352547725499 - type: recall value: 8.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (nob-eng) type: mteb/tatoeba-bitext-mining config: nob-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 15.2 - type: f1 value: 11.908986787697534 - type: precision value: 11.090628985937808 - type: recall value: 15.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (bul-eng) type: mteb/tatoeba-bitext-mining config: bul-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 6.9 - type: f1 value: 4.58348360335125 - type: precision value: 4.183620994869927 - type: recall value: 6.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (cbk-eng) type: mteb/tatoeba-bitext-mining config: cbk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 62.1 - type: f1 value: 55.70845598845599 - type: precision value: 53.22281746031747 - type: recall value: 62.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (hun-eng) type: mteb/tatoeba-bitext-mining config: hun-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 4.8 - type: f1 value: 3.246932234432234 - type: precision value: 2.9738765839703265 - type: recall value: 4.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (uig-eng) type: mteb/tatoeba-bitext-mining config: uig-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.8999999999999999 - type: f1 value: 0.5331481481481481 - type: precision value: 0.4918990604783396 - type: recall value: 0.8999999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (rus-eng) type: mteb/tatoeba-bitext-mining config: rus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 31.7 - type: f1 value: 25.22406237037816 - type: precision value: 23.27273155929038 - type: recall value: 31.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (spa-eng) type: mteb/tatoeba-bitext-mining config: spa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.5 - type: f1 value: 95.48333333333333 - type: precision value: 95.0 - type: recall value: 96.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (hye-eng) type: mteb/tatoeba-bitext-mining config: hye-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 
metrics: - type: accuracy value: 0.40431266846361186 - type: f1 value: 0.22521185350542844 - type: precision value: 0.20245384171411912 - type: recall value: 0.40431266846361186 - task: type: BitextMining dataset: name: MTEB Tatoeba (tel-eng) type: mteb/tatoeba-bitext-mining config: tel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 43.162393162393165 - type: f1 value: 35.83662064431295 - type: precision value: 33.66590199923534 - type: recall value: 43.162393162393165 - task: type: BitextMining dataset: name: MTEB Tatoeba (afr-eng) type: mteb/tatoeba-bitext-mining config: afr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 12.2 - type: f1 value: 9.007009351120605 - type: precision value: 8.26509907921979 - type: recall value: 12.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (mon-eng) type: mteb/tatoeba-bitext-mining config: mon-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 2.0454545454545454 - type: f1 value: 0.846869670733307 - type: precision value: 0.719285857023819 - type: recall value: 2.0454545454545454 - task: type: BitextMining dataset: name: MTEB Tatoeba (arz-eng) type: mteb/tatoeba-bitext-mining config: arz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 56.18448637316562 - type: f1 value: 49.41850369523325 - type: precision value: 46.84486373165618 - type: recall value: 56.18448637316562 - task: type: BitextMining dataset: name: MTEB Tatoeba (hrv-eng) type: mteb/tatoeba-bitext-mining config: hrv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.4 - type: f1 value: 6.274306734742452 - type: precision value: 5.854786915151029 - type: recall value: 8.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (nov-eng) type: mteb/tatoeba-bitext-mining config: nov-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 45.13618677042802 - type: f1 value: 38.784818726452976 - type: precision value: 36.65848310789945 - type: recall value: 45.13618677042802 - task: type: BitextMining dataset: name: MTEB Tatoeba (gsw-eng) type: mteb/tatoeba-bitext-mining config: gsw-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 23.076923076923077 - type: f1 value: 17.501757501757503 - type: precision value: 16.06289721674337 - type: recall value: 23.076923076923077 - task: type: BitextMining dataset: name: MTEB Tatoeba (nds-eng) type: mteb/tatoeba-bitext-mining config: nds-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 15.8 - type: f1 value: 11.834682187321722 - type: precision value: 10.871016304088595 - type: recall value: 15.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (ukr-eng) type: mteb/tatoeba-bitext-mining config: ukr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 7.3 - type: f1 value: 4.929314970921539 - type: precision value: 4.427714750128542 - type: recall value: 7.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (uzb-eng) type: mteb/tatoeba-bitext-mining config: uzb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.14018691588785 - type: f1 value: 2.543797914741945 - type: precision value: 2.1476927403586066 - type: recall value: 5.14018691588785 - 
task: type: BitextMining dataset: name: MTEB Tatoeba (lit-eng) type: mteb/tatoeba-bitext-mining config: lit-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.0 - type: f1 value: 3.173243817101591 - type: precision value: 2.8643206769285485 - type: recall value: 5.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (ina-eng) type: mteb/tatoeba-bitext-mining config: ina-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 69.5 - type: f1 value: 63.89614902641219 - type: precision value: 61.628650793650785 - type: recall value: 69.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (lfn-eng) type: mteb/tatoeba-bitext-mining config: lfn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 41.8 - type: f1 value: 37.523909714712914 - type: precision value: 36.054581750900766 - type: recall value: 41.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (zsm-eng) type: mteb/tatoeba-bitext-mining config: zsm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 79.2 - type: f1 value: 74.88805555555554 - type: precision value: 73.05083333333333 - type: recall value: 79.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (ita-eng) type: mteb/tatoeba-bitext-mining config: ita-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 43.5 - type: f1 value: 37.28660019590605 - type: precision value: 35.18067447433519 - type: recall value: 43.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (cmn-eng) type: mteb/tatoeba-bitext-mining config: cmn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.5 - type: f1 value: 92.95 - type: precision value: 92.2 - type: recall value: 94.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (lvs-eng) type: mteb/tatoeba-bitext-mining config: lvs-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.2 - type: f1 value: 3.5297755651484026 - type: precision value: 3.190013722690584 - type: recall value: 5.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (glg-eng) type: mteb/tatoeba-bitext-mining config: glg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 74.7 - type: f1 value: 69.2602380952381 - type: precision value: 67.03261904761905 - type: recall value: 74.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (ceb-eng) type: mteb/tatoeba-bitext-mining config: ceb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.0 - type: f1 value: 5.639611303143687 - type: precision value: 5.209856824277429 - type: recall value: 8.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (bre-eng) type: mteb/tatoeba-bitext-mining config: bre-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 6.1 - type: f1 value: 3.847611167634209 - type: precision value: 3.3324923687423693 - type: recall value: 6.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (ben-eng) type: mteb/tatoeba-bitext-mining config: ben-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 75.5 - type: f1 value: 70.14214285714286 - type: precision value: 67.88761904761904 - type: recall value: 75.5 - task: type: BitextMining dataset: name: MTEB 
Tatoeba (swg-eng) type: mteb/tatoeba-bitext-mining config: swg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 20.535714285714285 - type: f1 value: 16.437074829931973 - type: precision value: 15.459837781266353 - type: recall value: 20.535714285714285 - task: type: BitextMining dataset: name: MTEB Tatoeba (arq-eng) type: mteb/tatoeba-bitext-mining config: arq-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 21.405049396267835 - type: f1 value: 16.162968480476714 - type: precision value: 14.506603642481391 - type: recall value: 21.405049396267835 - task: type: BitextMining dataset: name: MTEB Tatoeba (kab-eng) type: mteb/tatoeba-bitext-mining config: kab-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 1.4000000000000001 - type: f1 value: 0.8861559696342305 - type: precision value: 0.7898232323232323 - type: recall value: 1.4000000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (fra-eng) type: mteb/tatoeba-bitext-mining config: fra-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.5 - type: f1 value: 91.65333333333334 - type: precision value: 90.80833333333332 - type: recall value: 93.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (por-eng) type: mteb/tatoeba-bitext-mining config: por-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.8 - type: f1 value: 92.08333333333333 - type: precision value: 91.23333333333333 - type: recall value: 93.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (tat-eng) type: mteb/tatoeba-bitext-mining config: tat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 1.3 - type: f1 value: 0.9654912597950575 - type: precision value: 0.911237853823405 - type: recall value: 1.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (oci-eng) type: mteb/tatoeba-bitext-mining config: oci-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 35.5 - type: f1 value: 29.385868020868024 - type: precision value: 27.38218614718615 - type: recall value: 35.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (pol-eng) type: mteb/tatoeba-bitext-mining config: pol-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.3 - type: f1 value: 5.625495291471218 - type: precision value: 5.006352187769519 - type: recall value: 8.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (war-eng) type: mteb/tatoeba-bitext-mining config: war-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 9.3 - type: f1 value: 7.188871139201601 - type: precision value: 6.68110313042221 - type: recall value: 9.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (aze-eng) type: mteb/tatoeba-bitext-mining config: aze-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 4.9 - type: f1 value: 3.4368196711816386 - type: precision value: 3.1516575755476186 - type: recall value: 4.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (vie-eng) type: mteb/tatoeba-bitext-mining config: vie-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.5 - type: f1 value: 92.85666666666667 - type: precision value: 92.07499999999999 - type: 
recall value: 94.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (nno-eng) type: mteb/tatoeba-bitext-mining config: nno-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 10.9 - type: f1 value: 8.052880589619718 - type: precision value: 7.2833020438680816 - type: recall value: 10.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (cha-eng) type: mteb/tatoeba-bitext-mining config: cha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 21.897810218978105 - type: f1 value: 16.459096459096457 - type: precision value: 14.99391727493917 - type: recall value: 21.897810218978105 - task: type: BitextMining dataset: name: MTEB Tatoeba (mhr-eng) type: mteb/tatoeba-bitext-mining config: mhr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.8 - type: f1 value: 0.43900258600589265 - type: precision value: 0.42151473277789064 - type: recall value: 0.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (dan-eng) type: mteb/tatoeba-bitext-mining config: dan-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 14.899999999999999 - type: f1 value: 11.403181682754628 - type: precision value: 10.506373051667312 - type: recall value: 14.899999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (ell-eng) type: mteb/tatoeba-bitext-mining config: ell-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 1.9 - type: f1 value: 0.8872641689515834 - type: precision value: 0.7857231069685399 - type: recall value: 1.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (amh-eng) type: mteb/tatoeba-bitext-mining config: amh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 1.1904761904761905 - type: f1 value: 0.20847048496818082 - type: precision value: 0.11904761904761904 - type: recall value: 1.1904761904761905 - task: type: BitextMining dataset: name: MTEB Tatoeba (pam-eng) type: mteb/tatoeba-bitext-mining config: pam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.3 - type: f1 value: 3.784571880595977 - type: precision value: 3.4556477020719782 - type: recall value: 5.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (hsb-eng) type: mteb/tatoeba-bitext-mining config: hsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 9.316770186335404 - type: f1 value: 6.80343720685027 - type: precision value: 6.316650292717499 - type: recall value: 9.316770186335404 - task: type: BitextMining dataset: name: MTEB Tatoeba (srp-eng) type: mteb/tatoeba-bitext-mining config: srp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.8999999999999995 - type: f1 value: 4.5486926228313695 - type: precision value: 4.311121913612427 - type: recall value: 5.8999999999999995 - task: type: BitextMining dataset: name: MTEB Tatoeba (epo-eng) type: mteb/tatoeba-bitext-mining config: epo-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 18.099999999999998 - type: f1 value: 13.4170874831821 - type: precision value: 12.178193046524806 - type: recall value: 18.099999999999998 - task: type: BitextMining dataset: name: MTEB Tatoeba (kzj-eng) type: mteb/tatoeba-bitext-mining config: kzj-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 4.3999999999999995 - type: f1 value: 3.3905735425765524 - type: precision value: 3.2588935800436625 - type: recall value: 4.3999999999999995 - task: type: BitextMining dataset: name: MTEB Tatoeba (awa-eng) type: mteb/tatoeba-bitext-mining config: awa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 37.66233766233766 - type: f1 value: 30.539579468150897 - type: precision value: 28.60288100547841 - type: recall value: 37.66233766233766 - task: type: BitextMining dataset: name: MTEB Tatoeba (fao-eng) type: mteb/tatoeba-bitext-mining config: fao-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 12.213740458015266 - type: f1 value: 8.297822182308039 - type: precision value: 7.463649581970193 - type: recall value: 12.213740458015266 - task: type: BitextMining dataset: name: MTEB Tatoeba (mal-eng) type: mteb/tatoeba-bitext-mining config: mal-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 78.31149927219796 - type: f1 value: 73.35759340126152 - type: precision value: 71.26394953905871 - type: recall value: 78.31149927219796 - task: type: BitextMining dataset: name: MTEB Tatoeba (ile-eng) type: mteb/tatoeba-bitext-mining config: ile-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 51.800000000000004 - type: f1 value: 44.24010323010323 - type: precision value: 41.450707972582975 - type: recall value: 51.800000000000004 - task: type: BitextMining dataset: name: MTEB Tatoeba (bos-eng) type: mteb/tatoeba-bitext-mining config: bos-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 13.27683615819209 - type: f1 value: 9.167320569156727 - type: precision value: 8.200402665583079 - type: recall value: 13.27683615819209 - task: type: BitextMining dataset: name: MTEB Tatoeba (cor-eng) type: mteb/tatoeba-bitext-mining config: cor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 4.8 - type: f1 value: 3.1268763352790283 - type: precision value: 2.84393718699601 - type: recall value: 4.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (cat-eng) type: mteb/tatoeba-bitext-mining config: cat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.1 - type: f1 value: 81.55 - type: precision value: 79.98166666666665 - type: recall value: 85.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (eus-eng) type: mteb/tatoeba-bitext-mining config: eus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 48.3 - type: f1 value: 42.347894491129786 - type: precision value: 40.36040404040404 - type: recall value: 48.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (yue-eng) type: mteb/tatoeba-bitext-mining config: yue-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 78.8 - type: f1 value: 74.35484848484847 - type: precision value: 72.43277777777777 - type: recall value: 78.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (swe-eng) type: mteb/tatoeba-bitext-mining config: swe-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 13.900000000000002 - type: f1 value: 10.718252991153888 - type: precision value: 9.835761434404196 - type: recall 
value: 13.900000000000002 - task: type: BitextMining dataset: name: MTEB Tatoeba (dtp-eng) type: mteb/tatoeba-bitext-mining config: dtp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 4.9 - type: f1 value: 3.371714825002496 - type: precision value: 3.085928254003479 - type: recall value: 4.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (kat-eng) type: mteb/tatoeba-bitext-mining config: kat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.5361930294906166 - type: f1 value: 0.40389703692021933 - type: precision value: 0.40302666854804575 - type: recall value: 0.5361930294906166 - task: type: BitextMining dataset: name: MTEB Tatoeba (jpn-eng) type: mteb/tatoeba-bitext-mining config: jpn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 55.300000000000004 - type: f1 value: 48.83353113553113 - type: precision value: 46.48630659536542 - type: recall value: 55.300000000000004 - task: type: BitextMining dataset: name: MTEB Tatoeba (csb-eng) type: mteb/tatoeba-bitext-mining config: csb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.300395256916996 - type: f1 value: 5.261552988548536 - type: precision value: 4.724388115499655 - type: recall value: 8.300395256916996 - task: type: BitextMining dataset: name: MTEB Tatoeba (xho-eng) type: mteb/tatoeba-bitext-mining config: xho-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.450704225352112 - type: f1 value: 4.829974470478787 - type: precision value: 4.337585798478816 - type: recall value: 8.450704225352112 - task: type: BitextMining dataset: name: MTEB Tatoeba (orv-eng) type: mteb/tatoeba-bitext-mining config: orv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 1.0778443113772456 - type: f1 value: 0.5373251562068135 - type: precision value: 0.5107640721914694 - type: recall value: 1.0778443113772456 - task: type: BitextMining dataset: name: MTEB Tatoeba (ind-eng) type: mteb/tatoeba-bitext-mining config: ind-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.5 - type: f1 value: 85.46333333333334 - type: precision value: 84.1 - type: recall value: 88.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (tuk-eng) type: mteb/tatoeba-bitext-mining config: tuk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.41871921182266 - type: f1 value: 2.8063639248802965 - type: precision value: 2.2699550039451513 - type: recall value: 5.41871921182266 - task: type: BitextMining dataset: name: MTEB Tatoeba (max-eng) type: mteb/tatoeba-bitext-mining config: max-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 40.49295774647887 - type: f1 value: 33.455454951933824 - type: precision value: 31.4339393461183 - type: recall value: 40.49295774647887 - task: type: BitextMining dataset: name: MTEB Tatoeba (swh-eng) type: mteb/tatoeba-bitext-mining config: swh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 18.974358974358974 - type: f1 value: 14.517578026097205 - type: precision value: 13.3510327465177 - type: recall value: 18.974358974358974 - task: type: BitextMining dataset: name: MTEB Tatoeba (hin-eng) type: mteb/tatoeba-bitext-mining config: 
hin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.5 - type: f1 value: 85.34666666666666 - type: precision value: 83.89999999999999 - type: recall value: 88.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (dsb-eng) type: mteb/tatoeba-bitext-mining config: dsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.1419624217119 - type: f1 value: 5.830783012763732 - type: precision value: 5.4408714223116545 - type: recall value: 8.1419624217119 - task: type: BitextMining dataset: name: MTEB Tatoeba (ber-eng) type: mteb/tatoeba-bitext-mining config: ber-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.800000000000001 - type: f1 value: 3.9245687335866406 - type: precision value: 3.5535667824951584 - type: recall value: 5.800000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (tam-eng) type: mteb/tatoeba-bitext-mining config: tam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 68.40390879478826 - type: f1 value: 62.25738069386277 - type: precision value: 60.10935318752908 - type: recall value: 68.40390879478826 - task: type: BitextMining dataset: name: MTEB Tatoeba (slk-eng) type: mteb/tatoeba-bitext-mining config: slk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 7.1 - type: f1 value: 5.4876787833762135 - type: precision value: 5.126663482701374 - type: recall value: 7.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (tgl-eng) type: mteb/tatoeba-bitext-mining config: tgl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.9 - type: f1 value: 6.519531004112515 - type: precision value: 5.987707404636394 - type: recall value: 8.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (ast-eng) type: mteb/tatoeba-bitext-mining config: ast-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 66.92913385826772 - type: f1 value: 59.96062992125984 - type: precision value: 57.13348331458567 - type: recall value: 66.92913385826772 - task: type: BitextMining dataset: name: MTEB Tatoeba (mkd-eng) type: mteb/tatoeba-bitext-mining config: mkd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 4.3 - type: f1 value: 2.765805343607201 - type: precision value: 2.5247851243177144 - type: recall value: 4.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (khm-eng) type: mteb/tatoeba-bitext-mining config: khm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.41551246537396125 - type: f1 value: 0.1497838495760933 - type: precision value: 0.14429034844729552 - type: recall value: 0.41551246537396125 - task: type: BitextMining dataset: name: MTEB Tatoeba (ces-eng) type: mteb/tatoeba-bitext-mining config: ces-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.800000000000001 - type: f1 value: 3.761224995516873 - type: precision value: 3.2689210175496086 - type: recall value: 5.800000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (tzl-eng) type: mteb/tatoeba-bitext-mining config: tzl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 16.346153846153847 - type: f1 value: 14.524291497975709 - type: precision value: 
13.995726495726496 - type: recall value: 16.346153846153847 - task: type: BitextMining dataset: name: MTEB Tatoeba (urd-eng) type: mteb/tatoeba-bitext-mining config: urd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 67.80000000000001 - type: f1 value: 61.615800865800864 - type: precision value: 59.12333333333334 - type: recall value: 67.80000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (ara-eng) type: mteb/tatoeba-bitext-mining config: ara-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 83.8 - type: f1 value: 80.08857142857143 - type: precision value: 78.46666666666667 - type: recall value: 83.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (kor-eng) type: mteb/tatoeba-bitext-mining config: kor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 4.2 - type: f1 value: 2.6507751588440254 - type: precision value: 2.335273168189835 - type: recall value: 4.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (yid-eng) type: mteb/tatoeba-bitext-mining config: yid-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.4716981132075472 - type: f1 value: 0.19293763102725367 - type: precision value: 0.1622040325564188 - type: recall value: 0.4716981132075472 - task: type: BitextMining dataset: name: MTEB Tatoeba (fin-eng) type: mteb/tatoeba-bitext-mining config: fin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 4.9 - type: f1 value: 3.5001791555125235 - type: precision value: 3.277940522301425 - type: recall value: 4.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (tha-eng) type: mteb/tatoeba-bitext-mining config: tha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.9124087591240875 - type: f1 value: 0.5083420229405631 - type: precision value: 0.4674562188049969 - type: recall value: 0.9124087591240875 - task: type: BitextMining dataset: name: MTEB Tatoeba (wuu-eng) type: mteb/tatoeba-bitext-mining config: wuu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 79.4 - type: f1 value: 74.62333333333333 - type: precision value: 72.52333333333334 - type: recall value: 79.4 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringP2P type: C-MTEB/ThuNewsClusteringP2P config: default split: test revision: None metrics: - type: v_measure value: 51.02719281751054 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringS2S type: C-MTEB/ThuNewsClusteringS2S config: default split: test revision: None metrics: - type: v_measure value: 48.31885339280247 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.426 - type: map_at_10 value: 9.029 - type: map_at_100 value: 14.299999999999999 - type: map_at_1000 value: 15.798000000000002 - type: map_at_3 value: 4.626 - type: map_at_5 value: 6.221 - type: mrr_at_1 value: 32.653 - type: mrr_at_10 value: 46.608 - type: mrr_at_100 value: 47.195 - type: mrr_at_1000 value: 47.208 - type: mrr_at_3 value: 41.837 - type: mrr_at_5 value: 43.673 - type: ndcg_at_1 value: 29.592000000000002 - type: ndcg_at_10 value: 23.354 - type: ndcg_at_100 value: 33.875 - type: ndcg_at_1000 value: 45.369 - type: ndcg_at_3 value: 25.734 - type: ndcg_at_5 value: 23.873 - type: precision_at_1 
value: 32.653 - type: precision_at_10 value: 21.224 - type: precision_at_100 value: 7.122000000000001 - type: precision_at_1000 value: 1.459 - type: precision_at_3 value: 26.531 - type: precision_at_5 value: 24.082 - type: recall_at_1 value: 2.426 - type: recall_at_10 value: 15.622 - type: recall_at_100 value: 44.318999999999996 - type: recall_at_1000 value: 78.632 - type: recall_at_3 value: 5.798 - type: recall_at_5 value: 8.927 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 67.9606 - type: ap value: 12.665547829558923 - type: f1 value: 52.10043478110198 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 59.601018675721576 - type: f1 value: 59.91486569196274 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 37.881729581540135 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 83.68003814746379 - type: cos_sim_ap value: 65.95659315362258 - type: cos_sim_f1 value: 61.94669484560291 - type: cos_sim_precision value: 55.80706579225725 - type: cos_sim_recall value: 69.6042216358839 - type: dot_accuracy value: 81.97532335936103 - type: dot_ap value: 58.99091918849294 - type: dot_f1 value: 57.098765432098766 - type: dot_precision value: 51.8990073370738 - type: dot_recall value: 63.45646437994723 - type: euclidean_accuracy value: 83.18531322644095 - type: euclidean_ap value: 64.5631762106556 - type: euclidean_f1 value: 61.150808574652125 - type: euclidean_precision value: 58.25173155003582 - type: euclidean_recall value: 64.35356200527704 - type: manhattan_accuracy value: 83.14358943792097 - type: manhattan_ap value: 64.73090464118813 - type: manhattan_f1 value: 61.228384019081695 - type: manhattan_precision value: 55.86507072905332 - type: manhattan_recall value: 67.73087071240106 - type: max_accuracy value: 83.68003814746379 - type: max_ap value: 65.95659315362258 - type: max_f1 value: 61.94669484560291 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.7161873714441 - type: cos_sim_ap value: 85.10870963707444 - type: cos_sim_f1 value: 77.88396923766146 - type: cos_sim_precision value: 75.59791274097695 - type: cos_sim_recall value: 80.31259624268556 - type: dot_accuracy value: 87.74595412737222 - type: dot_ap value: 81.22910623983562 - type: dot_f1 value: 76.08511889448344 - type: dot_precision value: 72.78672385908163 - type: dot_recall value: 79.69664305512781 - type: euclidean_accuracy value: 88.13404742500097 - type: euclidean_ap value: 84.03032098854915 - type: euclidean_f1 value: 76.3909440662918 - type: euclidean_precision value: 73.51894047279977 - type: euclidean_recall value: 79.49645826917154 - type: manhattan_accuracy value: 88.13598789148911 - type: manhattan_ap value: 
84.13258714083858 - type: manhattan_f1 value: 76.44922164566346 - type: manhattan_precision value: 73.70640365923384 - type: manhattan_recall value: 79.40406529103788 - type: max_accuracy value: 88.7161873714441 - type: max_ap value: 85.10870963707444 - type: max_f1 value: 77.88396923766146 - task: type: Retrieval dataset: name: MTEB VideoRetrieval type: C-MTEB/VideoRetrieval config: default split: dev revision: None metrics: - type: map_at_1 value: 41.8 - type: map_at_10 value: 50.57000000000001 - type: map_at_100 value: 51.271 - type: map_at_1000 value: 51.31099999999999 - type: map_at_3 value: 48.283 - type: map_at_5 value: 49.633 - type: mrr_at_1 value: 41.8 - type: mrr_at_10 value: 50.57000000000001 - type: mrr_at_100 value: 51.271 - type: mrr_at_1000 value: 51.31099999999999 - type: mrr_at_3 value: 48.283 - type: mrr_at_5 value: 49.633 - type: ndcg_at_1 value: 41.8 - type: ndcg_at_10 value: 55.071999999999996 - type: ndcg_at_100 value: 58.604 - type: ndcg_at_1000 value: 59.679 - type: ndcg_at_3 value: 50.394000000000005 - type: ndcg_at_5 value: 52.825 - type: precision_at_1 value: 41.8 - type: precision_at_10 value: 6.93 - type: precision_at_100 value: 0.861 - type: precision_at_1000 value: 0.095 - type: precision_at_3 value: 18.833 - type: precision_at_5 value: 12.479999999999999 - type: recall_at_1 value: 41.8 - type: recall_at_10 value: 69.3 - type: recall_at_100 value: 86.1 - type: recall_at_1000 value: 94.6 - type: recall_at_3 value: 56.49999999999999 - type: recall_at_5 value: 62.4 - task: type: Classification dataset: name: MTEB Waimai type: C-MTEB/waimai-classification config: default split: test revision: None metrics: - type: accuracy value: 80.65 - type: ap value: 59.927241826012924 - type: f1 value: 78.72456184299979 --- # Model Card for udever-bloom <!-- Provide a quick summary of what the model is/does. --> `udever-bloom-560m` is finetuned from [bigscience/bloom-560m](https://huggingface.co/bigscience/bloom-560m) via [BitFit](https://aclanthology.org/2022.acl-short.1/) on MS MARCO Passage Ranking, SNLI and MultiNLI data. It is a universal embedding model across tasks, natural and programming languages. (From the technical view, `udever` is merely with some minor improvements to `sgpt-bloom`) <img width="338" height="259" src="https://user-images.githubusercontent.com/26690193/277643721-cdb7f227-cae5-40e1-b6e1-a201bde00339.png" /> ## Model Details ### Model Description - **Developed by:** Alibaba Group - **Model type:** Transformer-based Language Model (decoder-only) - **Language(s) (NLP):** Multiple; see [bloom training data](https://huggingface.co/bigscience/bloom-560m#training-data) - **Finetuned from model :** [bigscience/bloom-560m](https://huggingface.co/bigscience/bloom-560m) ### Model Sources <!-- Provide the basic links for the model. 
-->

- **Repository:** [github.com/izhx/uni-rep](https://github.com/izhx/uni-rep)
- **Paper:** [Language Models are Universal Embedders](https://arxiv.org/pdf/2310.08232.pdf)
- **Training Date:** 2023-06

### Checkpoints

- [udever-bloom-560m](https://huggingface.co/izhx/udever-bloom-560m)
- [udever-bloom-1b1](https://huggingface.co/izhx/udever-bloom-1b1)
- [udever-bloom-3b](https://huggingface.co/izhx/udever-bloom-3b)
- [udever-bloom-7b1](https://huggingface.co/izhx/udever-bloom-7b1)

On ModelScope / 魔搭社区: [udever-bloom-560m](https://modelscope.cn/models/damo/udever-bloom-560m), [udever-bloom-1b1](https://modelscope.cn/models/damo/udever-bloom-1b1), [udever-bloom-3b](https://modelscope.cn/models/damo/udever-bloom-3b), [udever-bloom-7b1](https://modelscope.cn/models/damo/udever-bloom-7b1)

## How to Get Started with the Model

Use the code below to get started with the model. A short similarity-scoring sketch that builds on this `encode` helper follows the Citation section below.

```python
import torch
from transformers import AutoTokenizer, BloomModel

tokenizer = AutoTokenizer.from_pretrained('izhx/udever-bloom-560m')
model = BloomModel.from_pretrained('izhx/udever-bloom-560m')

boq, eoq, bod, eod = '[BOQ]', '[EOQ]', '[BOD]', '[EOD]'
eoq_id, eod_id = tokenizer.convert_tokens_to_ids([eoq, eod])

# Left padding keeps the appended EOS marker at the last position,
# which is the position used as the text embedding below.
if tokenizer.padding_side != 'left':
    print('!!!', tokenizer.padding_side)
    tokenizer.padding_side = 'left'


def encode(texts: list, is_query: bool = True, max_length=300):
    # Queries are wrapped in [BOQ] ... [EOQ], documents in [BOD] ... [EOD].
    bos = boq if is_query else bod
    eos_id = eoq_id if is_query else eod_id
    texts = [bos + t for t in texts]
    encoding = tokenizer(
        texts, truncation=True, max_length=max_length - 1, padding=True
    )
    # Append the EOS marker after truncation so it is never cut off.
    for ids, mask in zip(encoding['input_ids'], encoding['attention_mask']):
        ids.append(eos_id)
        mask.append(1)
    inputs = tokenizer.pad(encoding, return_tensors='pt')
    with torch.inference_mode():
        outputs = model(**inputs)
        # The hidden state of the final (EOS) token is the text embedding.
        embeds = outputs.last_hidden_state[:, -1]
    return embeds


encode(['I am Bert', 'You are Elmo'])
```

## Training Details

### Training Data
<!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->

- MS MARCO Passage Ranking, retrieved by (https://github.com/UKPLab/sentence-transformers/blob/master/examples/training/ms_marco/train_bi-encoder_mnrl.py#L86)
- SNLI and MultiNLI (https://sbert.net/datasets/AllNLI.tsv.gz)

### Training Procedure
<!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->

#### Preprocessing

MS MARCO hard negatives provided by (https://github.com/UKPLab/sentence-transformers/blob/master/examples/training/ms_marco/train_bi-encoder_mnrl.py#L86). Negatives for SNLI and MultiNLI are randomly sampled.

#### Training Hyperparameters

- **Training regime:** tf32, BitFit
- **Batch size:** 1024
- **Epochs:** 3
- **Optimizer:** AdamW
- **Learning rate:** 1e-4
- **Scheduler:** constant with warmup.
- **Warmup:** 0.25 epoch

## Evaluation

### Table 1: Massive Text Embedding Benchmark [MTEB](https://huggingface.co/spaces/mteb/leaderboard)

| MTEB | Avg. | Class. | Clust. | PairClass. | Rerank. | Retr. | STS | Summ.
| |-----------------------------|--------------|--------------|--------------|--------------|--------------|--------------|--------------|--------| | #Datasets ➡️ | 56 | 12 | 11 | 3 | 4 | 15 | 10 | 1 | || | bge-large-en-v1.5 | **64.23** | **75.97** | 46.08| **87.12** | **60.03** | **54.29** | 83.11| 31.61 | | bge-base-en-v1.5 | 63.55| 75.53| 45.77| 86.55| 58.86| 53.25| 82.4| 31.07 | | gte-large | 63.13| 73.33| **46.84** | 85| 59.13| 52.22| **83.35** | 31.66 | | gte-base | 62.39| 73.01| 46.2| 84.57| 58.61| 51.14| 82.3| 31.17 | | e5-large-v2 | 62.25| 75.24| 44.49| 86.03| 56.61| 50.56| 82.05| 30.19 | | instructor-xl | 61.79| 73.12| 44.74| 86.62| 57.29| 49.26| 83.06| 32.32 | | instructor-large | 61.59| 73.86| 45.29| 85.89| 57.54| 47.57| 83.15| 31.84 | | e5-base-v2 | 61.5 | 73.84| 43.8| 85.73| 55.91| 50.29| 81.05| 30.28 | | e5-large | 61.42| 73.14| 43.33| 85.94| 56.53| 49.99| 82.06| 30.97 | | text-embedding-ada-002 (OpenAI API) | 60.99| 70.93| 45.9 | 84.89| 56.32| 49.25| 80.97| 30.8 | | e5-base | 60.44| 72.63| 42.11| 85.09| 55.7 | 48.75| 80.96| 31.01 | | SGPT-5.8B-msmarco | 58.93| 68.13| 40.34| 82 | 56.56| 50.25| 78.1 | 31.46 | | sgpt-bloom-7b1-msmarco | 57.59| 66.19| 38.93| 81.9 | 55.65| 48.22| 77.74| **33.6** | || | Udever-bloom-560m | 55.80| 68.04| 36.89| 81.05| 52.60| 41.19| 79.93| 32.06 | | Udever-bloom-1b1 | 58.28| 70.18| 39.11| 83.11| 54.28| 45.27| 81.52| 31.10 | | Udever-bloom-3b | 59.86| 71.91| 40.74| 84.06| 54.90| 47.67| 82.37| 30.62 | | Udever-bloom-7b1 | 60.63 | 72.13| 40.81| 85.40| 55.91| 49.34| 83.01| 30.97 | ### Table 2: [CodeSearchNet](https://github.com/github/CodeSearchNet) | CodeSearchNet | Go | Ruby | Python | Java | JS | PHP | Avg. | |-|-|-|-|-|-|-|-| | CodeBERT | 69.3 | 70.6 | 84.0 | 86.8 | 74.8 | 70.6 | 76.0 | | GraphCodeBERT | 84.1 | 73.2 | 87.9 | 75.7 | 71.1 | 72.5 | 77.4 | | cpt-code S | **97.7** | **86.3** | 99.8 | 94.0 | 86.0 | 96.7 | 93.4 | | cpt-code M | 97.5 | 85.5 | **99.9** | **94.4** | **86.5** | **97.2** | **93.5** | | sgpt-bloom-7b1-msmarco | 76.79 | 69.25 | 95.68 | 77.93 | 70.35 | 73.45 | 77.24 | || | Udever-bloom-560m | 75.38 | 66.67 | 96.23 | 78.99 | 69.39 | 73.69 | 76.73 | | Udever-bloom-1b1 | 78.76 | 72.85 | 97.67 | 82.77 | 74.38 | 78.97 | 80.90 | | Udever-bloom-3b | 80.63 | 75.40 | 98.02 | 83.88 | 76.18 | 79.67 | 82.29 | | Udever-bloom-7b1 | 79.37 | 76.59 | 98.38 | 84.68 | 77.49 | 80.03 | 82.76 | ### Table 3: Chinese multi-domain retrieval [Multi-cpr](https://dl.acm.org/doi/10.1145/3477495.3531736) | | | |E-commerce | | Entertainment video | | Medical | | |--|--|--|--|--|--|--|--|--| | Model | Train | Backbone | MRR@10 | Recall@1k | MRR@10 | Recall@1k | MRR@10 | Recall@1k | || | BM25 | - | - | 0.225 | 0.815 | 0.225 | 0.780 | 0.187 | 0.482 | | Doc2Query | - | - | 0.239 | 0.826 | 0.238 | 0.794 | 0.210 | 0.505 | | DPR-1 | In-Domain | BERT | 0.270 | 0.921 | 0.254 | 0.934 | 0.327 | 0.747 | | DPR-2 | In-Domain | BERT-CT | 0.289 | **0.926** | 0.263 | **0.935** | 0.339 | **0.769** | | text-embedding-ada-002 | General | GPT | 0.183 | 0.825 | 0.159 | 0.786 | 0.245 | 0.593 | | sgpt-bloom-7b1-msmarco | General | BLOOM | 0.242 | 0.840 | 0.227 | 0.829 | 0.311 | 0.675 | || | Udever-bloom-560m | General | BLOOM | 0.156 | 0.802 | 0.149 | 0.749 | 0.245 | 0.571 | | Udever-bloom-1b1 | General | BLOOM | 0.244 | 0.863 | 0.208 | 0.815 | 0.241 | 0.557 | | Udever-bloom-3b | General | BLOOM | 0.267 | 0.871 | 0.228 | 0.836 | 0.288 | 0.619 | | Udever-bloom-7b1 | General | BLOOM | **0.296** | 0.889 | **0.267** | 0.907 | **0.343** | 0.705 | #### More results refer to 
[paper](https://arxiv.org/pdf/2310.08232.pdf) section 3.

## Technical Specifications

### Model Architecture and Objective

- Model: [bigscience/bloom-560m](https://huggingface.co/bigscience/bloom-560m).
- Objective: Contrastive loss with hard negatives (refer to [paper](https://arxiv.org/pdf/2310.08232.pdf) section 2.2).

### Compute Infrastructure

- Nvidia A100 SXM4 80GB.
- torch 2.0.0, transformers 4.29.2.

## Citation

**BibTeX:**

```BibTeX
@article{zhang2023language,
  title={Language Models are Universal Embedders},
  author={Zhang, Xin and Li, Zehan and Zhang, Yanzhao and Long, Dingkun and Xie, Pengjun and Zhang, Meishan and Zhang, Min},
  journal={arXiv preprint arXiv:2310.08232},
  year={2023}
}
```
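As a usage illustration that is not part of the original card, the following minimal sketch shows one way to score query–document pairs with the `encode` helper from the How to Get Started section above. The example texts, and the choice of L2-normalized dot products (cosine similarity) as the scoring function, are assumptions for demonstration; the card itself does not prescribe a particular similarity measure.

```python
import torch

# Assumes `tokenizer`, `model` and `encode` from the snippet above are in scope.
queries = ['what is BitFit fine-tuning']  # hypothetical example query
docs = [
    'BitFit updates only the bias terms of a pretrained transformer.',
    'The Eiffel Tower is located in Paris.',
]

q_emb = encode(queries, is_query=True)   # [BOQ] ... [EOQ] embeddings
d_emb = encode(docs, is_query=False)     # [BOD] ... [EOD] embeddings

# L2-normalize so the dot product equals cosine similarity (an assumed choice).
q_emb = torch.nn.functional.normalize(q_emb, p=2, dim=1)
d_emb = torch.nn.functional.normalize(d_emb, p=2, dim=1)

scores = q_emb @ d_emb.T                 # shape: (n_queries, n_docs)
print(scores)
print('best document per query:', scores.argmax(dim=1).tolist())
```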
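The card describes the training objective only as a contrastive loss with hard negatives (see the paper, section 2.2). The sketch below is a generic InfoNCE-style formulation consistent with that description, not the authors' exact implementation; the temperature value, the use of cosine similarity, and the one-hard-negative-per-query layout are assumptions.

```python
import torch
import torch.nn.functional as F


def contrastive_loss(q_emb: torch.Tensor,
                     pos_emb: torch.Tensor,
                     hard_neg_emb: torch.Tensor,
                     temperature: float = 0.05) -> torch.Tensor:
    """Generic InfoNCE: each query is matched against its positive passage,
    with mined hard negatives and the other in-batch passages as negatives."""
    q = F.normalize(q_emb, dim=-1)
    candidates = F.normalize(torch.cat([pos_emb, hard_neg_emb], dim=0), dim=-1)
    logits = q @ candidates.T / temperature   # (B, 2B); positives on the diagonal
    labels = torch.arange(q.size(0), device=q.device)
    return F.cross_entropy(logits, labels)


# Toy check with random embeddings (batch of 4, dimension 8).
B, d = 4, 8
loss = contrastive_loss(torch.randn(B, d), torch.randn(B, d), torch.randn(B, d))
print(loss.item())
```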
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
AWS/Titan-text-embeddings-v2
AWS
feature-extraction
[ "transformers", "feature-extraction", "sentence-similarity", "mteb", "en", "fr", "de", "es", "ja", "zh", "hi", "ar", "it", "pt", "sv", "ko", "he", "cs", "tr", "tl", "ru", "nl", "pl", "ta", "mr", "ml", "te", "kn", "vi", "id", "fa", "hu", "el", "ro", "da", "th", "fi", "sk", "uk", "no", "bg", "ca", "sr", "hr", "lt", "sl", "et", "la", "bn", "lv", "ms", "bs", "sq", "az", "gl", "is", "ka", "mk", "eu", "hy", "ne", "ur", "kk", "mn", "be", "uz", "km", "nn", "gu", "my", "cy", "eo", "si", "tt", "sw", "af", "ga", "pa", "ku", "ky", "tg", "or", "lo", "fo", "mt", "so", "lb", "am", "oc", "jv", "ha", "ps", "sa", "fy", "mg", "as", "ba", "br", "tk", "co", "dv", "rw", "ht", "yi", "sd", "zu", "gd", "bo", "ug", "mi", "rm", "xh", "su", "yo", "license:other", "model-index", "region:us" ]
2024-04-30T12:43:01
2024-04-30T22:12:45
148
10
--- language: - en - fr - de - es - ja - zh - hi - ar - it - pt - sv - ko - he - cs - tr - tl - ru - nl - pl - ta - mr - ml - te - kn - vi - id - fa - hu - el - ro - da - th - fi - sk - uk - 'no' - bg - ca - sr - hr - lt - sl - et - la - bn - lv - ms - bs - sq - az - gl - is - ka - mk - eu - hy - ne - ur - kk - mn - be - uz - km - nn - gu - my - cy - eo - si - tt - sw - af - ga - pa - ku - ky - tg - or - lo - fo - mt - so - lb - am - oc - jv - ha - ps - sa - fy - mg - as - ba - br - tk - co - dv - rw - ht - yi - sd - zu - gd - bo - ug - mi - rm - xh - su - yo license: other license_name: amazon-service-terms license_link: https://aws.amazon.com/service-terms/ tags: - feature-extraction - sentence-similarity - mteb inference: false model-index: - name: Titan-text-embeddings-v2 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 79.31343283582089 - type: ap value: 43.9465851246623 - type: f1 value: 73.6131343594374 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (de) type: mteb/amazon_counterfactual config: de split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 70.94218415417559 - type: ap value: 82.30115528468109 - type: f1 value: 69.37963699148699 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en-ext) type: mteb/amazon_counterfactual config: en-ext split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 82.29385307346327 - type: ap value: 29.956638709449372 - type: f1 value: 68.88158061498754 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (ja) type: mteb/amazon_counterfactual config: ja split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 80.06423982869379 - type: ap value: 25.2439835379337 - type: f1 value: 65.53837311569734 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 76.66435 - type: ap value: 70.76988138513991 - type: f1 value: 76.54117595647566 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 35.276 - type: f1 value: 34.90637768461089 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (de) type: mteb/amazon_reviews_multi config: de split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 38.826 - type: f1 value: 37.71339372044998 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (es) type: mteb/amazon_reviews_multi config: es split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 39.385999999999996 - type: f1 value: 38.24347249789392 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 39.472 - type: f1 value: 38.37157729490788 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (ja) type: 
mteb/amazon_reviews_multi config: ja split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 35.897999999999996 - type: f1 value: 35.187204289589346 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 36.068 - type: f1 value: 35.042441064207175 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 27.027 - type: map_at_10 value: 42.617 - type: map_at_100 value: 43.686 - type: map_at_1000 value: 43.695 - type: map_at_3 value: 37.684 - type: map_at_5 value: 40.532000000000004 - type: mrr_at_1 value: 27.667 - type: mrr_at_10 value: 42.88 - type: mrr_at_100 value: 43.929 - type: mrr_at_1000 value: 43.938 - type: mrr_at_3 value: 37.933 - type: mrr_at_5 value: 40.774 - type: ndcg_at_1 value: 27.027 - type: ndcg_at_10 value: 51.312000000000005 - type: ndcg_at_100 value: 55.696 - type: ndcg_at_1000 value: 55.896 - type: ndcg_at_3 value: 41.124 - type: ndcg_at_5 value: 46.283 - type: precision_at_1 value: 27.027 - type: precision_at_10 value: 7.9159999999999995 - type: precision_at_100 value: 0.979 - type: precision_at_1000 value: 0.099 - type: precision_at_3 value: 17.022000000000002 - type: precision_at_5 value: 12.731 - type: recall_at_1 value: 27.027 - type: recall_at_10 value: 79.161 - type: recall_at_100 value: 97.937 - type: recall_at_1000 value: 99.431 - type: recall_at_3 value: 51.06699999999999 - type: recall_at_5 value: 63.656 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 41.775131599226874 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 34.134214263072494 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 63.2885651257187 - type: mrr value: 76.37712702809655 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 89.53738990667027 - type: cos_sim_spearman value: 87.13210584606783 - type: euclidean_pearson value: 87.33265405736388 - type: euclidean_spearman value: 87.18632394893399 - type: manhattan_pearson value: 87.33673166528312 - type: manhattan_spearman value: 86.9736685010257 - task: type: BitextMining dataset: name: MTEB BUCC (de-en) type: mteb/bucc-bitext-mining config: de-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 98.32985386221294 - type: f1 value: 98.18371607515658 - type: precision value: 98.1106471816284 - type: recall value: 98.32985386221294 - task: type: BitextMining dataset: name: MTEB BUCC (fr-en) type: mteb/bucc-bitext-mining config: fr-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 98.20603125687872 - type: f1 value: 98.04461075647515 - type: precision value: 97.96390050627338 - type: recall value: 98.20603125687872 - task: type: BitextMining dataset: name: MTEB 
BUCC (ru-en) type: mteb/bucc-bitext-mining config: ru-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 94.8874263941808 - type: f1 value: 94.57568410114305 - type: precision value: 94.42096755570951 - type: recall value: 94.8874263941808 - task: type: BitextMining dataset: name: MTEB BUCC (zh-en) type: mteb/bucc-bitext-mining config: zh-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 96.78778304370721 - type: f1 value: 96.75267684746358 - type: precision value: 96.73512374934175 - type: recall value: 96.78778304370721 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 84.3051948051948 - type: f1 value: 83.97876601554812 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 35.005716163806575 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 30.999141295578852 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 36.153 - type: map_at_10 value: 48.742000000000004 - type: map_at_100 value: 50.253 - type: map_at_1000 value: 50.373999999999995 - type: map_at_3 value: 45.089 - type: map_at_5 value: 47.08 - type: mrr_at_1 value: 44.635000000000005 - type: mrr_at_10 value: 54.715 - type: mrr_at_100 value: 55.300000000000004 - type: mrr_at_1000 value: 55.337 - type: mrr_at_3 value: 52.527 - type: mrr_at_5 value: 53.76499999999999 - type: ndcg_at_1 value: 44.635000000000005 - type: ndcg_at_10 value: 55.31 - type: ndcg_at_100 value: 60.084 - type: ndcg_at_1000 value: 61.645 - type: ndcg_at_3 value: 50.876999999999995 - type: ndcg_at_5 value: 52.764 - type: precision_at_1 value: 44.635000000000005 - type: precision_at_10 value: 10.687000000000001 - type: precision_at_100 value: 1.66 - type: precision_at_1000 value: 0.212 - type: precision_at_3 value: 24.94 - type: precision_at_5 value: 17.596999999999998 - type: recall_at_1 value: 36.153 - type: recall_at_10 value: 67.308 - type: recall_at_100 value: 87.199 - type: recall_at_1000 value: 96.904 - type: recall_at_3 value: 53.466 - type: recall_at_5 value: 59.512 - type: map_at_1 value: 32.0 - type: map_at_10 value: 43.646 - type: map_at_100 value: 44.933 - type: map_at_1000 value: 45.049 - type: map_at_3 value: 40.333999999999996 - type: map_at_5 value: 42.108000000000004 - type: mrr_at_1 value: 40.382 - type: mrr_at_10 value: 49.738 - type: mrr_at_100 value: 50.331 - type: mrr_at_1000 value: 50.364 - type: mrr_at_3 value: 47.442 - type: mrr_at_5 value: 48.719 - type: ndcg_at_1 value: 40.382 - type: ndcg_at_10 value: 49.808 - type: ndcg_at_100 value: 54.053 - type: ndcg_at_1000 value: 55.753 - type: ndcg_at_3 value: 45.355000000000004 - type: ndcg_at_5 value: 47.215 - type: precision_at_1 value: 40.382 - type: precision_at_10 value: 9.58 - type: precision_at_100 value: 1.488 - type: precision_at_1000 value: 0.192 - type: precision_at_3 value: 22.272 - type: precision_at_5 value: 15.604999999999999 - type: recall_at_1 value: 32.0 - type: recall_at_10 value: 60.839 - type: 
recall_at_100 value: 78.869 - type: recall_at_1000 value: 89.384 - type: recall_at_3 value: 47.226 - type: recall_at_5 value: 52.864 - type: map_at_1 value: 44.084 - type: map_at_10 value: 56.591 - type: map_at_100 value: 57.533 - type: map_at_1000 value: 57.583 - type: map_at_3 value: 53.356 - type: map_at_5 value: 55.236 - type: mrr_at_1 value: 50.532999999999994 - type: mrr_at_10 value: 59.974000000000004 - type: mrr_at_100 value: 60.557 - type: mrr_at_1000 value: 60.584 - type: mrr_at_3 value: 57.774 - type: mrr_at_5 value: 59.063 - type: ndcg_at_1 value: 50.532999999999994 - type: ndcg_at_10 value: 62.265 - type: ndcg_at_100 value: 65.78 - type: ndcg_at_1000 value: 66.76299999999999 - type: ndcg_at_3 value: 57.154 - type: ndcg_at_5 value: 59.708000000000006 - type: precision_at_1 value: 50.532999999999994 - type: precision_at_10 value: 9.85 - type: precision_at_100 value: 1.247 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_3 value: 25.434 - type: precision_at_5 value: 17.279 - type: recall_at_1 value: 44.084 - type: recall_at_10 value: 75.576 - type: recall_at_100 value: 90.524 - type: recall_at_1000 value: 97.38799999999999 - type: recall_at_3 value: 61.792 - type: recall_at_5 value: 68.112 - type: map_at_1 value: 29.203000000000003 - type: map_at_10 value: 38.078 - type: map_at_100 value: 39.144 - type: map_at_1000 value: 39.222 - type: map_at_3 value: 35.278999999999996 - type: map_at_5 value: 36.812 - type: mrr_at_1 value: 31.299 - type: mrr_at_10 value: 39.879 - type: mrr_at_100 value: 40.832 - type: mrr_at_1000 value: 40.891 - type: mrr_at_3 value: 37.513999999999996 - type: mrr_at_5 value: 38.802 - type: ndcg_at_1 value: 31.299 - type: ndcg_at_10 value: 43.047999999999995 - type: ndcg_at_100 value: 48.101 - type: ndcg_at_1000 value: 49.958999999999996 - type: ndcg_at_3 value: 37.778 - type: ndcg_at_5 value: 40.257 - type: precision_at_1 value: 31.299 - type: precision_at_10 value: 6.508 - type: precision_at_100 value: 0.9530000000000001 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_3 value: 15.744 - type: precision_at_5 value: 10.893 - type: recall_at_1 value: 29.203000000000003 - type: recall_at_10 value: 56.552 - type: recall_at_100 value: 79.21000000000001 - type: recall_at_1000 value: 92.884 - type: recall_at_3 value: 42.441 - type: recall_at_5 value: 48.399 - type: map_at_1 value: 19.029 - type: map_at_10 value: 28.410000000000004 - type: map_at_100 value: 29.773 - type: map_at_1000 value: 29.887000000000004 - type: map_at_3 value: 25.374000000000002 - type: map_at_5 value: 27.162 - type: mrr_at_1 value: 23.632 - type: mrr_at_10 value: 33.0 - type: mrr_at_100 value: 34.043 - type: mrr_at_1000 value: 34.105999999999995 - type: mrr_at_3 value: 30.245 - type: mrr_at_5 value: 31.830000000000002 - type: ndcg_at_1 value: 23.632 - type: ndcg_at_10 value: 34.192 - type: ndcg_at_100 value: 40.29 - type: ndcg_at_1000 value: 42.753 - type: ndcg_at_3 value: 28.811999999999998 - type: ndcg_at_5 value: 31.46 - type: precision_at_1 value: 23.632 - type: precision_at_10 value: 6.455 - type: precision_at_100 value: 1.095 - type: precision_at_1000 value: 0.14200000000000002 - type: precision_at_3 value: 14.096 - type: precision_at_5 value: 10.448 - type: recall_at_1 value: 19.029 - type: recall_at_10 value: 47.278999999999996 - type: recall_at_100 value: 72.977 - type: recall_at_1000 value: 90.17699999999999 - type: recall_at_3 value: 32.519 - type: recall_at_5 value: 39.156 - type: map_at_1 value: 30.983 - type: map_at_10 value: 
42.595 - type: map_at_100 value: 43.906 - type: map_at_1000 value: 44.001000000000005 - type: map_at_3 value: 39.245000000000005 - type: map_at_5 value: 41.14 - type: mrr_at_1 value: 38.114 - type: mrr_at_10 value: 48.181000000000004 - type: mrr_at_100 value: 48.935 - type: mrr_at_1000 value: 48.972 - type: mrr_at_3 value: 45.877 - type: mrr_at_5 value: 47.249 - type: ndcg_at_1 value: 38.114 - type: ndcg_at_10 value: 48.793 - type: ndcg_at_100 value: 54.001999999999995 - type: ndcg_at_1000 value: 55.749 - type: ndcg_at_3 value: 43.875 - type: ndcg_at_5 value: 46.23 - type: precision_at_1 value: 38.114 - type: precision_at_10 value: 8.98 - type: precision_at_100 value: 1.3390000000000002 - type: precision_at_1000 value: 0.166 - type: precision_at_3 value: 21.303 - type: precision_at_5 value: 15.072 - type: recall_at_1 value: 30.983 - type: recall_at_10 value: 61.47 - type: recall_at_100 value: 83.14399999999999 - type: recall_at_1000 value: 94.589 - type: recall_at_3 value: 47.019 - type: recall_at_5 value: 53.445 - type: map_at_1 value: 29.707 - type: map_at_10 value: 40.900999999999996 - type: map_at_100 value: 42.369 - type: map_at_1000 value: 42.455 - type: map_at_3 value: 37.416 - type: map_at_5 value: 39.483000000000004 - type: mrr_at_1 value: 36.301 - type: mrr_at_10 value: 46.046 - type: mrr_at_100 value: 46.922999999999995 - type: mrr_at_1000 value: 46.964 - type: mrr_at_3 value: 43.436 - type: mrr_at_5 value: 45.04 - type: ndcg_at_1 value: 36.301 - type: ndcg_at_10 value: 46.955999999999996 - type: ndcg_at_100 value: 52.712 - type: ndcg_at_1000 value: 54.447 - type: ndcg_at_3 value: 41.643 - type: ndcg_at_5 value: 44.305 - type: precision_at_1 value: 36.301 - type: precision_at_10 value: 8.607 - type: precision_at_100 value: 1.34 - type: precision_at_1000 value: 0.164 - type: precision_at_3 value: 19.901 - type: precision_at_5 value: 14.429 - type: recall_at_1 value: 29.707 - type: recall_at_10 value: 59.559 - type: recall_at_100 value: 83.60499999999999 - type: recall_at_1000 value: 95.291 - type: recall_at_3 value: 44.774 - type: recall_at_5 value: 51.67 - type: map_at_1 value: 29.455416666666668 - type: map_at_10 value: 39.61333333333334 - type: map_at_100 value: 40.85875 - type: map_at_1000 value: 40.96791666666667 - type: map_at_3 value: 36.48874999999999 - type: map_at_5 value: 38.24341666666667 - type: mrr_at_1 value: 34.80258333333334 - type: mrr_at_10 value: 43.783 - type: mrr_at_100 value: 44.591833333333334 - type: mrr_at_1000 value: 44.64208333333333 - type: mrr_at_3 value: 41.38974999999999 - type: mrr_at_5 value: 42.74566666666667 - type: ndcg_at_1 value: 34.80258333333334 - type: ndcg_at_10 value: 45.2705 - type: ndcg_at_100 value: 50.31224999999999 - type: ndcg_at_1000 value: 52.27916666666667 - type: ndcg_at_3 value: 40.2745 - type: ndcg_at_5 value: 42.61575 - type: precision_at_1 value: 34.80258333333334 - type: precision_at_10 value: 7.97075 - type: precision_at_100 value: 1.2400000000000002 - type: precision_at_1000 value: 0.1595 - type: precision_at_3 value: 18.627583333333337 - type: precision_at_5 value: 13.207000000000003 - type: recall_at_1 value: 29.455416666666668 - type: recall_at_10 value: 57.66091666666665 - type: recall_at_100 value: 79.51966666666665 - type: recall_at_1000 value: 93.01883333333333 - type: recall_at_3 value: 43.580416666666665 - type: recall_at_5 value: 49.7025 - type: map_at_1 value: 27.569 - type: map_at_10 value: 34.73 - type: map_at_100 value: 35.708 - type: map_at_1000 value: 35.808 - type: map_at_3 value: 32.62 - type: map_at_5 
value: 33.556999999999995 - type: mrr_at_1 value: 31.135 - type: mrr_at_10 value: 37.833 - type: mrr_at_100 value: 38.68 - type: mrr_at_1000 value: 38.749 - type: mrr_at_3 value: 35.915 - type: mrr_at_5 value: 36.751 - type: ndcg_at_1 value: 31.135 - type: ndcg_at_10 value: 39.047 - type: ndcg_at_100 value: 43.822 - type: ndcg_at_1000 value: 46.249 - type: ndcg_at_3 value: 35.115 - type: ndcg_at_5 value: 36.49 - type: precision_at_1 value: 31.135 - type: precision_at_10 value: 6.058 - type: precision_at_100 value: 0.923 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 15.031 - type: precision_at_5 value: 10.030999999999999 - type: recall_at_1 value: 27.569 - type: recall_at_10 value: 49.332 - type: recall_at_100 value: 70.967 - type: recall_at_1000 value: 88.876 - type: recall_at_3 value: 37.858999999999995 - type: recall_at_5 value: 41.589 - type: map_at_1 value: 19.677 - type: map_at_10 value: 28.097 - type: map_at_100 value: 29.24 - type: map_at_1000 value: 29.365000000000002 - type: map_at_3 value: 25.566 - type: map_at_5 value: 26.852999999999998 - type: mrr_at_1 value: 23.882 - type: mrr_at_10 value: 31.851000000000003 - type: mrr_at_100 value: 32.757 - type: mrr_at_1000 value: 32.83 - type: mrr_at_3 value: 29.485 - type: mrr_at_5 value: 30.744 - type: ndcg_at_1 value: 23.882 - type: ndcg_at_10 value: 33.154 - type: ndcg_at_100 value: 38.491 - type: ndcg_at_1000 value: 41.274 - type: ndcg_at_3 value: 28.648 - type: ndcg_at_5 value: 30.519000000000002 - type: precision_at_1 value: 23.882 - type: precision_at_10 value: 6.117999999999999 - type: precision_at_100 value: 1.0330000000000001 - type: precision_at_1000 value: 0.145 - type: precision_at_3 value: 13.73 - type: precision_at_5 value: 9.794 - type: recall_at_1 value: 19.677 - type: recall_at_10 value: 44.444 - type: recall_at_100 value: 68.477 - type: recall_at_1000 value: 88.23 - type: recall_at_3 value: 31.708 - type: recall_at_5 value: 36.599 - type: map_at_1 value: 30.489 - type: map_at_10 value: 40.883 - type: map_at_100 value: 42.058 - type: map_at_1000 value: 42.152 - type: map_at_3 value: 37.525999999999996 - type: map_at_5 value: 39.753 - type: mrr_at_1 value: 35.541 - type: mrr_at_10 value: 44.842999999999996 - type: mrr_at_100 value: 45.673 - type: mrr_at_1000 value: 45.723 - type: mrr_at_3 value: 42.397 - type: mrr_at_5 value: 43.937 - type: ndcg_at_1 value: 35.541 - type: ndcg_at_10 value: 46.504 - type: ndcg_at_100 value: 51.637 - type: ndcg_at_1000 value: 53.535 - type: ndcg_at_3 value: 41.127 - type: ndcg_at_5 value: 44.17 - type: precision_at_1 value: 35.541 - type: precision_at_10 value: 7.864 - type: precision_at_100 value: 1.165 - type: precision_at_1000 value: 0.14300000000000002 - type: precision_at_3 value: 18.688 - type: precision_at_5 value: 13.507 - type: recall_at_1 value: 30.489 - type: recall_at_10 value: 59.378 - type: recall_at_100 value: 81.38300000000001 - type: recall_at_1000 value: 94.294 - type: recall_at_3 value: 44.946000000000005 - type: recall_at_5 value: 52.644999999999996 - type: map_at_1 value: 29.981 - type: map_at_10 value: 39.688 - type: map_at_100 value: 41.400999999999996 - type: map_at_1000 value: 41.634 - type: map_at_3 value: 36.047000000000004 - type: map_at_5 value: 38.064 - type: mrr_at_1 value: 35.375 - type: mrr_at_10 value: 44.169000000000004 - type: mrr_at_100 value: 45.07 - type: mrr_at_1000 value: 45.113 - type: mrr_at_3 value: 41.502 - type: mrr_at_5 value: 43.034 - type: ndcg_at_1 value: 35.375 - type: ndcg_at_10 value: 45.959 - type: ndcg_at_100 
value: 51.688 - type: ndcg_at_1000 value: 53.714 - type: ndcg_at_3 value: 40.457 - type: ndcg_at_5 value: 43.08 - type: precision_at_1 value: 35.375 - type: precision_at_10 value: 8.953 - type: precision_at_100 value: 1.709 - type: precision_at_1000 value: 0.253 - type: precision_at_3 value: 18.775 - type: precision_at_5 value: 14.032 - type: recall_at_1 value: 29.981 - type: recall_at_10 value: 57.896 - type: recall_at_100 value: 83.438 - type: recall_at_1000 value: 95.608 - type: recall_at_3 value: 42.327 - type: recall_at_5 value: 49.069 - type: map_at_1 value: 24.59 - type: map_at_10 value: 32.999 - type: map_at_100 value: 33.987 - type: map_at_1000 value: 34.085 - type: map_at_3 value: 30.013 - type: map_at_5 value: 31.673000000000002 - type: mrr_at_1 value: 26.802 - type: mrr_at_10 value: 35.167 - type: mrr_at_100 value: 36.001 - type: mrr_at_1000 value: 36.071999999999996 - type: mrr_at_3 value: 32.562999999999995 - type: mrr_at_5 value: 34.014 - type: ndcg_at_1 value: 26.802 - type: ndcg_at_10 value: 38.21 - type: ndcg_at_100 value: 43.086999999999996 - type: ndcg_at_1000 value: 45.509 - type: ndcg_at_3 value: 32.452999999999996 - type: ndcg_at_5 value: 35.191 - type: precision_at_1 value: 26.802 - type: precision_at_10 value: 5.989 - type: precision_at_100 value: 0.928 - type: precision_at_1000 value: 0.125 - type: precision_at_3 value: 13.617 - type: precision_at_5 value: 9.797 - type: recall_at_1 value: 24.59 - type: recall_at_10 value: 52.298 - type: recall_at_100 value: 74.443 - type: recall_at_1000 value: 92.601 - type: recall_at_3 value: 36.888 - type: recall_at_5 value: 43.37 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 9.798 - type: map_at_10 value: 15.983 - type: map_at_100 value: 17.18 - type: map_at_1000 value: 17.329 - type: map_at_3 value: 13.594000000000001 - type: map_at_5 value: 14.984 - type: mrr_at_1 value: 21.564 - type: mrr_at_10 value: 31.415 - type: mrr_at_100 value: 32.317 - type: mrr_at_1000 value: 32.376 - type: mrr_at_3 value: 28.360000000000003 - type: mrr_at_5 value: 30.194 - type: ndcg_at_1 value: 21.564 - type: ndcg_at_10 value: 22.762 - type: ndcg_at_100 value: 28.199 - type: ndcg_at_1000 value: 31.284 - type: ndcg_at_3 value: 18.746 - type: ndcg_at_5 value: 20.434 - type: precision_at_1 value: 21.564 - type: precision_at_10 value: 6.755999999999999 - type: precision_at_100 value: 1.258 - type: precision_at_1000 value: 0.182 - type: precision_at_3 value: 13.507 - type: precision_at_5 value: 10.541 - type: recall_at_1 value: 9.798 - type: recall_at_10 value: 27.407999999999998 - type: recall_at_100 value: 46.659 - type: recall_at_1000 value: 64.132 - type: recall_at_3 value: 17.541999999999998 - type: recall_at_5 value: 22.137999999999998 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 8.276 - type: map_at_10 value: 18.003 - type: map_at_100 value: 23.759 - type: map_at_1000 value: 25.105 - type: map_at_3 value: 13.812 - type: map_at_5 value: 15.659999999999998 - type: mrr_at_1 value: 63.0 - type: mrr_at_10 value: 71.812 - type: mrr_at_100 value: 72.205 - type: mrr_at_1000 value: 72.21300000000001 - type: mrr_at_3 value: 70.375 - type: mrr_at_5 value: 71.188 - type: ndcg_at_1 value: 50.5 - type: ndcg_at_10 value: 36.954 - type: ndcg_at_100 value: 40.083999999999996 - type: ndcg_at_1000 value: 47.661 - type: ndcg_at_3 value: 42.666 - type: 
ndcg_at_5 value: 39.581 - type: precision_at_1 value: 63.0 - type: precision_at_10 value: 28.249999999999996 - type: precision_at_100 value: 8.113 - type: precision_at_1000 value: 1.7149999999999999 - type: precision_at_3 value: 47.083000000000006 - type: precision_at_5 value: 38.65 - type: recall_at_1 value: 8.276 - type: recall_at_10 value: 23.177 - type: recall_at_100 value: 45.321 - type: recall_at_1000 value: 68.742 - type: recall_at_3 value: 15.473 - type: recall_at_5 value: 18.276 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 55.605000000000004 - type: f1 value: 49.86208997523934 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 80.079 - type: map_at_10 value: 85.143 - type: map_at_100 value: 85.287 - type: map_at_1000 value: 85.297 - type: map_at_3 value: 84.533 - type: map_at_5 value: 84.953 - type: mrr_at_1 value: 86.424 - type: mrr_at_10 value: 91.145 - type: mrr_at_100 value: 91.212 - type: mrr_at_1000 value: 91.213 - type: mrr_at_3 value: 90.682 - type: mrr_at_5 value: 91.013 - type: ndcg_at_1 value: 86.424 - type: ndcg_at_10 value: 88.175 - type: ndcg_at_100 value: 88.77199999999999 - type: ndcg_at_1000 value: 88.967 - type: ndcg_at_3 value: 87.265 - type: ndcg_at_5 value: 87.813 - type: precision_at_1 value: 86.424 - type: precision_at_10 value: 10.012 - type: precision_at_100 value: 1.042 - type: precision_at_1000 value: 0.107 - type: precision_at_3 value: 32.228 - type: precision_at_5 value: 19.724 - type: recall_at_1 value: 80.079 - type: recall_at_10 value: 91.96600000000001 - type: recall_at_100 value: 94.541 - type: recall_at_1000 value: 95.824 - type: recall_at_3 value: 89.213 - type: recall_at_5 value: 90.791 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 23.006999999999998 - type: map_at_10 value: 36.923 - type: map_at_100 value: 38.932 - type: map_at_1000 value: 39.096 - type: map_at_3 value: 32.322 - type: map_at_5 value: 35.119 - type: mrr_at_1 value: 45.37 - type: mrr_at_10 value: 53.418 - type: mrr_at_100 value: 54.174 - type: mrr_at_1000 value: 54.20700000000001 - type: mrr_at_3 value: 51.132 - type: mrr_at_5 value: 52.451 - type: ndcg_at_1 value: 45.37 - type: ndcg_at_10 value: 44.799 - type: ndcg_at_100 value: 51.605000000000004 - type: ndcg_at_1000 value: 54.30500000000001 - type: ndcg_at_3 value: 41.33 - type: ndcg_at_5 value: 42.608000000000004 - type: precision_at_1 value: 45.37 - type: precision_at_10 value: 12.33 - type: precision_at_100 value: 1.9349999999999998 - type: precision_at_1000 value: 0.241 - type: precision_at_3 value: 27.828999999999997 - type: precision_at_5 value: 20.432 - type: recall_at_1 value: 23.006999999999998 - type: recall_at_10 value: 51.06699999999999 - type: recall_at_100 value: 75.917 - type: recall_at_1000 value: 92.331 - type: recall_at_3 value: 36.544 - type: recall_at_5 value: 43.449 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 38.196999999999996 - type: map_at_10 value: 55.554 - type: map_at_100 value: 56.309 - type: map_at_1000 value: 56.37799999999999 - type: map_at_3 value: 53.123 - type: map_at_5 value: 54.626 - type: mrr_at_1 value: 76.39399999999999 - type: mrr_at_10 value: 81.75 - type: 
mrr_at_100 value: 81.973 - type: mrr_at_1000 value: 81.982 - type: mrr_at_3 value: 80.79499999999999 - type: mrr_at_5 value: 81.393 - type: ndcg_at_1 value: 76.39399999999999 - type: ndcg_at_10 value: 64.14800000000001 - type: ndcg_at_100 value: 66.90899999999999 - type: ndcg_at_1000 value: 68.277 - type: ndcg_at_3 value: 60.529999999999994 - type: ndcg_at_5 value: 62.513 - type: precision_at_1 value: 76.39399999999999 - type: precision_at_10 value: 12.967999999999998 - type: precision_at_100 value: 1.5150000000000001 - type: precision_at_1000 value: 0.16999999999999998 - type: precision_at_3 value: 37.884 - type: precision_at_5 value: 24.294 - type: recall_at_1 value: 38.196999999999996 - type: recall_at_10 value: 64.84100000000001 - type: recall_at_100 value: 75.726 - type: recall_at_1000 value: 84.794 - type: recall_at_3 value: 56.826 - type: recall_at_5 value: 60.736000000000004 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 82.3912 - type: ap value: 76.3949298163793 - type: f1 value: 82.30848699417406 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 19.454 - type: map_at_10 value: 31.22 - type: map_at_100 value: 32.475 - type: map_at_1000 value: 32.532 - type: map_at_3 value: 27.419 - type: map_at_5 value: 29.608 - type: mrr_at_1 value: 20.072000000000003 - type: mrr_at_10 value: 31.813999999999997 - type: mrr_at_100 value: 33.01 - type: mrr_at_1000 value: 33.062000000000005 - type: mrr_at_3 value: 28.055999999999997 - type: mrr_at_5 value: 30.218 - type: ndcg_at_1 value: 20.072000000000003 - type: ndcg_at_10 value: 38.0 - type: ndcg_at_100 value: 44.038 - type: ndcg_at_1000 value: 45.43 - type: ndcg_at_3 value: 30.219 - type: ndcg_at_5 value: 34.127 - type: precision_at_1 value: 20.072000000000003 - type: precision_at_10 value: 6.159 - type: precision_at_100 value: 0.9169999999999999 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 13.071 - type: precision_at_5 value: 9.814 - type: recall_at_1 value: 19.454 - type: recall_at_10 value: 58.931 - type: recall_at_100 value: 86.886 - type: recall_at_1000 value: 97.425 - type: recall_at_3 value: 37.697 - type: recall_at_5 value: 47.101 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 90.46283629730961 - type: f1 value: 90.22448402668293 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (de) type: mteb/mtop_domain config: de split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 86.91462383770076 - type: f1 value: 85.77767304705436 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (es) type: mteb/mtop_domain config: es split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 87.73849232821881 - type: f1 value: 87.33680109229385 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 86.22298778578141 - type: f1 value: 85.88868176519013 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (hi) type: mteb/mtop_domain config: hi 
split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 82.91860882036572 - type: f1 value: 81.38044567838352 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (th) type: mteb/mtop_domain config: th split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 69.90235081374323 - type: f1 value: 68.12897827044782 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 66.0031919744642 - type: f1 value: 48.13490278120492 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (de) type: mteb/mtop_intent config: de split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 63.260073260073256 - type: f1 value: 42.627167415555505 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (es) type: mteb/mtop_intent config: es split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 65.06004002668445 - type: f1 value: 44.90527231209402 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 59.42687128092702 - type: f1 value: 41.79584710899656 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (hi) type: mteb/mtop_intent config: hi split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 59.078522768017216 - type: f1 value: 40.398016878580734 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (th) type: mteb/mtop_intent config: th split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 43.750452079565996 - type: f1 value: 28.985320742729865 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (af) type: mteb/amazon_massive_intent config: af split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 47.59919300605245 - type: f1 value: 44.27505749600044 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (am) type: mteb/amazon_massive_intent config: am split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 31.56691324815064 - type: f1 value: 30.34952276390722 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ar) type: mteb/amazon_massive_intent config: ar split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 52.62945527908541 - type: f1 value: 49.689536347222386 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (az) type: mteb/amazon_massive_intent config: az split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 50.0941492938803 - type: f1 value: 48.47831879848094 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (bn) type: mteb/amazon_massive_intent config: bn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 46.540013449899135 - type: f1 value: 44.25663324630171 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (cy) type: mteb/amazon_massive_intent config: cy split: test revision: 
31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 44.25689307330195 - type: f1 value: 42.06066077477426 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (da) type: mteb/amazon_massive_intent config: da split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 55.05716207128446 - type: f1 value: 52.41516089202158 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (de) type: mteb/amazon_massive_intent config: de split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 61.86953597848015 - type: f1 value: 58.45989820228606 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (el) type: mteb/amazon_massive_intent config: el split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 47.02084734364493 - type: f1 value: 45.21525882986924 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.24008069939475 - type: f1 value: 68.27971089998472 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (es) type: mteb/amazon_massive_intent config: es split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.53530598520511 - type: f1 value: 61.83588971206536 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fa) type: mteb/amazon_massive_intent config: fa split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 55.19166106254204 - type: f1 value: 52.335787325774 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fi) type: mteb/amazon_massive_intent config: fi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 48.43308675184936 - type: f1 value: 45.841102061239184 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.26698049764627 - type: f1 value: 62.25607481996241 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (he) type: mteb/amazon_massive_intent config: he split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.619367854741085 - type: f1 value: 54.93671211092237 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hi) type: mteb/amazon_massive_intent config: hi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.53530598520511 - type: f1 value: 55.36413211751344 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hu) type: mteb/amazon_massive_intent config: hu split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 45.66913248150638 - type: f1 value: 42.52092657926257 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hy) type: mteb/amazon_massive_intent config: hy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 39.19973100201749 - type: f1 value: 37.194613407773566 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (id) type: 
mteb/amazon_massive_intent config: id split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 54.99663752521856 - type: f1 value: 53.875181150315356 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (is) type: mteb/amazon_massive_intent config: is split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 43.143913920645595 - type: f1 value: 41.756257561394456 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (it) type: mteb/amazon_massive_intent config: it split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.99529253530599 - type: f1 value: 59.103812128183705 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ja) type: mteb/amazon_massive_intent config: ja split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.29051782111634 - type: f1 value: 62.5268914542489 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (jv) type: mteb/amazon_massive_intent config: jv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 43.69199731002017 - type: f1 value: 41.71651113018154 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ka) type: mteb/amazon_massive_intent config: ka split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 38.34566240753194 - type: f1 value: 36.935911015227894 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (km) type: mteb/amazon_massive_intent config: km split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 34.21654337592467 - type: f1 value: 32.067289455027755 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (kn) type: mteb/amazon_massive_intent config: kn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 51.785474108944186 - type: f1 value: 49.29285691779668 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ko) type: mteb/amazon_massive_intent config: ko split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.58977807666444 - type: f1 value: 57.81630371862734 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (lv) type: mteb/amazon_massive_intent config: lv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 46.53665097511768 - type: f1 value: 44.8386852929464 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ml) type: mteb/amazon_massive_intent config: ml split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 54.468728984532625 - type: f1 value: 52.13613631138983 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (mn) type: mteb/amazon_massive_intent config: mn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 40.67921990585071 - type: f1 value: 39.87218130311539 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ms) type: mteb/amazon_massive_intent config: ms split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 51.2441156691325 - type: f1 value: 48.93351041227674 - task: type: Classification dataset: name: MTEB 
MassiveIntentClassification (my) type: mteb/amazon_massive_intent config: my split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 31.76193678547411 - type: f1 value: 29.917012787908785 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nb) type: mteb/amazon_massive_intent config: nb split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 54.40820443846671 - type: f1 value: 51.232049156874396 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nl) type: mteb/amazon_massive_intent config: nl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.8170813718897 - type: f1 value: 57.74887572270486 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.067249495628786 - type: f1 value: 57.60151669462318 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pt) type: mteb/amazon_massive_intent config: pt split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.73705447209146 - type: f1 value: 61.14377989075874 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ro) type: mteb/amazon_massive_intent config: ro split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 49.68392737054472 - type: f1 value: 48.07062918679129 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ru) type: mteb/amazon_massive_intent config: ru split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.85406859448555 - type: f1 value: 58.48852652838252 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sl) type: mteb/amazon_massive_intent config: sl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 48.58776059179556 - type: f1 value: 46.92163099241966 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sq) type: mteb/amazon_massive_intent config: sq split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 47.16879623402824 - type: f1 value: 45.8155066134247 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sv) type: mteb/amazon_massive_intent config: sv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.41425689307329 - type: f1 value: 60.097954878192574 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sw) type: mteb/amazon_massive_intent config: sw split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 45.97175521183591 - type: f1 value: 44.29275283000346 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ta) type: mteb/amazon_massive_intent config: ta split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 53.597848016139885 - type: f1 value: 51.54318966923094 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (te) type: mteb/amazon_massive_intent config: te split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 53.44653665097512 - type: f1 value: 51.60095623356469 - task: 
type: Classification dataset: name: MTEB MassiveIntentClassification (th) type: mteb/amazon_massive_intent config: th split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 46.173503698722264 - type: f1 value: 46.311285276929105 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tl) type: mteb/amazon_massive_intent config: tl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 49.47881640887693 - type: f1 value: 46.63989802589145 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tr) type: mteb/amazon_massive_intent config: tr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.02958977807666 - type: f1 value: 55.34728796730868 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ur) type: mteb/amazon_massive_intent config: ur split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 39.26361802286483 - type: f1 value: 37.61201358829197 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (vi) type: mteb/amazon_massive_intent config: vi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 52.15534633490249 - type: f1 value: 50.438951980623145 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.39946200403498 - type: f1 value: 62.152249150179664 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-TW) type: mteb/amazon_massive_intent config: zh-TW split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.207800941492934 - type: f1 value: 58.318584465398104 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (af) type: mteb/amazon_massive_scenario config: af split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 58.069939475453936 - type: f1 value: 55.04073616892449 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (am) type: mteb/amazon_massive_scenario config: am split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 38.214525891055814 - type: f1 value: 36.42184260742777 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ar) type: mteb/amazon_massive_scenario config: ar split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 57.47141896435777 - type: f1 value: 57.22453431938479 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (az) type: mteb/amazon_massive_scenario config: az split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 54.37121721587089 - type: f1 value: 53.004976087120134 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (bn) type: mteb/amazon_massive_scenario config: bn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 52.71687962340283 - type: f1 value: 51.140151342341646 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (cy) type: mteb/amazon_massive_scenario config: cy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - 
type: accuracy value: 49.502353732347004 - type: f1 value: 45.74604753969847 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (da) type: mteb/amazon_massive_scenario config: da split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 64.25689307330195 - type: f1 value: 62.25355539317913 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (de) type: mteb/amazon_massive_scenario config: de split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.27774041694688 - type: f1 value: 70.26880477280841 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (el) type: mteb/amazon_massive_scenario config: el split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 52.420981842636195 - type: f1 value: 50.824547366213565 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.11230665770006 - type: f1 value: 73.00723710263364 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (es) type: mteb/amazon_massive_scenario config: es split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.04102219233356 - type: f1 value: 66.7904194512351 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fa) type: mteb/amazon_massive_scenario config: fa split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 60.1714862138534 - type: f1 value: 58.781208933846095 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fi) type: mteb/amazon_massive_scenario config: fi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 54.04841963685272 - type: f1 value: 51.185007148328545 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.76462676529927 - type: f1 value: 68.85227238388136 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (he) type: mteb/amazon_massive_scenario config: he split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.84801613987895 - type: f1 value: 61.18395865529196 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hi) type: mteb/amazon_massive_scenario config: hi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.17888365837256 - type: f1 value: 60.40570575783401 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hu) type: mteb/amazon_massive_scenario config: hu split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 53.52051109616678 - type: f1 value: 51.210696278552014 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hy) type: mteb/amazon_massive_scenario config: hy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 45.94821788836584 - type: f1 value: 43.65062337089374 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (id) type: 
mteb/amazon_massive_scenario config: id split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 60.33288500336248 - type: f1 value: 59.50436947982156 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (is) type: mteb/amazon_massive_scenario config: is split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 50.09751176866174 - type: f1 value: 47.293838685239 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (it) type: mteb/amazon_massive_scenario config: it split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.49293880295897 - type: f1 value: 65.96586462307134 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ja) type: mteb/amazon_massive_scenario config: ja split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.35911230665769 - type: f1 value: 67.77840431764355 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (jv) type: mteb/amazon_massive_scenario config: jv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 50.585070611970416 - type: f1 value: 47.957277125670295 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ka) type: mteb/amazon_massive_scenario config: ka split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 42.76059179556153 - type: f1 value: 40.446327361325565 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (km) type: mteb/amazon_massive_scenario config: km split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 40.648957632817755 - type: f1 value: 37.231284508608276 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (kn) type: mteb/amazon_massive_scenario config: kn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 57.24613315400134 - type: f1 value: 55.14523425690653 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ko) type: mteb/amazon_massive_scenario config: ko split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.839946200403496 - type: f1 value: 62.6239063060589 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (lv) type: mteb/amazon_massive_scenario config: lv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 53.14391392064559 - type: f1 value: 50.08744471966442 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ml) type: mteb/amazon_massive_scenario config: ml split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 58.8399462004035 - type: f1 value: 57.586991117740794 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (mn) type: mteb/amazon_massive_scenario config: mn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 44.81842636180229 - type: f1 value: 42.82813975084655 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ms) type: mteb/amazon_massive_scenario config: ms split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 58.90047074646939 - type: f1 value: 56.640503134745714 
- task: type: Classification dataset: name: MTEB MassiveScenarioClassification (my) type: mteb/amazon_massive_scenario config: my split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 38.52051109616678 - type: f1 value: 36.504553927569454 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nb) type: mteb/amazon_massive_scenario config: nb split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 64.63685272360458 - type: f1 value: 62.88129994502907 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nl) type: mteb/amazon_massive_scenario config: nl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.54203093476798 - type: f1 value: 66.02745142287087 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 64.00470746469402 - type: f1 value: 62.91845058355313 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pt) type: mteb/amazon_massive_scenario config: pt split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 65.69939475453934 - type: f1 value: 65.37413822081011 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ro) type: mteb/amazon_massive_scenario config: ro split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 57.19905850706121 - type: f1 value: 55.08271383695852 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ru) type: mteb/amazon_massive_scenario config: ru split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 65.42367182246134 - type: f1 value: 64.61962307022019 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sl) type: mteb/amazon_massive_scenario config: sl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 55.147948890383326 - type: f1 value: 53.2933851469903 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sq) type: mteb/amazon_massive_scenario config: sq split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 55.679219905850715 - type: f1 value: 52.80159603468007 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sv) type: mteb/amazon_massive_scenario config: sv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.42165433759246 - type: f1 value: 67.99984081248608 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sw) type: mteb/amazon_massive_scenario config: sw split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 52.30329522528581 - type: f1 value: 50.10810382364662 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ta) type: mteb/amazon_massive_scenario config: ta split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 56.186953597848024 - type: f1 value: 55.51656586643505 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (te) type: mteb/amazon_massive_scenario config: te split: test revision: 
7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 58.019502353732356 - type: f1 value: 56.260726586358736 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (th) type: mteb/amazon_massive_scenario config: th split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 52.55548083389374 - type: f1 value: 51.139712264362714 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tl) type: mteb/amazon_massive_scenario config: tl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 57.43443174176194 - type: f1 value: 55.76244076715635 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tr) type: mteb/amazon_massive_scenario config: tr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 61.55346334902488 - type: f1 value: 61.25819823057803 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ur) type: mteb/amazon_massive_scenario config: ur split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 47.114996637525216 - type: f1 value: 45.20428169546973 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (vi) type: mteb/amazon_massive_scenario config: vi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 56.83254875588434 - type: f1 value: 56.00919757601416 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.57969065232012 - type: f1 value: 69.17378512156806 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-TW) type: mteb/amazon_massive_scenario config: zh-TW split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 64.02488231338263 - type: f1 value: 64.09790488949963 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 29.71446786877363 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 28.003624498407547 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 31.29671894458151 - type: mrr value: 32.44455140124599 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 6.127 - type: map_at_10 value: 13.047 - type: map_at_100 value: 15.754000000000001 - type: map_at_1000 value: 16.930999999999997 - type: map_at_3 value: 9.876999999999999 - type: map_at_5 value: 11.265 - type: mrr_at_1 value: 45.511 - type: mrr_at_10 value: 54.75600000000001 - type: mrr_at_100 value: 55.33 - type: mrr_at_1000 value: 55.374 - type: mrr_at_3 value: 53.147999999999996 - type: mrr_at_5 value: 53.952999999999996 - type: ndcg_at_1 value: 43.653 - type: ndcg_at_10 value: 33.936 - type: ndcg_at_100 value: 29.952 - type: ndcg_at_1000 value: 38.356 - type: ndcg_at_3 value: 40.018 - 
type: ndcg_at_5 value: 37.102000000000004 - type: precision_at_1 value: 45.511 - type: precision_at_10 value: 24.768 - type: precision_at_100 value: 7.13 - type: precision_at_1000 value: 1.928 - type: precision_at_3 value: 37.461 - type: precision_at_5 value: 31.703 - type: recall_at_1 value: 6.127 - type: recall_at_10 value: 16.512999999999998 - type: recall_at_100 value: 29.057 - type: recall_at_1000 value: 59.25899999999999 - type: recall_at_3 value: 10.940999999999999 - type: recall_at_5 value: 12.925 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 32.228 - type: map_at_10 value: 47.56 - type: map_at_100 value: 48.539 - type: map_at_1000 value: 48.567 - type: map_at_3 value: 43.214999999999996 - type: map_at_5 value: 45.799 - type: mrr_at_1 value: 36.53 - type: mrr_at_10 value: 50.004000000000005 - type: mrr_at_100 value: 50.737 - type: mrr_at_1000 value: 50.758 - type: mrr_at_3 value: 46.543 - type: mrr_at_5 value: 48.672 - type: ndcg_at_1 value: 36.501 - type: ndcg_at_10 value: 55.103 - type: ndcg_at_100 value: 59.156 - type: ndcg_at_1000 value: 59.821999999999996 - type: ndcg_at_3 value: 47.089 - type: ndcg_at_5 value: 51.35999999999999 - type: precision_at_1 value: 36.501 - type: precision_at_10 value: 9.046999999999999 - type: precision_at_100 value: 1.13 - type: precision_at_1000 value: 0.11900000000000001 - type: precision_at_3 value: 21.398 - type: precision_at_5 value: 15.307 - type: recall_at_1 value: 32.228 - type: recall_at_10 value: 75.608 - type: recall_at_100 value: 93.062 - type: recall_at_1000 value: 98.059 - type: recall_at_3 value: 55.021 - type: recall_at_5 value: 64.873 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 70.623 - type: map_at_10 value: 84.705 - type: map_at_100 value: 85.333 - type: map_at_1000 value: 85.348 - type: map_at_3 value: 81.736 - type: map_at_5 value: 83.616 - type: mrr_at_1 value: 81.28 - type: mrr_at_10 value: 87.518 - type: mrr_at_100 value: 87.619 - type: mrr_at_1000 value: 87.62 - type: mrr_at_3 value: 86.545 - type: mrr_at_5 value: 87.238 - type: ndcg_at_1 value: 81.28999999999999 - type: ndcg_at_10 value: 88.412 - type: ndcg_at_100 value: 89.603 - type: ndcg_at_1000 value: 89.696 - type: ndcg_at_3 value: 85.563 - type: ndcg_at_5 value: 87.17 - type: precision_at_1 value: 81.28999999999999 - type: precision_at_10 value: 13.439 - type: precision_at_100 value: 1.5310000000000001 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.437 - type: precision_at_5 value: 24.662 - type: recall_at_1 value: 70.623 - type: recall_at_10 value: 95.531 - type: recall_at_100 value: 99.58 - type: recall_at_1000 value: 99.978 - type: recall_at_3 value: 87.368 - type: recall_at_5 value: 91.898 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 49.53241309124786 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 59.712004482915994 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 5.313 - type: map_at_10 value: 13.447000000000001 - type: map_at_100 value: 15.491 
- type: map_at_1000 value: 15.784999999999998 - type: map_at_3 value: 9.58 - type: map_at_5 value: 11.562 - type: mrr_at_1 value: 26.200000000000003 - type: mrr_at_10 value: 37.212 - type: mrr_at_100 value: 38.190000000000005 - type: mrr_at_1000 value: 38.242 - type: mrr_at_3 value: 34.067 - type: mrr_at_5 value: 35.862 - type: ndcg_at_1 value: 26.200000000000003 - type: ndcg_at_10 value: 21.979000000000003 - type: ndcg_at_100 value: 29.726999999999997 - type: ndcg_at_1000 value: 34.766000000000005 - type: ndcg_at_3 value: 21.16 - type: ndcg_at_5 value: 18.478 - type: precision_at_1 value: 26.200000000000003 - type: precision_at_10 value: 11.25 - type: precision_at_100 value: 2.241 - type: precision_at_1000 value: 0.345 - type: precision_at_3 value: 19.633 - type: precision_at_5 value: 16.14 - type: recall_at_1 value: 5.313 - type: recall_at_10 value: 22.808 - type: recall_at_100 value: 45.540000000000006 - type: recall_at_1000 value: 70.043 - type: recall_at_3 value: 11.932 - type: recall_at_5 value: 16.347 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 75.95540796619258 - type: cos_sim_spearman value: 76.49462277620303 - type: euclidean_pearson value: 71.67643435507317 - type: euclidean_spearman value: 76.4915921108082 - type: manhattan_pearson value: 71.71412560074847 - type: manhattan_spearman value: 76.46738312094736 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 81.48773267615617 - type: cos_sim_spearman value: 74.99867664033701 - type: euclidean_pearson value: 76.0885798115032 - type: euclidean_spearman value: 74.99438208715942 - type: manhattan_pearson value: 76.09382557464033 - type: manhattan_spearman value: 74.96139353538533 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 88.19022560804167 - type: cos_sim_spearman value: 87.9128142106699 - type: euclidean_pearson value: 85.51390183763914 - type: euclidean_spearman value: 87.89995488057309 - type: manhattan_pearson value: 85.44945034816052 - type: manhattan_spearman value: 87.791458898378 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 85.17877898640924 - type: cos_sim_spearman value: 82.25544088807465 - type: euclidean_pearson value: 82.36395988835416 - type: euclidean_spearman value: 82.26359924974219 - type: manhattan_pearson value: 82.39219808999891 - type: manhattan_spearman value: 82.27757404868157 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 87.66865350602554 - type: cos_sim_spearman value: 87.87150169810872 - type: euclidean_pearson value: 85.41520650056647 - type: euclidean_spearman value: 87.86636613654022 - type: manhattan_pearson value: 85.38710485867502 - type: manhattan_spearman value: 87.83513424575301 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 80.75527643407175 - type: cos_sim_spearman value: 
80.9239008594745 - type: euclidean_pearson value: 79.37682746800515 - type: euclidean_spearman value: 80.91978947194092 - type: manhattan_pearson value: 79.38884189990698 - type: manhattan_spearman value: 80.91771608341014 - task: type: STS dataset: name: MTEB STS17 (ko-ko) type: mteb/sts17-crosslingual-sts config: ko-ko split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 80.24344311909609 - type: cos_sim_spearman value: 80.78933956176022 - type: euclidean_pearson value: 76.95229806538676 - type: euclidean_spearman value: 80.79706724032172 - type: manhattan_pearson value: 76.90212135774246 - type: manhattan_spearman value: 80.68727415384441 - task: type: STS dataset: name: MTEB STS17 (ar-ar) type: mteb/sts17-crosslingual-sts config: ar-ar split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 77.33891809228084 - type: cos_sim_spearman value: 79.37912430317627 - type: euclidean_pearson value: 72.56919843951036 - type: euclidean_spearman value: 79.3091436905072 - type: manhattan_pearson value: 72.4282811588754 - type: manhattan_spearman value: 78.90144894538078 - task: type: STS dataset: name: MTEB STS17 (en-ar) type: mteb/sts17-crosslingual-sts config: en-ar split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 59.68908656739356 - type: cos_sim_spearman value: 58.76110210983758 - type: euclidean_pearson value: 59.14749159577439 - type: euclidean_spearman value: 59.015997032145016 - type: manhattan_pearson value: 57.907675340322676 - type: manhattan_spearman value: 57.07751173022352 - task: type: STS dataset: name: MTEB STS17 (en-de) type: mteb/sts17-crosslingual-sts config: en-de split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 75.53325164873934 - type: cos_sim_spearman value: 76.13104388846271 - type: euclidean_pearson value: 74.61931031522006 - type: euclidean_spearman value: 75.96875166459931 - type: manhattan_pearson value: 74.82154350849251 - type: manhattan_spearman value: 76.64455924104236 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 85.4228376590724 - type: cos_sim_spearman value: 87.22764976624408 - type: euclidean_pearson value: 81.94975688107507 - type: euclidean_spearman value: 87.19193932664932 - type: manhattan_pearson value: 82.0043964628936 - type: manhattan_spearman value: 87.09130430957818 - task: type: STS dataset: name: MTEB STS17 (en-tr) type: mteb/sts17-crosslingual-sts config: en-tr split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 57.5627552601949 - type: cos_sim_spearman value: 55.5263144563657 - type: euclidean_pearson value: 57.00569241610482 - type: euclidean_spearman value: 55.35291811479459 - type: manhattan_pearson value: 56.99656284623506 - type: manhattan_spearman value: 55.593673744709946 - task: type: STS dataset: name: MTEB STS17 (es-en) type: mteb/sts17-crosslingual-sts config: es-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 69.93801311909735 - type: cos_sim_spearman value: 72.2581115470475 - type: euclidean_pearson value: 68.24881290268563 - type: euclidean_spearman value: 72.60813652864522 - type: manhattan_pearson value: 67.86369874088834 - type: manhattan_spearman value: 
71.92346382988023 - task: type: STS dataset: name: MTEB STS17 (es-es) type: mteb/sts17-crosslingual-sts config: es-es split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 86.20555264114785 - type: cos_sim_spearman value: 85.0588060013836 - type: euclidean_pearson value: 81.78229090166155 - type: euclidean_spearman value: 85.09687374900614 - type: manhattan_pearson value: 81.77449099980244 - type: manhattan_spearman value: 84.70331476222177 - task: type: STS dataset: name: MTEB STS17 (fr-en) type: mteb/sts17-crosslingual-sts config: fr-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 73.786793911605 - type: cos_sim_spearman value: 75.63094397551554 - type: euclidean_pearson value: 71.64292842519251 - type: euclidean_spearman value: 75.60215267384011 - type: manhattan_pearson value: 72.2124078037642 - type: manhattan_spearman value: 76.34546028465175 - task: type: STS dataset: name: MTEB STS17 (it-en) type: mteb/sts17-crosslingual-sts config: it-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 69.62139987106455 - type: cos_sim_spearman value: 71.35872226722493 - type: euclidean_pearson value: 68.50103697766141 - type: euclidean_spearman value: 71.24590187948473 - type: manhattan_pearson value: 68.89236562525663 - type: manhattan_spearman value: 71.77994400789173 - task: type: STS dataset: name: MTEB STS17 (nl-en) type: mteb/sts17-crosslingual-sts config: nl-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 71.62728174871292 - type: cos_sim_spearman value: 71.98655715409397 - type: euclidean_pearson value: 70.27026741609356 - type: euclidean_spearman value: 72.14004669693777 - type: manhattan_pearson value: 70.46335140108751 - type: manhattan_spearman value: 72.6638254374311 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 71.10248717637424 - type: cos_sim_spearman value: 68.5905931564714 - type: euclidean_pearson value: 71.23290000423759 - type: euclidean_spearman value: 68.6419513130457 - type: manhattan_pearson value: 71.6886015250234 - type: manhattan_spearman value: 69.47543660368697 - task: type: STS dataset: name: MTEB STS22 (de) type: mteb/sts22-crosslingual-sts config: de split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 59.010555056244776 - type: cos_sim_spearman value: 60.121771179899255 - type: euclidean_pearson value: 53.04527785573465 - type: euclidean_spearman value: 60.121771179899255 - type: manhattan_pearson value: 52.931480071124234 - type: manhattan_spearman value: 60.03868409331775 - task: type: STS dataset: name: MTEB STS22 (es) type: mteb/sts22-crosslingual-sts config: es split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 70.6833028374664 - type: cos_sim_spearman value: 68.57396263856863 - type: euclidean_pearson value: 68.30905084522986 - type: euclidean_spearman value: 68.57396263856863 - type: manhattan_pearson value: 70.91400657516918 - type: manhattan_spearman value: 72.72240857808112 - task: type: STS dataset: name: MTEB STS22 (pl) type: mteb/sts22-crosslingual-sts config: pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 36.948290734279645 - 
type: cos_sim_spearman value: 42.07722031011005 - type: euclidean_pearson value: 22.539446972018467 - type: euclidean_spearman value: 42.07722031011005 - type: manhattan_pearson value: 24.119402246951786 - type: manhattan_spearman value: 45.80525501822569 - task: type: STS dataset: name: MTEB STS22 (tr) type: mteb/sts22-crosslingual-sts config: tr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 66.97840719036533 - type: cos_sim_spearman value: 66.62430648804775 - type: euclidean_pearson value: 66.89526587772023 - type: euclidean_spearman value: 66.62430648804775 - type: manhattan_pearson value: 68.6929895225091 - type: manhattan_spearman value: 68.91772708432867 - task: type: STS dataset: name: MTEB STS22 (ar) type: mteb/sts22-crosslingual-sts config: ar split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 56.65098289103698 - type: cos_sim_spearman value: 57.436674670689214 - type: euclidean_pearson value: 51.79149892785239 - type: euclidean_spearman value: 57.436674670689214 - type: manhattan_pearson value: 52.64807953938707 - type: manhattan_spearman value: 58.94583987372767 - task: type: STS dataset: name: MTEB STS22 (ru) type: mteb/sts22-crosslingual-sts config: ru split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 60.669531297510225 - type: cos_sim_spearman value: 61.71342510003327 - type: euclidean_pearson value: 55.821871433553504 - type: euclidean_spearman value: 61.71342510003327 - type: manhattan_pearson value: 57.77073441351117 - type: manhattan_spearman value: 65.20759033207 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 64.34728960310699 - type: cos_sim_spearman value: 64.03565302589584 - type: euclidean_pearson value: 61.958942333930544 - type: euclidean_spearman value: 64.03565302589584 - type: manhattan_pearson value: 64.65072672727923 - type: manhattan_spearman value: 67.82569969943107 - task: type: STS dataset: name: MTEB STS22 (fr) type: mteb/sts22-crosslingual-sts config: fr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 82.47120815594353 - type: cos_sim_spearman value: 81.46916544955101 - type: euclidean_pearson value: 79.21753533489019 - type: euclidean_spearman value: 81.46916544955101 - type: manhattan_pearson value: 78.26605518839271 - type: manhattan_spearman value: 81.29749169339514 - task: type: STS dataset: name: MTEB STS22 (de-en) type: mteb/sts22-crosslingual-sts config: de-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 63.31467231933632 - type: cos_sim_spearman value: 53.36160506603274 - type: euclidean_pearson value: 64.98434169416196 - type: euclidean_spearman value: 53.36160506603274 - type: manhattan_pearson value: 69.6837006629638 - type: manhattan_spearman value: 60.85384324700893 - task: type: STS dataset: name: MTEB STS22 (es-en) type: mteb/sts22-crosslingual-sts config: es-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 79.99425127770438 - type: cos_sim_spearman value: 77.41308957007035 - type: euclidean_pearson value: 79.69441265626801 - type: euclidean_spearman value: 77.41308957007035 - type: manhattan_pearson value: 80.3726291667624 - type: manhattan_spearman value: 
79.0414050644631 - task: type: STS dataset: name: MTEB STS22 (it) type: mteb/sts22-crosslingual-sts config: it split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 79.13469287716659 - type: cos_sim_spearman value: 79.27976881582065 - type: euclidean_pearson value: 77.65964425780172 - type: euclidean_spearman value: 79.27976881582065 - type: manhattan_pearson value: 77.64158710257945 - type: manhattan_spearman value: 79.22242281895944 - task: type: STS dataset: name: MTEB STS22 (pl-en) type: mteb/sts22-crosslingual-sts config: pl-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 76.303314995599 - type: cos_sim_spearman value: 77.4991345414335 - type: euclidean_pearson value: 74.88826621426401 - type: euclidean_spearman value: 77.4991345414335 - type: manhattan_pearson value: 77.70223488989319 - type: manhattan_spearman value: 79.69746987627822 - task: type: STS dataset: name: MTEB STS22 (zh-en) type: mteb/sts22-crosslingual-sts config: zh-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 70.87814957197239 - type: cos_sim_spearman value: 69.86785751801642 - type: euclidean_pearson value: 68.68630146548654 - type: euclidean_spearman value: 69.8615799070054 - type: manhattan_pearson value: 61.83743315022061 - type: manhattan_spearman value: 64.35346450347738 - task: type: STS dataset: name: MTEB STS22 (es-it) type: mteb/sts22-crosslingual-sts config: es-it split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 74.1484689923211 - type: cos_sim_spearman value: 74.69046355179742 - type: euclidean_pearson value: 73.03951899271793 - type: euclidean_spearman value: 74.69820632954205 - type: manhattan_pearson value: 73.36810146930709 - type: manhattan_spearman value: 75.33154135287258 - task: type: STS dataset: name: MTEB STS22 (de-fr) type: mteb/sts22-crosslingual-sts config: de-fr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 51.43125921362742 - type: cos_sim_spearman value: 58.25341239774093 - type: euclidean_pearson value: 48.00689582162098 - type: euclidean_spearman value: 58.533194841668426 - type: manhattan_pearson value: 46.11721778230745 - type: manhattan_spearman value: 55.026889052448134 - task: type: STS dataset: name: MTEB STS22 (de-pl) type: mteb/sts22-crosslingual-sts config: de-pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 40.066205533538046 - type: cos_sim_spearman value: 48.46991890841381 - type: euclidean_pearson value: 42.29606506858651 - type: euclidean_spearman value: 48.34674249441531 - type: manhattan_pearson value: 41.70680990555484 - type: manhattan_spearman value: 47.54609580342499 - task: type: STS dataset: name: MTEB STS22 (fr-pl) type: mteb/sts22-crosslingual-sts config: fr-pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 82.26527545520592 - type: cos_sim_spearman value: 73.24670207647144 - type: euclidean_pearson value: 81.78699781584893 - type: euclidean_spearman value: 73.24670207647144 - type: manhattan_pearson value: 83.14172292187807 - type: manhattan_spearman value: 73.24670207647144 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 
81.51438108053523 - type: cos_sim_spearman value: 81.9481311864648 - type: euclidean_pearson value: 78.6683040592179 - type: euclidean_spearman value: 81.9535649926177 - type: manhattan_pearson value: 78.65396325536754 - type: manhattan_spearman value: 81.96918240343872 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 80.6689275068653 - type: mrr value: 95.021337594867 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 55.193999999999996 - type: map_at_10 value: 65.814 - type: map_at_100 value: 66.428 - type: map_at_1000 value: 66.447 - type: map_at_3 value: 63.304 - type: map_at_5 value: 64.64 - type: mrr_at_1 value: 57.99999999999999 - type: mrr_at_10 value: 66.957 - type: mrr_at_100 value: 67.405 - type: mrr_at_1000 value: 67.422 - type: mrr_at_3 value: 65.0 - type: mrr_at_5 value: 66.183 - type: ndcg_at_1 value: 57.99999999999999 - type: ndcg_at_10 value: 70.523 - type: ndcg_at_100 value: 72.987 - type: ndcg_at_1000 value: 73.605 - type: ndcg_at_3 value: 66.268 - type: ndcg_at_5 value: 68.27600000000001 - type: precision_at_1 value: 57.99999999999999 - type: precision_at_10 value: 9.467 - type: precision_at_100 value: 1.073 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 26.444000000000003 - type: precision_at_5 value: 17.2 - type: recall_at_1 value: 55.193999999999996 - type: recall_at_10 value: 83.52199999999999 - type: recall_at_100 value: 94.5 - type: recall_at_1000 value: 99.667 - type: recall_at_3 value: 71.989 - type: recall_at_5 value: 77.31700000000001 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.73465346534654 - type: cos_sim_ap value: 92.91719494015508 - type: cos_sim_f1 value: 86.46200301962756 - type: cos_sim_precision value: 87.03140830800406 - type: cos_sim_recall value: 85.9 - type: dot_accuracy value: 99.73663366336633 - type: dot_ap value: 92.90802848215259 - type: dot_f1 value: 86.46200301962756 - type: dot_precision value: 87.03140830800406 - type: dot_recall value: 85.9 - type: euclidean_accuracy value: 99.73465346534654 - type: euclidean_ap value: 92.91627363446204 - type: euclidean_f1 value: 86.43469490670702 - type: euclidean_precision value: 87.18209562563581 - type: euclidean_recall value: 85.7 - type: manhattan_accuracy value: 99.73663366336633 - type: manhattan_ap value: 92.90219877406929 - type: manhattan_f1 value: 86.31471040492056 - type: manhattan_precision value: 88.53838065194533 - type: manhattan_recall value: 84.2 - type: max_accuracy value: 99.73663366336633 - type: max_ap value: 92.91719494015508 - type: max_f1 value: 86.46200301962756 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 60.73098998430779 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 34.64256206757585 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: 
mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 54.749150614295694 - type: mrr value: 55.78880984211867 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 28.863577054305907 - type: cos_sim_spearman value: 27.538596944829774 - type: dot_pearson value: 28.93043755116643 - type: dot_spearman value: 27.733110516733987 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.22899999999999998 - type: map_at_10 value: 2.078 - type: map_at_100 value: 12.024 - type: map_at_1000 value: 29.036 - type: map_at_3 value: 0.681 - type: map_at_5 value: 1.083 - type: mrr_at_1 value: 86.0 - type: mrr_at_10 value: 92.667 - type: mrr_at_100 value: 92.667 - type: mrr_at_1000 value: 92.667 - type: mrr_at_3 value: 92.667 - type: mrr_at_5 value: 92.667 - type: ndcg_at_1 value: 82.0 - type: ndcg_at_10 value: 80.746 - type: ndcg_at_100 value: 61.090999999999994 - type: ndcg_at_1000 value: 55.034000000000006 - type: ndcg_at_3 value: 82.419 - type: ndcg_at_5 value: 81.018 - type: precision_at_1 value: 86.0 - type: precision_at_10 value: 86.2 - type: precision_at_100 value: 62.68 - type: precision_at_1000 value: 24.032 - type: precision_at_3 value: 88.667 - type: precision_at_5 value: 86.0 - type: recall_at_1 value: 0.22899999999999998 - type: recall_at_10 value: 2.263 - type: recall_at_100 value: 15.238999999999999 - type: recall_at_1000 value: 51.937 - type: recall_at_3 value: 0.719 - type: recall_at_5 value: 1.15 - task: type: BitextMining dataset: name: MTEB Tatoeba (sqi-eng) type: mteb/tatoeba-bitext-mining config: sqi-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 19.400000000000002 - type: f1 value: 15.386076064970075 - type: precision value: 14.253878834615676 - type: recall value: 19.400000000000002 - task: type: BitextMining dataset: name: MTEB Tatoeba (fry-eng) type: mteb/tatoeba-bitext-mining config: fry-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 42.19653179190752 - type: f1 value: 37.726396917148364 - type: precision value: 36.14643545279384 - type: recall value: 42.19653179190752 - task: type: BitextMining dataset: name: MTEB Tatoeba (kur-eng) type: mteb/tatoeba-bitext-mining config: kur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 18.536585365853657 - type: f1 value: 13.512010347376199 - type: precision value: 12.034068912117693 - type: recall value: 18.536585365853657 - task: type: BitextMining dataset: name: MTEB Tatoeba (tur-eng) type: mteb/tatoeba-bitext-mining config: tur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 81.69999999999999 - type: f1 value: 77.37888888888888 - type: precision value: 75.49583333333332 - type: recall value: 81.69999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (deu-eng) type: mteb/tatoeba-bitext-mining config: deu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.39999999999999 - type: f1 value: 96.56666666666666 - type: precision value: 96.16666666666667 - type: recall value: 97.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba 
(nld-eng) type: mteb/tatoeba-bitext-mining config: nld-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.0 - type: f1 value: 87.22333333333333 - type: precision value: 85.89166666666667 - type: recall value: 90.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (ron-eng) type: mteb/tatoeba-bitext-mining config: ron-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 64.7 - type: f1 value: 59.10904761904763 - type: precision value: 56.91968253968254 - type: recall value: 64.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (ang-eng) type: mteb/tatoeba-bitext-mining config: ang-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 38.80597014925373 - type: f1 value: 30.890784174366264 - type: precision value: 28.327114427860696 - type: recall value: 38.80597014925373 - task: type: BitextMining dataset: name: MTEB Tatoeba (ido-eng) type: mteb/tatoeba-bitext-mining config: ido-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 53.900000000000006 - type: f1 value: 48.294138583638585 - type: precision value: 46.333495670995674 - type: recall value: 53.900000000000006 - task: type: BitextMining dataset: name: MTEB Tatoeba (jav-eng) type: mteb/tatoeba-bitext-mining config: jav-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 11.707317073170733 - type: f1 value: 8.999999999999998 - type: precision value: 8.175377468060395 - type: recall value: 11.707317073170733 - task: type: BitextMining dataset: name: MTEB Tatoeba (isl-eng) type: mteb/tatoeba-bitext-mining config: isl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 15.9 - type: f1 value: 12.451226269430602 - type: precision value: 11.404807799760325 - type: recall value: 15.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (slv-eng) type: mteb/tatoeba-bitext-mining config: slv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 41.919805589307416 - type: f1 value: 35.880619060297064 - type: precision value: 33.77682308241239 - type: recall value: 41.919805589307416 - task: type: BitextMining dataset: name: MTEB Tatoeba (cym-eng) type: mteb/tatoeba-bitext-mining config: cym-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 10.956521739130434 - type: f1 value: 9.098715976676996 - type: precision value: 8.659935858401333 - type: recall value: 10.956521739130434 - task: type: BitextMining dataset: name: MTEB Tatoeba (kaz-eng) type: mteb/tatoeba-bitext-mining config: kaz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 11.652173913043478 - type: f1 value: 9.154324883225136 - type: precision value: 8.505898125360801 - type: recall value: 11.652173913043478 - task: type: BitextMining dataset: name: MTEB Tatoeba (est-eng) type: mteb/tatoeba-bitext-mining config: est-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 9.700000000000001 - type: f1 value: 7.431679431679432 - type: precision value: 6.799925118740907 - type: recall value: 9.700000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (heb-eng) type: mteb/tatoeba-bitext-mining config: heb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: 
accuracy value: 77.5 - type: f1 value: 72.39999999999999 - type: precision value: 70.13444444444444 - type: recall value: 77.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (gla-eng) type: mteb/tatoeba-bitext-mining config: gla-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.548854041013269 - type: f1 value: 4.233155465362944 - type: precision value: 3.948150869646547 - type: recall value: 5.548854041013269 - task: type: BitextMining dataset: name: MTEB Tatoeba (mar-eng) type: mteb/tatoeba-bitext-mining config: mar-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 73.5 - type: f1 value: 67.35333333333332 - type: precision value: 64.63666666666666 - type: recall value: 73.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (lat-eng) type: mteb/tatoeba-bitext-mining config: lat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 27.700000000000003 - type: f1 value: 21.152765495941964 - type: precision value: 19.27832403707404 - type: recall value: 27.700000000000003 - task: type: BitextMining dataset: name: MTEB Tatoeba (bel-eng) type: mteb/tatoeba-bitext-mining config: bel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 48.1 - type: f1 value: 41.21001443001443 - type: precision value: 38.628495670995676 - type: recall value: 48.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (pms-eng) type: mteb/tatoeba-bitext-mining config: pms-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 40.0 - type: f1 value: 34.32060003488575 - type: precision value: 32.32134353741497 - type: recall value: 40.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (gle-eng) type: mteb/tatoeba-bitext-mining config: gle-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 6.800000000000001 - type: f1 value: 4.3954389450190465 - type: precision value: 3.893838027469606 - type: recall value: 6.800000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (pes-eng) type: mteb/tatoeba-bitext-mining config: pes-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 51.800000000000004 - type: f1 value: 45.04222943722944 - type: precision value: 42.541984126984126 - type: recall value: 51.800000000000004 - task: type: BitextMining dataset: name: MTEB Tatoeba (nob-eng) type: mteb/tatoeba-bitext-mining config: nob-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 83.1 - type: f1 value: 79.20675324675324 - type: precision value: 77.44944444444444 - type: recall value: 83.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (bul-eng) type: mteb/tatoeba-bitext-mining config: bul-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 66.8 - type: f1 value: 60.25746031746031 - type: precision value: 57.55250000000001 - type: recall value: 66.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (cbk-eng) type: mteb/tatoeba-bitext-mining config: cbk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 63.6 - type: f1 value: 56.73421356421356 - type: precision value: 54.02218253968254 - type: recall value: 63.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (hun-eng) type: mteb/tatoeba-bitext-mining config: 
hun-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 17.599999999999998 - type: f1 value: 13.17699134199134 - type: precision value: 11.77444805194805 - type: recall value: 17.599999999999998 - task: type: BitextMining dataset: name: MTEB Tatoeba (uig-eng) type: mteb/tatoeba-bitext-mining config: uig-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 2.0 - type: f1 value: 1.3126923076923078 - type: precision value: 1.104952380952381 - type: recall value: 2.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (rus-eng) type: mteb/tatoeba-bitext-mining config: rus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.3 - type: f1 value: 84.96333333333334 - type: precision value: 83.38333333333333 - type: recall value: 88.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (spa-eng) type: mteb/tatoeba-bitext-mining config: spa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.69999999999999 - type: f1 value: 93.12333333333333 - type: precision value: 92.375 - type: recall value: 94.69999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (hye-eng) type: mteb/tatoeba-bitext-mining config: hye-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.6738544474393532 - type: f1 value: 0.3690849566291394 - type: precision value: 0.3305452159899599 - type: recall value: 0.6738544474393532 - task: type: BitextMining dataset: name: MTEB Tatoeba (tel-eng) type: mteb/tatoeba-bitext-mining config: tel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 71.7948717948718 - type: f1 value: 65.37037037037037 - type: precision value: 62.46438746438747 - type: recall value: 71.7948717948718 - task: type: BitextMining dataset: name: MTEB Tatoeba (afr-eng) type: mteb/tatoeba-bitext-mining config: afr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 56.699999999999996 - type: f1 value: 50.58054945054945 - type: precision value: 48.313047619047616 - type: recall value: 56.699999999999996 - task: type: BitextMining dataset: name: MTEB Tatoeba (mon-eng) type: mteb/tatoeba-bitext-mining config: mon-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 13.863636363636363 - type: f1 value: 10.948429096156369 - type: precision value: 10.227287994137523 - type: recall value: 13.863636363636363 - task: type: BitextMining dataset: name: MTEB Tatoeba (arz-eng) type: mteb/tatoeba-bitext-mining config: arz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 62.473794549266245 - type: f1 value: 56.04172906059699 - type: precision value: 53.26694619147448 - type: recall value: 62.473794549266245 - task: type: BitextMining dataset: name: MTEB Tatoeba (hrv-eng) type: mteb/tatoeba-bitext-mining config: hrv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 40.0 - type: f1 value: 34.62948179271708 - type: precision value: 32.699030910609864 - type: recall value: 40.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (nov-eng) type: mteb/tatoeba-bitext-mining config: nov-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 60.311284046692606 - type: f1 value: 54.06182447038479 - 
type: precision value: 51.757921067259595 - type: recall value: 60.311284046692606 - task: type: BitextMining dataset: name: MTEB Tatoeba (gsw-eng) type: mteb/tatoeba-bitext-mining config: gsw-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 43.58974358974359 - type: f1 value: 37.042359350051655 - type: precision value: 34.75783475783476 - type: recall value: 43.58974358974359 - task: type: BitextMining dataset: name: MTEB Tatoeba (nds-eng) type: mteb/tatoeba-bitext-mining config: nds-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 56.49999999999999 - type: f1 value: 49.471269841269844 - type: precision value: 46.742182539682545 - type: recall value: 56.49999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (ukr-eng) type: mteb/tatoeba-bitext-mining config: ukr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 71.5 - type: f1 value: 65.32880952380951 - type: precision value: 62.71261904761904 - type: recall value: 71.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (uzb-eng) type: mteb/tatoeba-bitext-mining config: uzb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 11.448598130841122 - type: f1 value: 7.861361294691689 - type: precision value: 6.961045509526818 - type: recall value: 11.448598130841122 - task: type: BitextMining dataset: name: MTEB Tatoeba (lit-eng) type: mteb/tatoeba-bitext-mining config: lit-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 13.5 - type: f1 value: 10.448586132968154 - type: precision value: 9.624691955878397 - type: recall value: 13.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (ina-eng) type: mteb/tatoeba-bitext-mining config: ina-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 82.19999999999999 - type: f1 value: 78.25366946778712 - type: precision value: 76.54291666666667 - type: recall value: 82.19999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (lfn-eng) type: mteb/tatoeba-bitext-mining config: lfn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 53.5 - type: f1 value: 47.48505411255411 - type: precision value: 45.29801587301587 - type: recall value: 53.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (zsm-eng) type: mteb/tatoeba-bitext-mining config: zsm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 61.1 - type: f1 value: 54.60758056758057 - type: precision value: 52.16455433455434 - type: recall value: 61.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (ita-eng) type: mteb/tatoeba-bitext-mining config: ita-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.1 - type: f1 value: 81.98506715506716 - type: precision value: 80.64754901960784 - type: recall value: 85.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (cmn-eng) type: mteb/tatoeba-bitext-mining config: cmn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.2 - type: f1 value: 86.13333333333333 - type: precision value: 84.65 - type: recall value: 89.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (lvs-eng) type: mteb/tatoeba-bitext-mining config: lvs-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 13.600000000000001 - type: f1 value: 10.721816580317723 - type: precision value: 9.97922024538847 - type: recall value: 13.600000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (glg-eng) type: mteb/tatoeba-bitext-mining config: glg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 79.0 - type: f1 value: 74.2652380952381 - type: precision value: 72.18690476190476 - type: recall value: 79.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (ceb-eng) type: mteb/tatoeba-bitext-mining config: ceb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 12.833333333333332 - type: f1 value: 10.45993265993266 - type: precision value: 9.849548907882243 - type: recall value: 12.833333333333332 - task: type: BitextMining dataset: name: MTEB Tatoeba (bre-eng) type: mteb/tatoeba-bitext-mining config: bre-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.3 - type: f1 value: 5.457311371692176 - type: precision value: 4.8466941508148595 - type: recall value: 8.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (ben-eng) type: mteb/tatoeba-bitext-mining config: ben-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 26.3 - type: f1 value: 20.851341154819416 - type: precision value: 19.1173617945522 - type: recall value: 26.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (swg-eng) type: mteb/tatoeba-bitext-mining config: swg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 41.964285714285715 - type: f1 value: 36.38605442176871 - type: precision value: 34.523809523809526 - type: recall value: 41.964285714285715 - task: type: BitextMining dataset: name: MTEB Tatoeba (arq-eng) type: mteb/tatoeba-bitext-mining config: arq-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 26.454445664105382 - type: f1 value: 20.67692765826684 - type: precision value: 18.684070229075715 - type: recall value: 26.454445664105382 - task: type: BitextMining dataset: name: MTEB Tatoeba (kab-eng) type: mteb/tatoeba-bitext-mining config: kab-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 2.8000000000000003 - type: f1 value: 1.9487240537240536 - type: precision value: 1.7766582325720255 - type: recall value: 2.8000000000000003 - task: type: BitextMining dataset: name: MTEB Tatoeba (fra-eng) type: mteb/tatoeba-bitext-mining config: fra-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.5 - type: f1 value: 89.39 - type: precision value: 88.425 - type: recall value: 91.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (por-eng) type: mteb/tatoeba-bitext-mining config: por-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.5 - type: f1 value: 89.38333333333333 - type: precision value: 88.36666666666667 - type: recall value: 91.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (tat-eng) type: mteb/tatoeba-bitext-mining config: tat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 9.2 - type: f1 value: 6.672282438325198 - type: precision value: 6.046073589145276 - type: recall value: 9.2 - task: type: BitextMining dataset: name: 
MTEB Tatoeba (oci-eng) type: mteb/tatoeba-bitext-mining config: oci-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 45.2 - type: f1 value: 39.12095238095238 - type: precision value: 36.820952380952384 - type: recall value: 45.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (pol-eng) type: mteb/tatoeba-bitext-mining config: pol-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 86.8 - type: f1 value: 83.35000000000001 - type: precision value: 81.825 - type: recall value: 86.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (war-eng) type: mteb/tatoeba-bitext-mining config: war-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 13.5 - type: f1 value: 10.66862856136998 - type: precision value: 9.845928551928552 - type: recall value: 13.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (aze-eng) type: mteb/tatoeba-bitext-mining config: aze-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 33.4 - type: f1 value: 27.78153389993659 - type: precision value: 25.778055555555557 - type: recall value: 33.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (vie-eng) type: mteb/tatoeba-bitext-mining config: vie-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 57.699999999999996 - type: f1 value: 50.440714285714286 - type: precision value: 47.64396825396825 - type: recall value: 57.699999999999996 - task: type: BitextMining dataset: name: MTEB Tatoeba (nno-eng) type: mteb/tatoeba-bitext-mining config: nno-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 62.2 - type: f1 value: 56.0098625351257 - type: precision value: 53.691914098972916 - type: recall value: 62.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (cha-eng) type: mteb/tatoeba-bitext-mining config: cha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 27.00729927007299 - type: f1 value: 22.798053527980535 - type: precision value: 21.107055961070557 - type: recall value: 27.00729927007299 - task: type: BitextMining dataset: name: MTEB Tatoeba (mhr-eng) type: mteb/tatoeba-bitext-mining config: mhr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 6.2 - type: f1 value: 4.295544090473964 - type: precision value: 3.913153952193392 - type: recall value: 6.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (dan-eng) type: mteb/tatoeba-bitext-mining config: dan-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.10000000000001 - type: f1 value: 72.49333333333334 - type: precision value: 70.53368637110017 - type: recall value: 77.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (ell-eng) type: mteb/tatoeba-bitext-mining config: ell-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 15.2 - type: f1 value: 10.429591693330824 - type: precision value: 9.145801926831338 - type: recall value: 15.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (amh-eng) type: mteb/tatoeba-bitext-mining config: amh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 1.7857142857142856 - type: f1 value: 0.3635204081632653 - type: precision value: 0.205026455026455 - 
type: recall value: 1.7857142857142856 - task: type: BitextMining dataset: name: MTEB Tatoeba (pam-eng) type: mteb/tatoeba-bitext-mining config: pam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 6.4 - type: f1 value: 4.8412763053939525 - type: precision value: 4.444087810337809 - type: recall value: 6.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (hsb-eng) type: mteb/tatoeba-bitext-mining config: hsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 43.47826086956522 - type: f1 value: 37.13266949291794 - type: precision value: 34.655332590115194 - type: recall value: 43.47826086956522 - task: type: BitextMining dataset: name: MTEB Tatoeba (srp-eng) type: mteb/tatoeba-bitext-mining config: srp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 42.0 - type: f1 value: 35.412229437229435 - type: precision value: 32.907539682539685 - type: recall value: 42.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (epo-eng) type: mteb/tatoeba-bitext-mining config: epo-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 36.0 - type: f1 value: 30.53874458874459 - type: precision value: 28.711192408382807 - type: recall value: 36.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (kzj-eng) type: mteb/tatoeba-bitext-mining config: kzj-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 7.9 - type: f1 value: 5.80190114561213 - type: precision value: 5.298527531836355 - type: recall value: 7.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (awa-eng) type: mteb/tatoeba-bitext-mining config: awa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 49.35064935064935 - type: f1 value: 41.57805638325119 - type: precision value: 38.87445887445887 - type: recall value: 49.35064935064935 - task: type: BitextMining dataset: name: MTEB Tatoeba (fao-eng) type: mteb/tatoeba-bitext-mining config: fao-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 25.572519083969464 - type: f1 value: 21.338006776938073 - type: precision value: 20.194474736459465 - type: recall value: 25.572519083969464 - task: type: BitextMining dataset: name: MTEB Tatoeba (mal-eng) type: mteb/tatoeba-bitext-mining config: mal-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 79.62154294032024 - type: f1 value: 74.47355652595827 - type: precision value: 72.2076661814653 - type: recall value: 79.62154294032024 - task: type: BitextMining dataset: name: MTEB Tatoeba (ile-eng) type: mteb/tatoeba-bitext-mining config: ile-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 68.0 - type: f1 value: 61.80859649122807 - type: precision value: 59.30381381381381 - type: recall value: 68.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (bos-eng) type: mteb/tatoeba-bitext-mining config: bos-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 42.93785310734463 - type: f1 value: 36.72617201306135 - type: precision value: 34.72641059505466 - type: recall value: 42.93785310734463 - task: type: BitextMining dataset: name: MTEB Tatoeba (cor-eng) type: mteb/tatoeba-bitext-mining config: cor-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.5 - type: f1 value: 3.8651658986175113 - type: precision value: 3.4432814407814405 - type: recall value: 5.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (cat-eng) type: mteb/tatoeba-bitext-mining config: cat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 69.19999999999999 - type: f1 value: 63.41880952380953 - type: precision value: 61.07913419913419 - type: recall value: 69.19999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (eus-eng) type: mteb/tatoeba-bitext-mining config: eus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 15.4 - type: f1 value: 11.672122577122575 - type: precision value: 10.59919974661354 - type: recall value: 15.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (yue-eng) type: mteb/tatoeba-bitext-mining config: yue-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 58.5 - type: f1 value: 51.31880452880453 - type: precision value: 48.60550125313283 - type: recall value: 58.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (swe-eng) type: mteb/tatoeba-bitext-mining config: swe-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.3 - type: f1 value: 86.32666666666667 - type: precision value: 84.98333333333333 - type: recall value: 89.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (dtp-eng) type: mteb/tatoeba-bitext-mining config: dtp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.7 - type: f1 value: 3.8739805216757546 - type: precision value: 3.4734608954367014 - type: recall value: 5.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (kat-eng) type: mteb/tatoeba-bitext-mining config: kat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.8042895442359249 - type: f1 value: 0.7596067917783735 - type: precision value: 0.7372654155495978 - type: recall value: 0.8042895442359249 - task: type: BitextMining dataset: name: MTEB Tatoeba (jpn-eng) type: mteb/tatoeba-bitext-mining config: jpn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.7 - type: f1 value: 86.92333333333333 - type: precision value: 85.64166666666667 - type: recall value: 89.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (csb-eng) type: mteb/tatoeba-bitext-mining config: csb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 26.08695652173913 - type: f1 value: 20.517863778733343 - type: precision value: 18.901098901098898 - type: recall value: 26.08695652173913 - task: type: BitextMining dataset: name: MTEB Tatoeba (xho-eng) type: mteb/tatoeba-bitext-mining config: xho-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 12.676056338028168 - type: f1 value: 9.526324614352783 - type: precision value: 9.006292657908235 - type: recall value: 12.676056338028168 - task: type: BitextMining dataset: name: MTEB Tatoeba (orv-eng) type: mteb/tatoeba-bitext-mining config: orv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 24.910179640718564 - type: f1 value: 19.645099411566473 - type: precision value: 17.676076418591386 - type: recall value: 24.910179640718564 - task: type: 
BitextMining dataset: name: MTEB Tatoeba (ind-eng) type: mteb/tatoeba-bitext-mining config: ind-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 61.4 - type: f1 value: 54.64269841269841 - type: precision value: 51.981071428571425 - type: recall value: 61.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (tuk-eng) type: mteb/tatoeba-bitext-mining config: tuk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 11.330049261083744 - type: f1 value: 9.610016420361248 - type: precision value: 9.123781574258464 - type: recall value: 11.330049261083744 - task: type: BitextMining dataset: name: MTEB Tatoeba (max-eng) type: mteb/tatoeba-bitext-mining config: max-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 27.816901408450708 - type: f1 value: 22.51925345174495 - type: precision value: 21.10468365750056 - type: recall value: 27.816901408450708 - task: type: BitextMining dataset: name: MTEB Tatoeba (swh-eng) type: mteb/tatoeba-bitext-mining config: swh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 11.282051282051283 - type: f1 value: 7.777167097237831 - type: precision value: 7.050109879436802 - type: recall value: 11.282051282051283 - task: type: BitextMining dataset: name: MTEB Tatoeba (hin-eng) type: mteb/tatoeba-bitext-mining config: hin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 86.0 - type: f1 value: 82.05857142857143 - type: precision value: 80.25 - type: recall value: 86.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (dsb-eng) type: mteb/tatoeba-bitext-mining config: dsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 34.44676409185804 - type: f1 value: 28.296517215097587 - type: precision value: 26.16624956236465 - type: recall value: 34.44676409185804 - task: type: BitextMining dataset: name: MTEB Tatoeba (ber-eng) type: mteb/tatoeba-bitext-mining config: ber-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 7.199999999999999 - type: f1 value: 5.500051631938041 - type: precision value: 5.164411510424442 - type: recall value: 7.199999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (tam-eng) type: mteb/tatoeba-bitext-mining config: tam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 71.9869706840391 - type: f1 value: 65.79339227547696 - type: precision value: 63.16503800217155 - type: recall value: 71.9869706840391 - task: type: BitextMining dataset: name: MTEB Tatoeba (slk-eng) type: mteb/tatoeba-bitext-mining config: slk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 70.89999999999999 - type: f1 value: 65.4152380952381 - type: precision value: 63.106666666666655 - type: recall value: 70.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (tgl-eng) type: mteb/tatoeba-bitext-mining config: tgl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 21.0 - type: f1 value: 17.86438197644649 - type: precision value: 16.84469948469949 - type: recall value: 21.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (ast-eng) type: mteb/tatoeba-bitext-mining config: ast-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 
metrics: - type: accuracy value: 62.20472440944882 - type: f1 value: 55.81364829396325 - type: precision value: 53.262092238470196 - type: recall value: 62.20472440944882 - task: type: BitextMining dataset: name: MTEB Tatoeba (mkd-eng) type: mteb/tatoeba-bitext-mining config: mkd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 41.8 - type: f1 value: 34.724603174603175 - type: precision value: 32.040277777777774 - type: recall value: 41.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (khm-eng) type: mteb/tatoeba-bitext-mining config: khm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.41551246537396125 - type: f1 value: 0.3462603878116343 - type: precision value: 0.32317636195752536 - type: recall value: 0.41551246537396125 - task: type: BitextMining dataset: name: MTEB Tatoeba (ces-eng) type: mteb/tatoeba-bitext-mining config: ces-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.6 - type: f1 value: 81.81333333333333 - type: precision value: 80.08333333333334 - type: recall value: 85.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (tzl-eng) type: mteb/tatoeba-bitext-mining config: tzl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 31.73076923076923 - type: f1 value: 26.097374847374844 - type: precision value: 24.31891025641026 - type: recall value: 31.73076923076923 - task: type: BitextMining dataset: name: MTEB Tatoeba (urd-eng) type: mteb/tatoeba-bitext-mining config: urd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 9.6 - type: f1 value: 6.598392371412457 - type: precision value: 5.855494356434758 - type: recall value: 9.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (ara-eng) type: mteb/tatoeba-bitext-mining config: ara-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 83.5 - type: f1 value: 79.65190476190476 - type: precision value: 77.875 - type: recall value: 83.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (kor-eng) type: mteb/tatoeba-bitext-mining config: kor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 80.5 - type: f1 value: 75.75999999999999 - type: precision value: 73.60333333333332 - type: recall value: 80.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (yid-eng) type: mteb/tatoeba-bitext-mining config: yid-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 2.1226415094339623 - type: f1 value: 1.4622641509433962 - type: precision value: 1.2637578616352203 - type: recall value: 2.1226415094339623 - task: type: BitextMining dataset: name: MTEB Tatoeba (fin-eng) type: mteb/tatoeba-bitext-mining config: fin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 23.0 - type: f1 value: 18.111780719280716 - type: precision value: 16.497738095238095 - type: recall value: 23.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (tha-eng) type: mteb/tatoeba-bitext-mining config: tha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 4.562043795620438 - type: f1 value: 3.1632119907667358 - type: precision value: 2.8806772100567724 - type: recall value: 4.562043795620438 - task: type: BitextMining dataset: name: MTEB Tatoeba (wuu-eng) 
type: mteb/tatoeba-bitext-mining config: wuu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 75.9 - type: f1 value: 70.57690476190476 - type: precision value: 68.19761904761904 - type: recall value: 75.9 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.804 - type: map_at_10 value: 11.267000000000001 - type: map_at_100 value: 17.034 - type: map_at_1000 value: 18.733 - type: map_at_3 value: 6.071 - type: map_at_5 value: 8.187 - type: mrr_at_1 value: 34.694 - type: mrr_at_10 value: 50.504000000000005 - type: mrr_at_100 value: 51.162 - type: mrr_at_1000 value: 51.162 - type: mrr_at_3 value: 45.918 - type: mrr_at_5 value: 49.082 - type: ndcg_at_1 value: 33.672999999999995 - type: ndcg_at_10 value: 27.478 - type: ndcg_at_100 value: 37.961 - type: ndcg_at_1000 value: 50.117 - type: ndcg_at_3 value: 30.156 - type: ndcg_at_5 value: 29.293999999999997 - type: precision_at_1 value: 34.694 - type: precision_at_10 value: 24.082 - type: precision_at_100 value: 7.632999999999999 - type: precision_at_1000 value: 1.569 - type: precision_at_3 value: 30.612000000000002 - type: precision_at_5 value: 29.387999999999998 - type: recall_at_1 value: 2.804 - type: recall_at_10 value: 17.785 - type: recall_at_100 value: 47.452 - type: recall_at_1000 value: 84.687 - type: recall_at_3 value: 6.9190000000000005 - type: recall_at_5 value: 10.807 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 74.5162 - type: ap value: 15.022137849208509 - type: f1 value: 56.77914300422838 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 59.589700056593095 - type: f1 value: 59.93893560752363 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 40.11538634360855 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 83.97806520832091 - type: cos_sim_ap value: 67.80381341664686 - type: cos_sim_f1 value: 63.01665268958908 - type: cos_sim_precision value: 57.713407943822695 - type: cos_sim_recall value: 69.39313984168865 - type: dot_accuracy value: 83.9899862907552 - type: dot_ap value: 67.80914960711299 - type: dot_f1 value: 63.0287144048612 - type: dot_precision value: 57.46252444058223 - type: dot_recall value: 69.78891820580475 - type: euclidean_accuracy value: 83.9601835846695 - type: euclidean_ap value: 67.79862461635126 - type: euclidean_f1 value: 63.02426882389545 - type: euclidean_precision value: 59.64664310954063 - type: euclidean_recall value: 66.80738786279683 - type: manhattan_accuracy value: 83.94230196101806 - type: manhattan_ap value: 67.78560087328111 - type: manhattan_f1 value: 63.10622881851117 - type: manhattan_precision value: 56.63939584644431 - type: manhattan_recall value: 71.2401055408971 - type: max_accuracy value: 83.9899862907552 - type: max_ap 
value: 67.80914960711299 - type: max_f1 value: 63.10622881851117 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.04994760740482 - type: cos_sim_ap value: 85.71231674852108 - type: cos_sim_f1 value: 78.92350867093619 - type: cos_sim_precision value: 74.07807645549101 - type: cos_sim_recall value: 84.44718201416693 - type: dot_accuracy value: 89.05188807389295 - type: dot_ap value: 85.71776365526502 - type: dot_f1 value: 78.92055922835156 - type: dot_precision value: 74.34152317430069 - type: dot_recall value: 84.10070834616569 - type: euclidean_accuracy value: 89.05188807389295 - type: euclidean_ap value: 85.7114644968015 - type: euclidean_f1 value: 78.9458525345622 - type: euclidean_precision value: 74.14119556397078 - type: euclidean_recall value: 84.41638435478903 - type: manhattan_accuracy value: 89.06547133930997 - type: manhattan_ap value: 85.70658730333459 - type: manhattan_f1 value: 78.91009741543552 - type: manhattan_precision value: 74.00714719169308 - type: manhattan_recall value: 84.5087773329227 - type: max_accuracy value: 89.06547133930997 - type: max_ap value: 85.71776365526502 - type: max_f1 value: 78.9458525345622 --- ## Bedrock Titan Text Embeddings v2 This repository contains the MTEB scores and usage examples of Bedrock Titan Text Embeddings v2. You can use the embedding model either via the Bedrock InvokeModel API or via Bedrock's batch jobs. For RAG use cases we recommend the former to embed queries during search (latency optimized) and the latter to index corpus (throughput optimized). ## Using Bedrock's InvokeModel API ```python import json import boto3 class TitanEmbeddings(object): accept = "application/json" content_type = "application/json" def __init__(self, model_id="amazon.titan-embed-text-v2:0"): self.bedrock = boto3.client(service_name='bedrock-runtime') self.model_id = model_id def __call__(self, text, dimensions, normalize=True): """ Returns Titan Embeddings Args: text (str): text to embed dimensions (int): Number of output dimensions. normalize (bool): Whether to return the normalized embedding or not. Return: List[float]: Embedding """ body = json.dumps({ "inputText": text, "dimensions": dimensions, "normalize": normalize }) response = self.bedrock.invoke_model( body=body, modelId=self.model_id, accept=self.accept, contentType=self.content_type ) response_body = json.loads(response.get('body').read()) return response_body['embedding'] if __name__ == '__main__': """ Entrypoint for Amazon Titan Embeddings V2 - Text example. """ dimensions = 1024 normalize = True titan_embeddings_v2 = TitanEmbeddings(model_id="amazon.titan-embed-text-v2:0") input_text = "What are the different services that you offer?" 
embedding = titan_embeddings_v2(input_text, dimensions, normalize) print(f"{input_text=}") print(f"{embedding[:10]=}") ``` ## Using Bedrock's batch jobs ```python import requests from aws_requests_auth.boto_utils import BotoAWSRequestsAuth region = "us-east-1" base_uri = f"bedrock.{region}.amazonaws.com" batch_job_uri = f"https://{base_uri}/model-invocation-job/" # For details on how to set up an IAM role for batch inference, see # https://docs.aws.amazon.com/bedrock/latest/userguide/batch-inference-permissions.html role_arn = "arn:aws:iam::111122223333:role/my-batch-inference-role" payload = { "inputDataConfig": { "s3InputDataConfig": { "s3Uri": "s3://my-input-bucket/batch-input/", "s3InputFormat": "JSONL" } }, "jobName": "embeddings-v2-batch-job", "modelId": "amazon.titan-embed-text-v2:0", "outputDataConfig": { "s3OutputDataConfig": { "s3Uri": "s3://my-output-bucket/batch-output/" } }, "roleArn": role_arn } request_auth = BotoAWSRequestsAuth( aws_host=base_uri, aws_region=region, aws_service="bedrock" ) response = requests.request("POST", batch_job_uri, json=payload, auth=request_auth) print(response.json()) ```
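The batch-job request above points at a JSONL prefix in S3 but does not show what those input records contain. As a hedged illustration only, assuming the standard Bedrock batch-inference record layout (a `recordId` plus a `modelInput` whose body matches the InvokeModel request shown earlier), preparing such a file could look like the sketch below; the filename, record IDs, and example texts are placeholders.

```python
import json

# Sketch: build the JSONL file to upload under s3://my-input-bucket/batch-input/.
# Assumption: each line is {"recordId": ..., "modelInput": {...}} with a modelInput
# body identical to the InvokeModel request used above.
texts = [
    "What are the different services that you offer?",
    "How can I track my order?",
]

with open("batch-input.jsonl", "w") as f:
    for i, text in enumerate(texts):
        record = {
            "recordId": f"rec-{i:05d}",
            "modelInput": {"inputText": text, "dimensions": 1024, "normalize": True},
        }
        f.write(json.dumps(record) + "\n")

# Upload the file to the input prefix referenced in the job payload before creating the job, e.g.:
# boto3.client("s3").upload_file("batch-input.jsonl", "my-input-bucket", "batch-input/batch-input.jsonl")
```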
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
KaraKaraWitch/HiTZ-GoLLIE-13B-AsSafeTensors
KaraKaraWitch
text-generation
[ "safetensors", "llama", "code", "text-generation-inference", "Information Extraction", "IE", "Named Entity Recogniton", "Event Extraction", "Relation Extraction", "LLaMA", "text-generation", "custom_code", "en", "dataset:ACE05", "dataset:bc5cdr", "dataset:conll2003", "dataset:ncbi_disease", "dataset:conll2012_ontonotesv5", "dataset:rams", "dataset:tacred", "dataset:wnut_17", "arxiv:2310.03668", "license:llama2", "region:us" ]
2024-11-09T19:44:39
2024-11-09T19:56:16
148
0
--- datasets: - ACE05 - bc5cdr - conll2003 - ncbi_disease - conll2012_ontonotesv5 - rams - tacred - wnut_17 language: - en license: llama2 metrics: - f1 pipeline_tag: text-generation tags: - code - text-generation-inference - Information Extraction - IE - Named Entity Recogniton - Event Extraction - Relation Extraction - LLaMA --- <p align="center"> <br> <img src="https://github.com/hitz-zentroa/GoLLIE/raw/main/assets/GoLLIE.png" style="height: 250px;"> <h2 align="center"><b>G</b>uideline f<b>o</b>llowing <b>L</b>arge <b>L</b>anguage Model for <b>I</b>nformation <b>E</b>xtraction</h2> <br> # Model Card for GoLLIE 13B <p align="justify"> We present GoLLIE, a Large Language Model trained to follow annotation guidelines. GoLLIE outperforms previous approaches on zero-shot Information Extraction and allows the user to perform inferences with annotation schemas defined on the fly. Different from previous approaches, GoLLIE is able to follow detailed definitions and does not only rely on the knowledge already encoded in the LLM. - 💻 Code: [https://github.com/osainz59/CoLLIE/](https://github.com/hitz-zentroa/GoLLIE) - 📒 Blog Post: [GoLLIE: Guideline-following Large Language Model for Information Extraction](https://hitz-zentroa.github.io/GoLLIE/) - 📖 Paper: [GoLLIE: Annotation Guidelines improve Zero-Shot Information-Extraction](https://arxiv.org/abs/2310.03668) - 🐕 GoLLIE Colection in the 🤗HuggingFace Hub: [HiTZ/gollie](https://huggingface.co/collections/HiTZ/gollie-651bf19ee315e8a224aacc4f) - 🚀 Example Jupyter Notebooks: [GoLLIE Notebooks](https://github.com/hitz-zentroa/GoLLIE/tree/main/notebooks) </p> <p align="center"> <img src="https://github.com/hitz-zentroa/GoLLIE/raw/main/assets/zero_shot_results.png"> </p> ### Model Description - **Developed by:** [Oscar Sainz](https://osainz59.github.io/), [Iker García-Ferrero](https://ikergarcia1996.github.io/Iker-Garcia-Ferrero/), [Rodrigo Agerri](https://ragerri.github.io/), [Oier Lopez de Lacalle](https://oierldl.github.io/), [German Rigau](https://adimen.si.ehu.es/~rigau/) and [Eneko Agirre](https://eagirre.github.io/) - **Institution:** [HiTZ Basque Center for Language Technology](http://www.hitz.eus/) - [Ixa](https://www.ixa.eus/node/2?language=en), [University of the Basque Country UPV/EHU](https://www.ehu.eus/en/en-home) - **Model type:** Text Generation - **Language(s) (NLP):** English - **License:** LLaMA2 License for the base and merged model. Apache 2.0 for pre-trained LoRA Adapters - **Finetuned from model:** CODE-LLaMA2 ## Schema definition and inference example The labels are represented as Python classes, and the guidelines or instructions are introduced as docstrings. The model start generating after the `result = [` line. ```Python # Entity definitions @dataclass class Launcher(Template): """Refers to a vehicle designed primarily to transport payloads from the Earth's surface to space. Launchers can carry various payloads, including satellites, crewed spacecraft, and cargo, into various orbits or even beyond Earth's orbit. They are usually multi-stage vehicles that use rocket engines for propulsion.""" mention: str """ The name of the launcher vehicle. Such as: "Sturn V", "Atlas V", "Soyuz", "Ariane 5" """ space_company: str # The company that operates the launcher. Such as: "Blue origin", "ESA", "Boeing", "ISRO", "Northrop Grumman", "Arianespace" crew: List[str] # Names of the crew members boarding the Launcher. 
Such as: "Neil Armstrong", "Michael Collins", "Buzz Aldrin" """ @dataclass class Mission(Template): """Any planned or accomplished journey beyond Earth's atmosphere with specific objectives, either crewed or uncrewed. It includes missions to satellites, the International Space Station (ISS), other celestial bodies, and deep space.""" mention: str """ The name of the mission. Such as: "Apollo 11", "Artemis", "Mercury" """ date: str # The start date of the mission departure: str # The place from which the vehicle will be launched. Such as: "Florida", "Houston", "French Guiana" destination: str # The place or planet to which the launcher will be sent. Such as "Moon", "low-orbit", "Saturn" # This is the text to analyze text = ( "The Ares 3 mission to Mars is scheduled for 2032. The Starship rocket built by SpaceX will take off from Boca Chica," "carrying the astronauts Max Rutherford, Elena Soto, and Jake Martinez." ) # The annotation instances that take place in the text above are listed here result = [ Mission(mention='Ares 3', date='2032', departure='Boca Chica', destination='Mars'), Launcher(mention='Starship', space_company='SpaceX', crew=['Max Rutherford', 'Elena Soto', 'Jake Martinez']) ] ``` ## How to Get Started with the Model Please read our [🚀 Example Jupyter Notebooks](https://github.com/hitz-zentroa/GoLLIE/tree/main/notebooks) to get started with GoLLIE. The best way to load the model is using our custom `load_model` function. However, you can also load them using the AutoModelForCausalLM class. **Important**: Our flash attention implementation has small numerical differences compared to the attention implementation in Huggingface. You must use the flag `trust_remote_code=True` or you will get inferior results. Flash attention requires an available CUDA GPU. Running GOLLIE pre-trained models on a CPU is not supported. We plan to address this in future releases. First, install flash attention 2: ```bash pip install flash-attn --no-build-isolation pip install git+https://github.com/HazyResearch/flash-attention.git#subdirectory=csrc/rotary ``` Then you can load the model using ```python import torch from transformers import AutoTokenizer, AutoModelForCausalLM tokenizer = AutoTokenizer.from_pretrained("HiTZ/GoLLIE-7B") model = AutoModelForCausalLM.from_pretrained("HiTZ/GoLLIE-7B", trust_remote_code=True, torch_dtype=torch.bfloat16) model.to("cuda") ``` Read our [🚀 Example Jupyter Notebooks](https://github.com/hitz-zentroa/GoLLIE/tree/main/notebooks) to learn how to easily define guidelines, generate model inputs and parse the output! ### Training Data This is the list of tasks used for training and evaluating GoLLIE. However, as demonstrated in the 🚀 [Create Custom Task notebook](https://github.com/hitz-zentroa/GoLLIE/blob/main/notebooks/Create%20Custom%20Task.ipynb), GoLLIE can perform a wide range of unseen tasks. For more info, read our [📖Paper](https://arxiv.org/abs/2310.03668).
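Complementing the loading snippet above: the card defers prompt construction and output parsing to the linked notebooks, so the following is only a minimal, hypothetical sketch of a single inference call that lets the model complete the `result = [` list. The toy guideline and text are illustrative, not the full templates used in the notebooks, and the official `load_model` helper handles templating and parsing more robustly.

```python
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Sketch only. Assumptions: flash-attn is installed and a CUDA GPU is available (see above);
# the prompt is a toy schema + text, not the notebook-generated templates.
model_id = "HiTZ/GoLLIE-7B"  # same checkpoint as the loading example above
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, trust_remote_code=True, torch_dtype=torch.bfloat16)
model.to("cuda")

prompt = (
    "@dataclass\n"
    "class Launcher(Template):\n"
    '    """Refers to a vehicle designed primarily to transport payloads to space."""\n'
    "    mention: str  # The name of the launcher vehicle.\n"
    "\n"
    'text = "The Starship rocket built by SpaceX will take off from Boca Chica."\n'
    "\n"
    "result = ["  # the model continues from here with the annotation instances
)

inputs = tokenizer(prompt, return_tensors="pt").to("cuda")
with torch.no_grad():
    output = model.generate(**inputs, max_new_tokens=64, do_sample=False)
# Decode only the newly generated tokens, i.e. the completion of the result list.
print(tokenizer.decode(output[0, inputs["input_ids"].shape[1]:], skip_special_tokens=True))
```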
<p align="center"> <img src="https://github.com/hitz-zentroa/GoLLIE/raw/main/assets/datasets.png"> </p> ## Evaluation | Model | Supervised average F1 | Zero-shot average F1 | 🤗HuggingFace Hub | |---|:---------------------:|:--------------------:|:---------------------------------------------------------:| | GoLLIE-7B | 73.0 | 55.3 | [HiTZ/GoLLIE-7B](https://huggingface.co/HiTZ/GoLLIE-7B) | | GoLLIE-13B | 73.9 | 56.0 | [HiTZ/GoLLIE-13B](https://huggingface.co/HiTZ/GoLLIE-13B) | | GoLLIE-34B | **75.0** | **57.2** | [HiTZ/GoLLIE-34B](https://huggingface.co/HiTZ/GoLLIE-34B) | ## Environmental Impact | Model | Hardware | FLOPs | Time (h) | CO<sup>2</sup>eq (kg) | |----------------|-------------------|---------------------------|-------------------|-------------------------------------| | GoLLIE 7B | 1xA100 | 11.9e<sup>18</sup> | 44.5 | 1.57 | | GoLLIE 13B | 1xA100 | 22.7e<sup>18</sup> | 79.5 | 2.80 | | GoLLIE 34B | 2xA100 | 55.8e<sup>18</sup> | 94.6 | 6.67 | ## Citation ``` @misc{sainz2023gollie, title={GoLLIE: Annotation Guidelines improve Zero-Shot Information-Extraction}, author={Oscar Sainz and Iker García-Ferrero and Rodrigo Agerri and Oier Lopez de Lacalle and German Rigau and Eneko Agirre}, year={2023}, eprint={2310.03668}, archivePrefix={arXiv}, primaryClass={cs.CL} } ```
[ "RELATION_EXTRACTION", "EVENT_EXTRACTION" ]
[ "BC5CDR", "NCBI DISEASE" ]
mav23/Llama3-Med42-8B-GGUF
mav23
text-generation
[ "gguf", "m42", "health", "healthcare", "clinical-llm", "text-generation", "en", "arxiv:2408.06142", "license:llama3", "region:us", "conversational" ]
2024-10-13T13:38:44
2024-10-13T14:26:17
147
0
--- language: - en license: llama3 license_name: llama3 pipeline_tag: text-generation tags: - m42 - health - healthcare - clinical-llm inference: false --- # **Med42-v2 - A Suite of Clinically-aligned Large Language Models** Med42-v2 is a suite of open-access clinical large language models (LLM) instruct and preference-tuned by M42 to expand access to medical knowledge. Built off LLaMA-3 and comprising either 8 or 70 billion parameters, these generative AI systems provide high-quality answers to medical questions. ## Key performance metrics: - Med42-v2-70B outperforms GPT-4.0 in most of the MCQA tasks. - Med42-v2-70B achieves a MedQA zero-shot performance of 79.10, surpassing the prior state-of-the-art among all openly available medical LLMs. - Med42-v2-70B sits at the top of the Clinical Elo Rating Leaderboard. |Models|Elo Score| |:---:|:---:| |**Med42-v2-70B**| 1764 | |Llama3-70B-Instruct| 1643 | |GPT4-o| 1426 | |Llama3-8B-Instruct| 1352 | |Mixtral-8x7b-Instruct| 970 | |**Med42-v2-8B**| 924 | |OpenBioLLM-70B| 657 | |JSL-MedLlama-3-8B-v2.0| 447 | ## Limitations & Safe Use - The Med42-v2 suite of models is not ready for real clinical use. Extensive human evaluation is undergoing as it is required to ensure safety. - Potential for generating incorrect or harmful information. - Risk of perpetuating biases in training data. Use this suite of models responsibly! Do not rely on them for medical usage without rigorous safety testing. ## Model Details *Disclaimer: This large language model is not yet ready for clinical use without further testing and validation. It should not be relied upon for making medical decisions or providing patient care.* Beginning with Llama3 models, Med42-v2 were instruction-tuned using a dataset of ~1B tokens compiled from different open-access and high-quality sources, including medical flashcards, exam questions, and open-domain dialogues. **Model Developers:** M42 Health AI Team **Finetuned from model:** Llama3 - 8B & 70B Instruct **Context length:** 8k tokens **Input:** Text only data **Output:** Model generates text only **Status:** This is a static model trained on an offline dataset. Future versions of the tuned models will be released as we enhance the model's performance. **License:** Llama 3 Community License Agreement **Research Paper:** [Med42-v2: A Suite of Clinical LLMs](https://huggingface.co/papers/2408.06142) ## Intended Use The Med42-v2 suite of models is being made available for further testing and assessment as AI assistants to enhance clinical decision-making and access to LLMs for healthcare use. Potential use cases include: - Medical question answering - Patient record summarization - Aiding medical diagnosis - General health Q&A **Run the model** You can use the 🤗 Transformers library `text-generation` pipeline to do inference. ```python import transformers import torch model_name_or_path = "m42-health/Llama3-Med42-8B" pipeline = transformers.pipeline( "text-generation", model=model_name_or_path, torch_dtype=torch.bfloat16, device_map="auto", ) messages = [ { "role": "system", "content": ( "You are a helpful, respectful and honest medical assistant. You are a second version of Med42 developed by the AI team at M42, UAE. " "Always answer as helpfully as possible, while being safe. " "Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. " "Please ensure that your responses are socially unbiased and positive in nature. 
If a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. " "If you don’t know the answer to a question, please don’t share false information." ), }, {"role": "user", "content": "What are the symptoms of diabetes?"}, ] prompt = pipeline.tokenizer.apply_chat_template( messages, tokenize=False, add_generation_prompt=False ) stop_tokens = [ pipeline.tokenizer.eos_token_id, pipeline.tokenizer.convert_tokens_to_ids("<|eot_id|>"), ] outputs = pipeline( prompt, max_new_tokens=512, eos_token_id=stop_tokens, do_sample=True, temperature=0.4, top_k=150, top_p=0.75, ) print(outputs[0]["generated_text"][len(prompt) :]) ``` ## Hardware and Software The training was conducted on the NVIDIA DGX cluster with H100 GPUs, utilizing PyTorch's Fully Sharded Data Parallel (FSDP) framework. ## Evaluation Results ### Open-ended question generation To ensure a robust evaluation of our model's output quality, we employ the LLM-as-a-Judge approach using Prometheus-8x7b-v2.0. Our assessment uses 4,000 carefully curated publicly accessible healthcare-related questions, generating responses from various models. We then use Prometheus to conduct pairwise comparisons of the answers. Drawing inspiration from the LMSYS Chatbot-Arena methodology, we present the results as Elo ratings for each model. To maintain fairness and eliminate potential bias from prompt engineering, we used the same simple system prompt for every model throughout the evaluation process. Below is the scoring rubric we used to prompt Prometheus to select the best answer: ``` ### Score Rubric: Which response is of higher overall quality in a medical context? Consider: * Relevance: Does it directly address the question? * Completeness: Does it cover all important aspects, details and subpoints? * Safety: Does it avoid unsafe practices and address potential risks? * Ethics: Does it maintain confidentiality and avoid biases? * Clarity: Is it professional, clear and easy to understand? ``` #### Elo Ratings |Models|Elo Score| |:---:|:---:| |**Med42-v2-70B**| 1764 | |Llama3-70B-Instruct| 1643 | |GPT4-o| 1426 | |Llama3-8B-Instruct| 1352 | |Mixtral-8x7b-Instruct| 970 | |**Med42-v2-8B**| 924 | |OpenBioLLM-70B| 657 | |JSL-MedLlama-3-8B-v2.0| 447 | #### Win-rate ![plot](./pairwise_model_comparison.svg) ### MCQA Evaluation Med42-v2 improves performance on every clinical benchmark compared to our previous version, including MedQA, MedMCQA, USMLE, MMLU clinical topics and MMLU Pro clinical subset. For all evaluations reported so far, we use [EleutherAI's evaluation harness library](https://github.com/EleutherAI/lm-evaluation-harness) and report zero-shot accuracies (except otherwise stated). We integrated chat templates into harness and computed the likelihood for the full answer instead of only the tokens "a.", "b.", "c." or "d.". |Model|MMLU Pro|MMLU|MedMCQA|MedQA|USMLE| |---:|:---:|:---:|:---:|:---:|:---:| |**Med42v2-70B**|64.36|87.12|73.20|79.10|83.80| |**Med42v2-8B**|54.30|75.76|61.34|62.84|67.04| |OpenBioLLM-70B|64.24|90.40|73.18|76.90|79.01| |GPT-4.0<sup>&dagger;</sup>|-|87.00|69.50|78.90|84.05| |MedGemini*|-|-|-|84.00|-| |Med-PaLM-2 (5-shot)*|-|87.77|71.30|79.70|-| |Med42|-|76.72|60.90|61.50|71.85| |ClinicalCamel-70B|-|69.75|47.00|53.40|54.30| |GPT-3.5<sup>&dagger;</sup>|-|66.63|50.10|50.80|53.00| |Llama3-8B-Instruct|48.24|72.89|59.65|61.64|60.38| |Llama3-70B-Instruct|64.24|85.99|72.03|78.88|83.57| **For MedGemini, results are reported for MedQA without self-training and without search. 
We note that 0-shot performance is not reported for Med-PaLM 2. Further details can be found at [https://github.com/m42health/med42](https://github.com/m42health/med42)*. <sup>&dagger;</sup> *Results as reported in the paper [Capabilities of GPT-4 on Medical Challenge Problems](https://www.microsoft.com/en-us/research/uploads/prod/2023/03/GPT-4_medical_benchmarks.pdf)*. ## Accessing Med42 and Reporting Issues Please report any software "bug" or other problems through one of the following means: - Reporting issues with the model: [https://github.com/m42health/med42](https://github.com/m42health/med42) - Reporting risky content generated by the model, bugs and/or any security concerns: [https://forms.office.com/r/fPY4Ksecgf](https://forms.office.com/r/fPY4Ksecgf) - M42’s privacy policy available at [https://m42.ae/privacy-policy/](https://m42.ae/privacy-policy/) - Reporting violations of the Acceptable Use Policy or unlicensed uses of Med42: <[email protected]> ## Acknowledgements We thank the Torch FSDP team for their robust distributed training framework, the EleutherAI harness team for their valuable evaluation tools, and the Hugging Face Alignment team for their contributions to responsible AI development. ## Citation ``` @misc{med42v2, Author = {Cl{\'e}ment Christophe and Praveen K Kanithi and Tathagata Raha and Shadab Khan and Marco AF Pimentel}, Title = {Med42-v2: A Suite of Clinical LLMs}, Year = {2024}, Eprint = {arXiv:2408.06142}, url={https://arxiv.org/abs/2408.06142}, } ```
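As context for the MCQA protocol described above (scoring the likelihood of the full answer under the chat template rather than only the option letter), here is a minimal, hypothetical sketch of that scoring idea. It is not the EleutherAI harness integration used for the reported numbers, the question and options are illustrative rather than benchmark items, and it ignores tokenization-boundary subtleties that a real evaluation would handle.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "m42-health/Llama3-Med42-8B"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.bfloat16, device_map="auto")

question = "Which hormone lowers blood glucose levels?"  # illustrative item only
options = ["Insulin", "Glucagon", "Cortisol", "Adrenaline"]

def full_answer_loglikelihood(question: str, answer: str) -> float:
    # Chat-template the question, then sum log-probabilities over the tokens of the full answer text.
    prompt = tokenizer.apply_chat_template(
        [{"role": "user", "content": question}], tokenize=False, add_generation_prompt=True
    )
    prompt_ids = tokenizer(prompt, return_tensors="pt").input_ids.to(model.device)
    full_ids = tokenizer(prompt + answer, return_tensors="pt").input_ids.to(model.device)
    answer_len = full_ids.shape[1] - prompt_ids.shape[1]
    with torch.no_grad():
        logits = model(full_ids).logits
    # Token t is predicted by the logits at position t-1, hence the one-position shift below.
    log_probs = torch.log_softmax(logits[0, -answer_len - 1 : -1], dim=-1)
    answer_tokens = full_ids[0, -answer_len:]
    return log_probs.gather(1, answer_tokens.unsqueeze(1)).sum().item()

scores = {opt: full_answer_loglikelihood(question, opt) for opt in options}
print(max(scores, key=scores.get))  # the option with the highest full-answer likelihood
```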
[ "QUESTION_ANSWERING", "SUMMARIZATION" ]
[ "MEDQA" ]
HiTZ/GoLLIE-34B
HiTZ
text-generation
[ "transformers", "pytorch", "llama", "text-generation", "code", "text-generation-inference", "Information Extraction", "IE", "Named Entity Recogniton", "Event Extraction", "Relation Extraction", "LLaMA", "custom_code", "en", "dataset:ACE05", "dataset:bc5cdr", "dataset:conll2003", "dataset:ncbi_disease", "dataset:conll2012_ontonotesv5", "dataset:rams", "dataset:tacred", "dataset:wnut_17", "arxiv:2310.03668", "license:llama2", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-09-29T23:57:40
2023-10-20T07:12:32
146
38
--- datasets: - ACE05 - bc5cdr - conll2003 - ncbi_disease - conll2012_ontonotesv5 - rams - tacred - wnut_17 language: - en license: llama2 metrics: - f1 pipeline_tag: text-generation tags: - code - text-generation-inference - Information Extraction - IE - Named Entity Recogniton - Event Extraction - Relation Extraction - LLaMA --- <p align="center"> <br> <img src="https://github.com/hitz-zentroa/GoLLIE/raw/main/assets/GoLLIE.png" style="height: 250px;"> <h2 align="center"><b>G</b>uideline f<b>o</b>llowing <b>L</b>arge <b>L</b>anguage Model for <b>I</b>nformation <b>E</b>xtraction</h2> <br> # Model Card for GoLLIE 34B <p align="justify"> We present GoLLIE, a Large Language Model trained to follow annotation guidelines. GoLLIE outperforms previous approaches on zero-shot Information Extraction and allows the user to perform inferences with annotation schemas defined on the fly. Different from previous approaches, GoLLIE is able to follow detailed definitions and does not only rely on the knowledge already encoded in the LLM. - 💻 Code: [https://github.com/osainz59/CoLLIE/](https://github.com/hitz-zentroa/GoLLIE) - 📒 Blog Post: [GoLLIE: Guideline-following Large Language Model for Information Extraction](https://hitz-zentroa.github.io/GoLLIE/) - 📖 Paper: [GoLLIE: Annotation Guidelines improve Zero-Shot Information-Extraction](https://arxiv.org/abs/2310.03668) - 🐕 GoLLIE Colection in the 🤗HuggingFace Hub: [HiTZ/gollie](https://huggingface.co/collections/HiTZ/gollie-651bf19ee315e8a224aacc4f) - 🚀 Example Jupyter Notebooks: [GoLLIE Notebooks](https://github.com/hitz-zentroa/GoLLIE/tree/main/notebooks) </p> <p align="center"> <img src="https://github.com/hitz-zentroa/GoLLIE/raw/main/assets/zero_shot_results.png"> </p> ### Model Description - **Developed by:** [Oscar Sainz](https://osainz59.github.io/), [Iker García-Ferrero](https://ikergarcia1996.github.io/Iker-Garcia-Ferrero/), [Rodrigo Agerri](https://ragerri.github.io/), [Oier Lopez de Lacalle](https://oierldl.github.io/), [German Rigau](https://adimen.si.ehu.es/~rigau/) and [Eneko Agirre](https://eagirre.github.io/) - **Institution:** [HiTZ Basque Center for Language Technology](http://www.hitz.eus/) - [Ixa](https://www.ixa.eus/node/2?language=en), [University of the Basque Country UPV/EHU](https://www.ehu.eus/en/en-home) - **Model type:** Text Generation - **Language(s) (NLP):** English - **License:** LLaMA2 License for the base and merged model. Apache 2.0 for pre-trained LoRA Adapters - **Finetuned from model:** CODE-LLaMA2 ## Schema definition and inference example The labels are represented as Python classes, and the guidelines or instructions are introduced as docstrings. The model start generating after the `result = [` line. ```Python # Entity definitions @dataclass class Launcher(Template): """Refers to a vehicle designed primarily to transport payloads from the Earth's surface to space. Launchers can carry various payloads, including satellites, crewed spacecraft, and cargo, into various orbits or even beyond Earth's orbit. They are usually multi-stage vehicles that use rocket engines for propulsion.""" mention: str """ The name of the launcher vehicle. Such as: "Sturn V", "Atlas V", "Soyuz", "Ariane 5" """ space_company: str # The company that operates the launcher. Such as: "Blue origin", "ESA", "Boeing", "ISRO", "Northrop Grumman", "Arianespace" crew: List[str] # Names of the crew members boarding the Launcher. 
Such as: "Neil Armstrong", "Michael Collins", "Buzz Aldrin" """ @dataclass class Mission(Template): """Any planned or accomplished journey beyond Earth's atmosphere with specific objectives, either crewed or uncrewed. It includes missions to satellites, the International Space Station (ISS), other celestial bodies, and deep space.""" mention: str """ The name of the mission. Such as: "Apollo 11", "Artemis", "Mercury" """ date: str # The start date of the mission departure: str # The place from which the vehicle will be launched. Such as: "Florida", "Houston", "French Guiana" destination: str # The place or planet to which the launcher will be sent. Such as "Moon", "low-orbit", "Saturn" # This is the text to analyze text = ( "The Ares 3 mission to Mars is scheduled for 2032. The Starship rocket built by SpaceX will take off from Boca Chica," "carrying the astronauts Max Rutherford, Elena Soto, and Jake Martinez." ) # The annotation instances that take place in the text above are listed here result = [ Mission(mention='Ares 3', date='2032', departure='Boca Chica', destination='Mars'), Launcher(mention='Starship', space_company='SpaceX', crew=['Max Rutherford', 'Elena Soto', 'Jake Martinez']) ] ``` ## How to Get Started with the Model Please read our [🚀 Example Jupyter Notebooks](https://github.com/hitz-zentroa/GoLLIE/tree/main/notebooks) to get started with GoLLIE. The best way to load the model is using our custom `load_model` function. However, you can also load them using the AutoModelForCausalLM class. **Important**: Our flash attention implementation has small numerical differences compared to the attention implementation in Huggingface. You must use the flag `trust_remote_code=True` or you will get inferior results. Flash attention requires an available CUDA GPU. Running GOLLIE pre-trained models on a CPU is not supported. We plan to address this in future releases. First, install flash attention 2: ```bash pip install flash-attn --no-build-isolation pip install git+https://github.com/HazyResearch/flash-attention.git#subdirectory=csrc/rotary ``` Then you can load the model using ```python import torch from transformers import AutoTokenizer, AutoModelForCausalLM tokenizer = AutoTokenizer.from_pretrained("HiTZ/GoLLIE-7B") model = AutoModelForCausalLM.from_pretrained("HiTZ/GoLLIE-7B", trust_remote_code=True, torch_dtype=torch.bfloat16) model.to("cuda") ``` Read our [🚀 Example Jupyter Notebooks](https://github.com/hitz-zentroa/GoLLIE/tree/main/notebooks) to learn how to easily define guidelines, generate model inputs and parse the output! ### Training Data This is the list of tasks used for training and evaluating GoLLIE. However, as demonstrated in the 🚀 [Create Custom Task notebook](https://github.com/hitz-zentroa/GoLLIE/blob/main/notebooks/Create%20Custom%20Task.ipynb), GoLLIE can perform a wide range of unseen tasks. For more info, read our [📖Paper](https://arxiv.org/abs/2310.03668).
<p align="center"> <img src="https://github.com/hitz-zentroa/GoLLIE/raw/main/assets/datasets.png"> </p> ## Evaluation | Model | Supervised average F1 | Zero-shot average F1 | 🤗HuggingFace Hub | |---|:---------------------:|:--------------------:|:---------------------------------------------------------:| | GoLLIE-7B | 73.0 | 55.3 | [HiTZ/GoLLIE-7B](https://huggingface.co/HiTZ/GoLLIE-7B) | | GoLLIE-13B | 73.9 | 56.0 | [HiTZ/GoLLIE-13B](https://huggingface.co/HiTZ/GoLLIE-13B) | | GoLLIE-34B | **75.0** | **57.2** | [HiTZ/GoLLIE-34B](https://huggingface.co/HiTZ/GoLLIE-34B) | ## Environmental Impact | Model | Hardware | FLOPs | Time (h) | CO<sup>2</sup>eq (kg) | |----------------|-------------------|---------------------------|-------------------|-------------------------------------| | GoLLIE 7B | 1xA100 | 11.9e<sup>18</sup> | 44.5 | 1.57 | | GoLLIE 13B | 1xA100 | 22.7e<sup>18</sup> | 79.5 | 2.80 | | GoLLIE 34B | 2xA100 | 55.8e<sup>18</sup> | 94.6 | 6.67 | ## Citation ``` @misc{sainz2023gollie, title={GoLLIE: Annotation Guidelines improve Zero-Shot Information-Extraction}, author={Oscar Sainz and Iker García-Ferrero and Rodrigo Agerri and Oier Lopez de Lacalle and German Rigau and Eneko Agirre}, year={2023}, eprint={2310.03668}, archivePrefix={arXiv}, primaryClass={cs.CL} } ```
[ "RELATION_EXTRACTION", "EVENT_EXTRACTION" ]
[ "BC5CDR", "NCBI DISEASE" ]
yibinlei/LENS-d4000
yibinlei
feature-extraction
[ "transformers", "safetensors", "mistral", "feature-extraction", "text-embedding", "sentence-similarity", "mteb", "arxiv:2501.09749", "license:apache-2.0", "model-index", "text-generation-inference", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
2024-12-30T02:11:11
2025-01-22T11:23:33
145
1
--- license: apache-2.0 tags: - text-embedding - feature-extraction - sentence-similarity - transformers - mteb model-index: - name: Gouzi3618/LENS-4000 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 93.61194029850746 - type: ap value: 73.89383804438975 - type: ap_weighted value: 73.89383804438975 - type: f1 value: 90.31690759629414 - type: f1_weighted value: 93.75647989786705 - type: main_score value: 93.61194029850746 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification (default) type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 97.05455 - type: ap value: 95.53082050876944 - type: ap_weighted value: 95.53082050876944 - type: f1 value: 97.05405422635297 - type: f1_weighted value: 97.05405422635297 - type: main_score value: 97.05455 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 62.834 - type: f1 value: 61.45864309016823 - type: f1_weighted value: 61.45864309016823 - type: main_score value: 62.834 - task: type: Retrieval dataset: name: MTEB ArguAna (default) type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: main_score value: 77.31700000000001 - type: map_at_1 value: 56.757000000000005 - type: map_at_10 value: 71.136 - type: map_at_100 value: 71.339 - type: map_at_1000 value: 71.34 - type: map_at_20 value: 71.314 - type: map_at_3 value: 68.67 - type: map_at_5 value: 70.274 - type: mrr_at_1 value: 57.7524893314367 - type: mrr_at_10 value: 71.48944997629222 - type: mrr_at_100 value: 71.69295763275832 - type: mrr_at_1000 value: 71.69337848338161 - type: mrr_at_20 value: 71.66813464342809 - type: mrr_at_3 value: 69.04931247036524 - type: mrr_at_5 value: 70.61403508771947 - type: nauc_map_at_1000_diff1 value: 22.388799480884085 - type: nauc_map_at_1000_max value: -12.478980783254928 - type: nauc_map_at_1000_std value: -34.52645054563002 - type: nauc_map_at_100_diff1 value: 22.390230435504822 - type: nauc_map_at_100_max value: -12.476906954784145 - type: nauc_map_at_100_std value: -34.52397021747207 - type: nauc_map_at_10_diff1 value: 22.376630933605696 - type: nauc_map_at_10_max value: -12.263443549324265 - type: nauc_map_at_10_std value: -34.54600345217659 - type: nauc_map_at_1_diff1 value: 24.736009447964786 - type: nauc_map_at_1_max value: -15.468802285096888 - type: nauc_map_at_1_std value: -34.94706450308731 - type: nauc_map_at_20_diff1 value: 22.42616556265818 - type: nauc_map_at_20_max value: -12.405995637209402 - type: nauc_map_at_20_std value: -34.51377335859978 - type: nauc_map_at_3_diff1 value: 22.371578696906337 - type: nauc_map_at_3_max value: -11.688805933888062 - type: nauc_map_at_3_std value: -34.527333588071734 - type: nauc_map_at_5_diff1 value: 22.336979324422785 - type: nauc_map_at_5_max value: -11.708158232210106 - type: nauc_map_at_5_std value: -34.0971263102141 - type: nauc_mrr_at_1000_diff1 value: 19.213326943604773 - type: nauc_mrr_at_1000_max value: -13.159790933260695 - type: nauc_mrr_at_1000_std value: -34.37354227933731 - type: nauc_mrr_at_100_diff1 value: 19.21482127549166 - type: nauc_mrr_at_100_max value: -13.157697787479252 - 
type: nauc_mrr_at_100_std value: -34.371056737336566 - type: nauc_mrr_at_10_diff1 value: 19.229328983348385 - type: nauc_mrr_at_10_max value: -12.937512977150767 - type: nauc_mrr_at_10_std value: -34.394516401257476 - type: nauc_mrr_at_1_diff1 value: 21.9227471620602 - type: nauc_mrr_at_1_max value: -14.49455136413785 - type: nauc_mrr_at_1_std value: -34.40628723085126 - type: nauc_mrr_at_20_diff1 value: 19.2543334569671 - type: nauc_mrr_at_20_max value: -13.085814849140306 - type: nauc_mrr_at_20_std value: -34.360993502287066 - type: nauc_mrr_at_3_diff1 value: 19.311458613644003 - type: nauc_mrr_at_3_max value: -12.495078377194448 - type: nauc_mrr_at_3_std value: -34.72187448892093 - type: nauc_mrr_at_5_diff1 value: 19.28984692496265 - type: nauc_mrr_at_5_max value: -12.410164787044511 - type: nauc_mrr_at_5_std value: -34.008782372540274 - type: nauc_ndcg_at_1000_diff1 value: 22.334871936347476 - type: nauc_ndcg_at_1000_max value: -11.650741994163685 - type: nauc_ndcg_at_1000_std value: -33.95291620206335 - type: nauc_ndcg_at_100_diff1 value: 22.37733922760018 - type: nauc_ndcg_at_100_max value: -11.589691369551995 - type: nauc_ndcg_at_100_std value: -33.87967074881655 - type: nauc_ndcg_at_10_diff1 value: 22.435450031266548 - type: nauc_ndcg_at_10_max value: -10.425129788856612 - type: nauc_ndcg_at_10_std value: -34.021875869293375 - type: nauc_ndcg_at_1_diff1 value: 24.736009447964786 - type: nauc_ndcg_at_1_max value: -15.468802285096888 - type: nauc_ndcg_at_1_std value: -34.94706450308731 - type: nauc_ndcg_at_20_diff1 value: 22.690287768336383 - type: nauc_ndcg_at_20_max value: -11.01817585186346 - type: nauc_ndcg_at_20_std value: -33.855537917453795 - type: nauc_ndcg_at_3_diff1 value: 22.3679413098738 - type: nauc_ndcg_at_3_max value: -9.344141286897605 - type: nauc_ndcg_at_3_std value: -34.04026532887956 - type: nauc_ndcg_at_5_diff1 value: 22.379749690565344 - type: nauc_ndcg_at_5_max value: -8.914654859650676 - type: nauc_ndcg_at_5_std value: -32.83042128448497 - type: nauc_precision_at_1000_diff1 value: 11.379891065145626 - type: nauc_precision_at_1000_max value: 33.6930576530585 - type: nauc_precision_at_1000_std value: 67.69047934273685 - type: nauc_precision_at_100_diff1 value: 26.149909220953184 - type: nauc_precision_at_100_max value: 44.74421471088036 - type: nauc_precision_at_100_std value: 73.07539945227865 - type: nauc_precision_at_10_diff1 value: 25.663092824490043 - type: nauc_precision_at_10_max value: 20.801697270838257 - type: nauc_precision_at_10_std value: -24.568452476876267 - type: nauc_precision_at_1_diff1 value: 24.736009447964786 - type: nauc_precision_at_1_max value: -15.468802285096888 - type: nauc_precision_at_1_std value: -34.94706450308731 - type: nauc_precision_at_20_diff1 value: 47.6822175290111 - type: nauc_precision_at_20_max value: 50.99214578615923 - type: nauc_precision_at_20_std value: 4.294200220909195 - type: nauc_precision_at_3_diff1 value: 22.832628569595652 - type: nauc_precision_at_3_max value: 1.7043962267472152 - type: nauc_precision_at_3_std value: -31.67043197631448 - type: nauc_precision_at_5_diff1 value: 23.570424373006762 - type: nauc_precision_at_5_max value: 11.289340977365226 - type: nauc_precision_at_5_std value: -23.100403202876947 - type: nauc_recall_at_1000_diff1 value: 11.379891065145936 - type: nauc_recall_at_1000_max value: 33.69305765305101 - type: nauc_recall_at_1000_std value: 67.6904793427376 - type: nauc_recall_at_100_diff1 value: 26.14990922095449 - type: nauc_recall_at_100_max value: 44.74421471087856 - type: 
nauc_recall_at_100_std value: 73.07539945228118 - type: nauc_recall_at_10_diff1 value: 25.663092824489333 - type: nauc_recall_at_10_max value: 20.801697270838257 - type: nauc_recall_at_10_std value: -24.568452476876796 - type: nauc_recall_at_1_diff1 value: 24.736009447964786 - type: nauc_recall_at_1_max value: -15.468802285096888 - type: nauc_recall_at_1_std value: -34.94706450308731 - type: nauc_recall_at_20_diff1 value: 47.682217529011794 - type: nauc_recall_at_20_max value: 50.992145786157735 - type: nauc_recall_at_20_std value: 4.294200220909699 - type: nauc_recall_at_3_diff1 value: 22.832628569595713 - type: nauc_recall_at_3_max value: 1.7043962267472765 - type: nauc_recall_at_3_std value: -31.67043197631441 - type: nauc_recall_at_5_diff1 value: 23.570424373006617 - type: nauc_recall_at_5_max value: 11.289340977365105 - type: nauc_recall_at_5_std value: -23.10040320287697 - type: ndcg_at_1 value: 56.757000000000005 - type: ndcg_at_10 value: 77.31700000000001 - type: ndcg_at_100 value: 78.109 - type: ndcg_at_1000 value: 78.118 - type: ndcg_at_20 value: 77.95400000000001 - type: ndcg_at_3 value: 72.416 - type: ndcg_at_5 value: 75.266 - type: precision_at_1 value: 56.757000000000005 - type: precision_at_10 value: 9.629999999999999 - type: precision_at_100 value: 0.996 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.9399999999999995 - type: precision_at_3 value: 27.738000000000003 - type: precision_at_5 value: 18.009 - type: recall_at_1 value: 56.757000000000005 - type: recall_at_10 value: 96.30199999999999 - type: recall_at_100 value: 99.57300000000001 - type: recall_at_1000 value: 99.644 - type: recall_at_20 value: 98.791 - type: recall_at_3 value: 83.21499999999999 - type: recall_at_5 value: 90.04299999999999 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P (default) type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: main_score value: 54.87086052375513 - type: v_measure value: 54.87086052375513 - type: v_measure_std value: 14.454097589509681 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S (default) type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: main_score value: 50.24788850687535 - type: v_measure value: 50.24788850687535 - type: v_measure_std value: 14.477615357158207 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions (default) type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: main_score value: 65.44621361559305 - type: map value: 65.44621361559305 - type: mrr value: 78.08380600624368 - type: nAUC_map_diff1 value: 19.65299058945553 - type: nAUC_map_max value: 23.879426571566693 - type: nAUC_map_std value: 21.448441444297377 - type: nAUC_mrr_diff1 value: 27.941419145421513 - type: nAUC_mrr_max value: 38.67113462643772 - type: nAUC_mrr_std value: 27.452420501889257 - task: type: STS dataset: name: MTEB BIOSSES (default) type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cosine_pearson value: 85.46985769611739 - type: cosine_spearman value: 84.47120184207104 - type: euclidean_pearson value: 83.12042031068798 - type: euclidean_spearman value: 84.47120184207104 - type: main_score value: 84.47120184207104 - type: manhattan_pearson value: 84.05034163855613 - type: manhattan_spearman value: 85.87725797639943 - type: 
pearson value: 85.46985769611739 - type: spearman value: 84.47120184207104 - task: type: Classification dataset: name: MTEB Banking77Classification (default) type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 90.42532467532467 - type: f1 value: 90.26361056658011 - type: f1_weighted value: 90.26361056658011 - type: main_score value: 90.42532467532467 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P (default) type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: main_score value: 52.38683432596689 - type: v_measure value: 52.38683432596689 - type: v_measure_std value: 1.1038897398800631 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S (default) type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: main_score value: 48.3512116630593 - type: v_measure value: 48.3512116630593 - type: v_measure_std value: 0.9899344134435963 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval (default) type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: main_score value: 57.058 - type: map_at_1 value: 37.856 - type: map_at_10 value: 50.468 - type: map_at_100 value: 52.122 - type: map_at_1000 value: 52.227999999999994 - type: map_at_20 value: 51.452 - type: map_at_3 value: 46.035 - type: map_at_5 value: 48.697 - type: mrr_at_1 value: 46.49499284692418 - type: mrr_at_10 value: 56.45865976338077 - type: mrr_at_100 value: 57.15406103632462 - type: mrr_at_1000 value: 57.186669571472 - type: mrr_at_20 value: 56.89032246443981 - type: mrr_at_3 value: 53.62422508345254 - type: mrr_at_5 value: 55.47687172150688 - type: nauc_map_at_1000_diff1 value: 50.512172373172604 - type: nauc_map_at_1000_max value: 28.051021726690973 - type: nauc_map_at_1000_std value: -5.414493155767105 - type: nauc_map_at_100_diff1 value: 50.525494288235365 - type: nauc_map_at_100_max value: 28.080979312479716 - type: nauc_map_at_100_std value: -5.377611521735267 - type: nauc_map_at_10_diff1 value: 50.52132032370831 - type: nauc_map_at_10_max value: 28.19055535036718 - type: nauc_map_at_10_std value: -6.146385891118221 - type: nauc_map_at_1_diff1 value: 55.545356828877566 - type: nauc_map_at_1_max value: 27.61274844858242 - type: nauc_map_at_1_std value: -8.621029360796063 - type: nauc_map_at_20_diff1 value: 50.52798873084703 - type: nauc_map_at_20_max value: 28.069246502807125 - type: nauc_map_at_20_std value: -5.656028887086587 - type: nauc_map_at_3_diff1 value: 51.37953193900601 - type: nauc_map_at_3_max value: 29.14887116379335 - type: nauc_map_at_3_std value: -6.789146340902688 - type: nauc_map_at_5_diff1 value: 51.04248408716013 - type: nauc_map_at_5_max value: 28.153458547181888 - type: nauc_map_at_5_std value: -6.342753281151557 - type: nauc_mrr_at_1000_diff1 value: 49.745309714982675 - type: nauc_mrr_at_1000_max value: 27.78192309971017 - type: nauc_mrr_at_1000_std value: -3.556235805947885 - type: nauc_mrr_at_100_diff1 value: 49.7576370448317 - type: nauc_mrr_at_100_max value: 27.77848644353481 - type: nauc_mrr_at_100_std value: -3.5443586736899624 - type: nauc_mrr_at_10_diff1 value: 49.60030692211042 - type: nauc_mrr_at_10_max value: 27.712721575788567 - type: nauc_mrr_at_10_std value: -3.924565065659438 - type: nauc_mrr_at_1_diff1 value: 52.73175303711915 - type: nauc_mrr_at_1_max 
value: 27.68004014141246 - type: nauc_mrr_at_1_std value: -7.301104287664366 - type: nauc_mrr_at_20_diff1 value: 49.66247769979565 - type: nauc_mrr_at_20_max value: 27.60926232163171 - type: nauc_mrr_at_20_std value: -3.6243839173701677 - type: nauc_mrr_at_3_diff1 value: 49.437177966315446 - type: nauc_mrr_at_3_max value: 28.430569498388607 - type: nauc_mrr_at_3_std value: -3.5559995542946385 - type: nauc_mrr_at_5_diff1 value: 49.68971929099807 - type: nauc_mrr_at_5_max value: 28.13559946270903 - type: nauc_mrr_at_5_std value: -2.9843664884520726 - type: nauc_ndcg_at_1000_diff1 value: 49.47802328039259 - type: nauc_ndcg_at_1000_max value: 27.59881835465231 - type: nauc_ndcg_at_1000_std value: -3.232028189268046 - type: nauc_ndcg_at_100_diff1 value: 49.53100830995123 - type: nauc_ndcg_at_100_max value: 27.875639257725144 - type: nauc_ndcg_at_100_std value: -2.4268357776711142 - type: nauc_ndcg_at_10_diff1 value: 48.82720471657402 - type: nauc_ndcg_at_10_max value: 27.4834323844139 - type: nauc_ndcg_at_10_std value: -5.133549029794136 - type: nauc_ndcg_at_1_diff1 value: 52.73175303711915 - type: nauc_ndcg_at_1_max value: 27.68004014141246 - type: nauc_ndcg_at_1_std value: -7.301104287664366 - type: nauc_ndcg_at_20_diff1 value: 48.857238987577325 - type: nauc_ndcg_at_20_max value: 26.970121398869445 - type: nauc_ndcg_at_20_std value: -3.9660623414106118 - type: nauc_ndcg_at_3_diff1 value: 49.61450013831375 - type: nauc_ndcg_at_3_max value: 28.75501716693836 - type: nauc_ndcg_at_3_std value: -5.217281890185867 - type: nauc_ndcg_at_5_diff1 value: 49.59112537962819 - type: nauc_ndcg_at_5_max value: 27.4895238269022 - type: nauc_ndcg_at_5_std value: -4.380637070315594 - type: nauc_precision_at_1000_diff1 value: -23.290037380315777 - type: nauc_precision_at_1000_max value: -13.531919273991491 - type: nauc_precision_at_1000_std value: -0.5673577305039684 - type: nauc_precision_at_100_diff1 value: -15.840613974338588 - type: nauc_precision_at_100_max value: -7.8928638391297 - type: nauc_precision_at_100_std value: 8.388200097889415 - type: nauc_precision_at_10_diff1 value: 2.9240012043321197 - type: nauc_precision_at_10_max value: 4.608250220772331 - type: nauc_precision_at_10_std value: 5.977689477937855 - type: nauc_precision_at_1_diff1 value: 52.73175303711915 - type: nauc_precision_at_1_max value: 27.68004014141246 - type: nauc_precision_at_1_std value: -7.301104287664366 - type: nauc_precision_at_20_diff1 value: -5.605638741706147 - type: nauc_precision_at_20_max value: -1.4134648739891957 - type: nauc_precision_at_20_std value: 8.992151765925966 - type: nauc_precision_at_3_diff1 value: 24.866168696329318 - type: nauc_precision_at_3_max value: 19.399326441488363 - type: nauc_precision_at_3_std value: 0.6188303987405278 - type: nauc_precision_at_5_diff1 value: 14.371077180865004 - type: nauc_precision_at_5_max value: 10.734127354229518 - type: nauc_precision_at_5_std value: 4.777656412206082 - type: nauc_recall_at_1000_diff1 value: 49.6325052496859 - type: nauc_recall_at_1000_max value: 48.25106981500522 - type: nauc_recall_at_1000_std value: 60.106823545465794 - type: nauc_recall_at_100_diff1 value: 44.626633722434185 - type: nauc_recall_at_100_max value: 31.595409586964116 - type: nauc_recall_at_100_std value: 24.60697955490675 - type: nauc_recall_at_10_diff1 value: 41.964075418524594 - type: nauc_recall_at_10_max value: 23.613074060625877 - type: nauc_recall_at_10_std value: -6.079147189586106 - type: nauc_recall_at_1_diff1 value: 55.545356828877566 - type: nauc_recall_at_1_max value: 
27.61274844858242 - type: nauc_recall_at_1_std value: -8.621029360796063 - type: nauc_recall_at_20_diff1 value: 41.019710158930536 - type: nauc_recall_at_20_max value: 20.53011931287726 - type: nauc_recall_at_20_std value: -0.43152987141906374 - type: nauc_recall_at_3_diff1 value: 46.044331178569756 - type: nauc_recall_at_3_max value: 28.4576718197227 - type: nauc_recall_at_3_std value: -5.039580731378937 - type: nauc_recall_at_5_diff1 value: 45.15410321963173 - type: nauc_recall_at_5_max value: 24.95381159724468 - type: nauc_recall_at_5_std value: -2.7696163048712092 - type: ndcg_at_1 value: 46.495 - type: ndcg_at_10 value: 57.058 - type: ndcg_at_100 value: 62.28900000000001 - type: ndcg_at_1000 value: 63.623 - type: ndcg_at_20 value: 59.382000000000005 - type: ndcg_at_3 value: 51.221000000000004 - type: ndcg_at_5 value: 54.381 - type: precision_at_1 value: 46.495 - type: precision_at_10 value: 10.915999999999999 - type: precision_at_100 value: 1.6969999999999998 - type: precision_at_1000 value: 0.209 - type: precision_at_20 value: 6.524000000000001 - type: precision_at_3 value: 24.464 - type: precision_at_5 value: 17.968999999999998 - type: recall_at_1 value: 37.856 - type: recall_at_10 value: 69.241 - type: recall_at_100 value: 90.28699999999999 - type: recall_at_1000 value: 98.245 - type: recall_at_20 value: 77.254 - type: recall_at_3 value: 52.906 - type: recall_at_5 value: 61.355000000000004 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval (default) type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: main_score value: 55.619 - type: map_at_1 value: 36.838 - type: map_at_10 value: 49.575 - type: map_at_100 value: 50.841 - type: map_at_1000 value: 50.958999999999996 - type: map_at_20 value: 50.242 - type: map_at_3 value: 46.161 - type: map_at_5 value: 48.047000000000004 - type: mrr_at_1 value: 46.36942675159236 - type: mrr_at_10 value: 55.585835608128654 - type: mrr_at_100 value: 56.14509022456505 - type: mrr_at_1000 value: 56.17254949423868 - type: mrr_at_20 value: 55.90911219427227 - type: mrr_at_3 value: 53.66242038216566 - type: mrr_at_5 value: 54.754777070063795 - type: nauc_map_at_1000_diff1 value: 55.72994146018132 - type: nauc_map_at_1000_max value: 31.790044348484297 - type: nauc_map_at_1000_std value: -3.676356400838977 - type: nauc_map_at_100_diff1 value: 55.75933759721365 - type: nauc_map_at_100_max value: 31.71115118548395 - type: nauc_map_at_100_std value: -3.8342809606231905 - type: nauc_map_at_10_diff1 value: 56.140582659854765 - type: nauc_map_at_10_max value: 30.78311323292639 - type: nauc_map_at_10_std value: -5.700660706560718 - type: nauc_map_at_1_diff1 value: 60.989149921402166 - type: nauc_map_at_1_max value: 24.847358431216847 - type: nauc_map_at_1_std value: -9.581978758832014 - type: nauc_map_at_20_diff1 value: 55.90088891725195 - type: nauc_map_at_20_max value: 31.14456673401443 - type: nauc_map_at_20_std value: -4.865501561278796 - type: nauc_map_at_3_diff1 value: 57.0971732510435 - type: nauc_map_at_3_max value: 28.585786390950574 - type: nauc_map_at_3_std value: -7.4884070271374545 - type: nauc_map_at_5_diff1 value: 56.455512726711234 - type: nauc_map_at_5_max value: 29.642007561881567 - type: nauc_map_at_5_std value: -6.751432515828243 - type: nauc_mrr_at_1000_diff1 value: 54.478564572207475 - type: nauc_mrr_at_1000_max value: 35.693883164459585 - type: nauc_mrr_at_1000_std value: 0.7643597056911735 - type: nauc_mrr_at_100_diff1 value: 54.469785726349244 - 
type: nauc_mrr_at_100_max value: 35.69354504224292 - type: nauc_mrr_at_100_std value: 0.7658609278819717 - type: nauc_mrr_at_10_diff1 value: 54.547801323233145 - type: nauc_mrr_at_10_max value: 35.674690735444365 - type: nauc_mrr_at_10_std value: 0.49454372122373236 - type: nauc_mrr_at_1_diff1 value: 58.0976236847133 - type: nauc_mrr_at_1_max value: 33.59373690579546 - type: nauc_mrr_at_1_std value: -0.8772488510621077 - type: nauc_mrr_at_20_diff1 value: 54.453167915372305 - type: nauc_mrr_at_20_max value: 35.721664453062736 - type: nauc_mrr_at_20_std value: 0.6523327541345227 - type: nauc_mrr_at_3_diff1 value: 54.7937640483145 - type: nauc_mrr_at_3_max value: 35.29469236108335 - type: nauc_mrr_at_3_std value: 0.8034554869950205 - type: nauc_mrr_at_5_diff1 value: 54.71493534691299 - type: nauc_mrr_at_5_max value: 35.72188122491164 - type: nauc_mrr_at_5_std value: 0.5775610038008605 - type: nauc_ndcg_at_1000_diff1 value: 53.62145095632505 - type: nauc_ndcg_at_1000_max value: 34.835565451787936 - type: nauc_ndcg_at_1000_std value: 1.1623091827615835 - type: nauc_ndcg_at_100_diff1 value: 53.77187136022632 - type: nauc_ndcg_at_100_max value: 34.609354362847014 - type: nauc_ndcg_at_100_std value: 0.5170266974858715 - type: nauc_ndcg_at_10_diff1 value: 54.20396088540467 - type: nauc_ndcg_at_10_max value: 33.67653413146747 - type: nauc_ndcg_at_10_std value: -2.931690615276255 - type: nauc_ndcg_at_1_diff1 value: 58.0976236847133 - type: nauc_ndcg_at_1_max value: 33.59373690579546 - type: nauc_ndcg_at_1_std value: -0.8772488510621077 - type: nauc_ndcg_at_20_diff1 value: 53.903247688173565 - type: nauc_ndcg_at_20_max value: 33.945585054020995 - type: nauc_ndcg_at_20_std value: -1.7196867778969072 - type: nauc_ndcg_at_3_diff1 value: 54.29684058685916 - type: nauc_ndcg_at_3_max value: 32.553067240491025 - type: nauc_ndcg_at_3_std value: -2.2016205000522997 - type: nauc_ndcg_at_5_diff1 value: 54.258684070633066 - type: nauc_ndcg_at_5_max value: 32.94664950545805 - type: nauc_ndcg_at_5_std value: -3.17937584881454 - type: nauc_precision_at_1000_diff1 value: -21.439194912116026 - type: nauc_precision_at_1000_max value: 15.942222677063993 - type: nauc_precision_at_1000_std value: 35.729915556531076 - type: nauc_precision_at_100_diff1 value: -14.953808144823391 - type: nauc_precision_at_100_max value: 24.366633515203866 - type: nauc_precision_at_100_std value: 38.91266206404506 - type: nauc_precision_at_10_diff1 value: 3.814206270267521 - type: nauc_precision_at_10_max value: 29.404023050544488 - type: nauc_precision_at_10_std value: 22.77713093456522 - type: nauc_precision_at_1_diff1 value: 58.0976236847133 - type: nauc_precision_at_1_max value: 33.59373690579546 - type: nauc_precision_at_1_std value: -0.8772488510621077 - type: nauc_precision_at_20_diff1 value: -4.373213972313144 - type: nauc_precision_at_20_max value: 26.64947569662616 - type: nauc_precision_at_20_std value: 28.094173647776948 - type: nauc_precision_at_3_diff1 value: 24.691792364305353 - type: nauc_precision_at_3_max value: 31.72445660483093 - type: nauc_precision_at_3_std value: 12.156717423144872 - type: nauc_precision_at_5_diff1 value: 14.671587544516148 - type: nauc_precision_at_5_max value: 30.65804536678362 - type: nauc_precision_at_5_std value: 16.73659878491423 - type: nauc_recall_at_1000_diff1 value: 37.914503683436536 - type: nauc_recall_at_1000_max value: 45.731930737542974 - type: nauc_recall_at_1000_std value: 35.36836074295906 - type: nauc_recall_at_100_diff1 value: 43.03648671559091 - type: nauc_recall_at_100_max value: 
37.820103483683546 - type: nauc_recall_at_100_std value: 15.615402250472895 - type: nauc_recall_at_10_diff1 value: 48.628540470781196 - type: nauc_recall_at_10_max value: 32.31708030245735 - type: nauc_recall_at_10_std value: -6.158117543444034 - type: nauc_recall_at_1_diff1 value: 60.989149921402166 - type: nauc_recall_at_1_max value: 24.847358431216847 - type: nauc_recall_at_1_std value: -9.581978758832014 - type: nauc_recall_at_20_diff1 value: 45.73434335994 - type: nauc_recall_at_20_max value: 33.61655424932853 - type: nauc_recall_at_20_std value: -0.6535031789146651 - type: nauc_recall_at_3_diff1 value: 51.97854257203469 - type: nauc_recall_at_3_max value: 28.32218445772992 - type: nauc_recall_at_3_std value: -7.408420913990716 - type: nauc_recall_at_5_diff1 value: 50.310920262075776 - type: nauc_recall_at_5_max value: 30.262152583629774 - type: nauc_recall_at_5_std value: -7.532311259007899 - type: ndcg_at_1 value: 46.369 - type: ndcg_at_10 value: 55.619 - type: ndcg_at_100 value: 59.424 - type: ndcg_at_1000 value: 61.031 - type: ndcg_at_20 value: 57.117 - type: ndcg_at_3 value: 51.512 - type: ndcg_at_5 value: 53.322 - type: precision_at_1 value: 46.369 - type: precision_at_10 value: 10.58 - type: precision_at_100 value: 1.587 - type: precision_at_1000 value: 0.202 - type: precision_at_20 value: 6.069999999999999 - type: precision_at_3 value: 25.244 - type: precision_at_5 value: 17.618000000000002 - type: recall_at_1 value: 36.838 - type: recall_at_10 value: 65.923 - type: recall_at_100 value: 81.773 - type: recall_at_1000 value: 91.477 - type: recall_at_20 value: 71.49799999999999 - type: recall_at_3 value: 53.339999999999996 - type: recall_at_5 value: 58.791000000000004 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval (default) type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: main_score value: 64.415 - type: map_at_1 value: 45.266 - type: map_at_10 value: 58.406000000000006 - type: map_at_100 value: 59.411 - type: map_at_1000 value: 59.443 - type: map_at_20 value: 59.026999999999994 - type: map_at_3 value: 54.764 - type: map_at_5 value: 56.906 - type: mrr_at_1 value: 51.53605015673981 - type: mrr_at_10 value: 61.7752649649202 - type: mrr_at_100 value: 62.31198236478266 - type: mrr_at_1000 value: 62.33025491564621 - type: mrr_at_20 value: 62.12073638425103 - type: mrr_at_3 value: 59.23719958202723 - type: mrr_at_5 value: 60.75757575757586 - type: nauc_map_at_1000_diff1 value: 52.78631894941548 - type: nauc_map_at_1000_max value: 29.728751350358717 - type: nauc_map_at_1000_std value: -7.2070664388485595 - type: nauc_map_at_100_diff1 value: 52.77487090828604 - type: nauc_map_at_100_max value: 29.73643835396022 - type: nauc_map_at_100_std value: -7.192081091184244 - type: nauc_map_at_10_diff1 value: 52.8201662488034 - type: nauc_map_at_10_max value: 29.265474037151424 - type: nauc_map_at_10_std value: -8.232044468471567 - type: nauc_map_at_1_diff1 value: 55.63611102392441 - type: nauc_map_at_1_max value: 21.587146316447857 - type: nauc_map_at_1_std value: -12.955850391319467 - type: nauc_map_at_20_diff1 value: 52.6726438399957 - type: nauc_map_at_20_max value: 29.51914837522581 - type: nauc_map_at_20_std value: -7.494785775769813 - type: nauc_map_at_3_diff1 value: 53.444278386735824 - type: nauc_map_at_3_max value: 27.570040557838492 - type: nauc_map_at_3_std value: -9.845447390972884 - type: nauc_map_at_5_diff1 value: 52.908466697724265 - type: nauc_map_at_5_max value: 28.598304185332513 
- type: nauc_map_at_5_std value: -9.114199320822918 - type: nauc_mrr_at_1000_diff1 value: 52.98587062448025 - type: nauc_mrr_at_1000_max value: 30.616687348087147 - type: nauc_mrr_at_1000_std value: -6.638419408566125 - type: nauc_mrr_at_100_diff1 value: 52.98528690169978 - type: nauc_mrr_at_100_max value: 30.631362801787038 - type: nauc_mrr_at_100_std value: -6.610896813869307 - type: nauc_mrr_at_10_diff1 value: 52.9728419136709 - type: nauc_mrr_at_10_max value: 30.71489606923995 - type: nauc_mrr_at_10_std value: -6.900976597146519 - type: nauc_mrr_at_1_diff1 value: 55.4539286180352 - type: nauc_mrr_at_1_max value: 27.030701088930964 - type: nauc_mrr_at_1_std value: -10.215265840865353 - type: nauc_mrr_at_20_diff1 value: 52.926098560085165 - type: nauc_mrr_at_20_max value: 30.632225629457487 - type: nauc_mrr_at_20_std value: -6.613910041879073 - type: nauc_mrr_at_3_diff1 value: 52.83554224468542 - type: nauc_mrr_at_3_max value: 30.17432529520847 - type: nauc_mrr_at_3_std value: -7.325929832658564 - type: nauc_mrr_at_5_diff1 value: 52.74796836301836 - type: nauc_mrr_at_5_max value: 30.54844294099801 - type: nauc_mrr_at_5_std value: -7.0923818376581576 - type: nauc_ndcg_at_1000_diff1 value: 52.279573296640116 - type: nauc_ndcg_at_1000_max value: 31.879058831034378 - type: nauc_ndcg_at_1000_std value: -4.308877770345846 - type: nauc_ndcg_at_100_diff1 value: 52.100170634687494 - type: nauc_ndcg_at_100_max value: 32.26943920941709 - type: nauc_ndcg_at_100_std value: -3.518845179812363 - type: nauc_ndcg_at_10_diff1 value: 51.97497813151944 - type: nauc_ndcg_at_10_max value: 31.842222287174277 - type: nauc_ndcg_at_10_std value: -5.638996194820608 - type: nauc_ndcg_at_1_diff1 value: 55.4539286180352 - type: nauc_ndcg_at_1_max value: 27.030701088930964 - type: nauc_ndcg_at_1_std value: -10.215265840865353 - type: nauc_ndcg_at_20_diff1 value: 51.604041870023856 - type: nauc_ndcg_at_20_max value: 31.839417695842304 - type: nauc_ndcg_at_20_std value: -4.150686612019412 - type: nauc_ndcg_at_3_diff1 value: 52.38280692803513 - type: nauc_ndcg_at_3_max value: 29.842252496671417 - type: nauc_ndcg_at_3_std value: -7.322027965096159 - type: nauc_ndcg_at_5_diff1 value: 51.908967367930515 - type: nauc_ndcg_at_5_max value: 30.9036875426887 - type: nauc_ndcg_at_5_std value: -6.902083486810509 - type: nauc_precision_at_1000_diff1 value: -13.238021720371048 - type: nauc_precision_at_1000_max value: 15.672518125243029 - type: nauc_precision_at_1000_std value: 26.430027831539952 - type: nauc_precision_at_100_diff1 value: -10.877525862593592 - type: nauc_precision_at_100_max value: 20.23032432890667 - type: nauc_precision_at_100_std value: 28.337459589162723 - type: nauc_precision_at_10_diff1 value: 5.2583262975497345 - type: nauc_precision_at_10_max value: 28.35287118062466 - type: nauc_precision_at_10_std value: 17.252494522903753 - type: nauc_precision_at_1_diff1 value: 55.4539286180352 - type: nauc_precision_at_1_max value: 27.030701088930964 - type: nauc_precision_at_1_std value: -10.215265840865353 - type: nauc_precision_at_20_diff1 value: -3.28564896800227 - type: nauc_precision_at_20_max value: 25.11814075456984 - type: nauc_precision_at_20_std value: 24.797488478114257 - type: nauc_precision_at_3_diff1 value: 27.36049200577193 - type: nauc_precision_at_3_max value: 31.38986459675791 - type: nauc_precision_at_3_std value: 4.94673859480357 - type: nauc_precision_at_5_diff1 value: 15.950278506213737 - type: nauc_precision_at_5_max value: 29.79168305914221 - type: nauc_precision_at_5_std value: 9.76931975271942 
- type: nauc_recall_at_1000_diff1 value: 44.363654144325324 - type: nauc_recall_at_1000_max value: 65.87737958676024 - type: nauc_recall_at_1000_std value: 66.33062353665733 - type: nauc_recall_at_100_diff1 value: 44.40436229432885 - type: nauc_recall_at_100_max value: 52.125011384194494 - type: nauc_recall_at_100_std value: 39.2934522251712 - type: nauc_recall_at_10_diff1 value: 46.16525418561482 - type: nauc_recall_at_10_max value: 36.828599940415415 - type: nauc_recall_at_10_std value: 1.182213593386153 - type: nauc_recall_at_1_diff1 value: 55.63611102392441 - type: nauc_recall_at_1_max value: 21.587146316447857 - type: nauc_recall_at_1_std value: -12.955850391319467 - type: nauc_recall_at_20_diff1 value: 42.73640642185509 - type: nauc_recall_at_20_max value: 38.566876742285785 - type: nauc_recall_at_20_std value: 12.696957126395137 - type: nauc_recall_at_3_diff1 value: 49.10962937589886 - type: nauc_recall_at_3_max value: 30.3754728722401 - type: nauc_recall_at_3_std value: -5.8046837607375235 - type: nauc_recall_at_5_diff1 value: 47.15226973550674 - type: nauc_recall_at_5_max value: 33.2256674725569 - type: nauc_recall_at_5_std value: -3.6870457217544486 - type: ndcg_at_1 value: 51.536 - type: ndcg_at_10 value: 64.415 - type: ndcg_at_100 value: 67.964 - type: ndcg_at_1000 value: 68.626 - type: ndcg_at_20 value: 66.038 - type: ndcg_at_3 value: 58.606 - type: ndcg_at_5 value: 61.556 - type: precision_at_1 value: 51.536 - type: precision_at_10 value: 10.313 - type: precision_at_100 value: 1.302 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_20 value: 5.6899999999999995 - type: precision_at_3 value: 25.997999999999998 - type: precision_at_5 value: 17.906 - type: recall_at_1 value: 45.266 - type: recall_at_10 value: 78.603 - type: recall_at_100 value: 93.54599999999999 - type: recall_at_1000 value: 98.253 - type: recall_at_20 value: 84.492 - type: recall_at_3 value: 63.176 - type: recall_at_5 value: 70.39999999999999 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval (default) type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: main_score value: 40.526 - type: map_at_1 value: 24.864 - type: map_at_10 value: 34.825 - type: map_at_100 value: 35.925000000000004 - type: map_at_1000 value: 35.992000000000004 - type: map_at_20 value: 35.443000000000005 - type: map_at_3 value: 31.871 - type: map_at_5 value: 33.229 - type: mrr_at_1 value: 26.666666666666668 - type: mrr_at_10 value: 36.55187875526857 - type: mrr_at_100 value: 37.49172764559794 - type: mrr_at_1000 value: 37.54236620694664 - type: mrr_at_20 value: 37.090912376004454 - type: mrr_at_3 value: 33.5969868173258 - type: mrr_at_5 value: 35.07156308851221 - type: nauc_map_at_1000_diff1 value: 44.891751149733196 - type: nauc_map_at_1000_max value: 15.851670132349863 - type: nauc_map_at_1000_std value: -6.211986438255689 - type: nauc_map_at_100_diff1 value: 44.874673705530206 - type: nauc_map_at_100_max value: 15.839489262649101 - type: nauc_map_at_100_std value: -6.207233514639387 - type: nauc_map_at_10_diff1 value: 44.9973434705712 - type: nauc_map_at_10_max value: 15.674524247902836 - type: nauc_map_at_10_std value: -6.609526006088386 - type: nauc_map_at_1_diff1 value: 52.91615262275366 - type: nauc_map_at_1_max value: 15.658694733337414 - type: nauc_map_at_1_std value: -8.127996150892375 - type: nauc_map_at_20_diff1 value: 44.85564238818856 - type: nauc_map_at_20_max value: 15.864949870815389 - type: nauc_map_at_20_std value: 
-6.33356250291001 - type: nauc_map_at_3_diff1 value: 46.046327749236646 - type: nauc_map_at_3_max value: 15.373437834620033 - type: nauc_map_at_3_std value: -7.381649120798753 - type: nauc_map_at_5_diff1 value: 45.11531096159745 - type: nauc_map_at_5_max value: 15.355856550981182 - type: nauc_map_at_5_std value: -7.8233977650785755 - type: nauc_mrr_at_1000_diff1 value: 44.33327398034034 - type: nauc_mrr_at_1000_max value: 16.412849348646365 - type: nauc_mrr_at_1000_std value: -4.799275364550082 - type: nauc_mrr_at_100_diff1 value: 44.30968445667686 - type: nauc_mrr_at_100_max value: 16.405335549486825 - type: nauc_mrr_at_100_std value: -4.782231711252647 - type: nauc_mrr_at_10_diff1 value: 44.29888367355646 - type: nauc_mrr_at_10_max value: 16.25074837299776 - type: nauc_mrr_at_10_std value: -4.920673041371069 - type: nauc_mrr_at_1_diff1 value: 52.45792631721784 - type: nauc_mrr_at_1_max value: 16.910229430959493 - type: nauc_mrr_at_1_std value: -6.547541650793473 - type: nauc_mrr_at_20_diff1 value: 44.28388085659487 - type: nauc_mrr_at_20_max value: 16.44303371699385 - type: nauc_mrr_at_20_std value: -4.782890220889415 - type: nauc_mrr_at_3_diff1 value: 45.597218407665494 - type: nauc_mrr_at_3_max value: 16.559752561241098 - type: nauc_mrr_at_3_std value: -5.542801615505432 - type: nauc_mrr_at_5_diff1 value: 44.43853572270395 - type: nauc_mrr_at_5_max value: 16.1133944641322 - type: nauc_mrr_at_5_std value: -6.050272589382528 - type: nauc_ndcg_at_1000_diff1 value: 42.278753638071386 - type: nauc_ndcg_at_1000_max value: 16.762316485090274 - type: nauc_ndcg_at_1000_std value: -3.815120779089691 - type: nauc_ndcg_at_100_diff1 value: 41.48636828362088 - type: nauc_ndcg_at_100_max value: 16.57237848948736 - type: nauc_ndcg_at_100_std value: -2.9411587277396305 - type: nauc_ndcg_at_10_diff1 value: 41.84405639778879 - type: nauc_ndcg_at_10_max value: 15.648068449751747 - type: nauc_ndcg_at_10_std value: -4.898160642419032 - type: nauc_ndcg_at_1_diff1 value: 52.45792631721784 - type: nauc_ndcg_at_1_max value: 16.910229430959493 - type: nauc_ndcg_at_1_std value: -6.547541650793473 - type: nauc_ndcg_at_20_diff1 value: 41.40610377673182 - type: nauc_ndcg_at_20_max value: 16.447475968548225 - type: nauc_ndcg_at_20_std value: -3.9262621516592775 - type: nauc_ndcg_at_3_diff1 value: 44.1592823405345 - type: nauc_ndcg_at_3_max value: 15.712847138573 - type: nauc_ndcg_at_3_std value: -6.883137206493727 - type: nauc_ndcg_at_5_diff1 value: 42.38989386193879 - type: nauc_ndcg_at_5_max value: 15.239800786663741 - type: nauc_ndcg_at_5_std value: -7.651974137221655 - type: nauc_precision_at_1000_diff1 value: -7.5519282288277925 - type: nauc_precision_at_1000_max value: 11.782489435682795 - type: nauc_precision_at_1000_std value: 11.668888993456477 - type: nauc_precision_at_100_diff1 value: 3.204774581975988 - type: nauc_precision_at_100_max value: 15.337126603034434 - type: nauc_precision_at_100_std value: 17.589357862845134 - type: nauc_precision_at_10_diff1 value: 25.434016688175493 - type: nauc_precision_at_10_max value: 17.27082325926627 - type: nauc_precision_at_10_std value: 3.822526901724587 - type: nauc_precision_at_1_diff1 value: 52.45792631721784 - type: nauc_precision_at_1_max value: 16.910229430959493 - type: nauc_precision_at_1_std value: -6.547541650793473 - type: nauc_precision_at_20_diff1 value: 18.7568667813302 - type: nauc_precision_at_20_max value: 19.306529566246464 - type: nauc_precision_at_20_std value: 8.382893890597472 - type: nauc_precision_at_3_diff1 value: 36.1574899950847 - type: 
nauc_precision_at_3_max value: 16.627299638592103 - type: nauc_precision_at_3_std value: -4.087788042093743 - type: nauc_precision_at_5_diff1 value: 30.628715896321694 - type: nauc_precision_at_5_max value: 16.0570729024651 - type: nauc_precision_at_5_std value: -5.445903035987369 - type: nauc_recall_at_1000_diff1 value: 21.809556502847656 - type: nauc_recall_at_1000_max value: 33.655435589139934 - type: nauc_recall_at_1000_std value: 20.196017966823725 - type: nauc_recall_at_100_diff1 value: 22.517616973663824 - type: nauc_recall_at_100_max value: 20.03103825061642 - type: nauc_recall_at_100_std value: 17.243632401151785 - type: nauc_recall_at_10_diff1 value: 31.71506385432409 - type: nauc_recall_at_10_max value: 14.239057272051598 - type: nauc_recall_at_10_std value: -0.9351019605896081 - type: nauc_recall_at_1_diff1 value: 52.91615262275366 - type: nauc_recall_at_1_max value: 15.658694733337414 - type: nauc_recall_at_1_std value: -8.127996150892375 - type: nauc_recall_at_20_diff1 value: 28.725276151070716 - type: nauc_recall_at_20_max value: 17.353450797923646 - type: nauc_recall_at_20_std value: 3.730067464363456 - type: nauc_recall_at_3_diff1 value: 39.02212891041207 - type: nauc_recall_at_3_max value: 14.936335410995708 - type: nauc_recall_at_3_std value: -6.937703093702095 - type: nauc_recall_at_5_diff1 value: 34.62638470036278 - type: nauc_recall_at_5_max value: 13.81867357102693 - type: nauc_recall_at_5_std value: -8.95853594372777 - type: ndcg_at_1 value: 26.667 - type: ndcg_at_10 value: 40.526 - type: ndcg_at_100 value: 45.906000000000006 - type: ndcg_at_1000 value: 47.607 - type: ndcg_at_20 value: 42.612 - type: ndcg_at_3 value: 34.479 - type: ndcg_at_5 value: 36.856 - type: precision_at_1 value: 26.667 - type: precision_at_10 value: 6.497 - type: precision_at_100 value: 0.9690000000000001 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_20 value: 3.7510000000000003 - type: precision_at_3 value: 14.765 - type: precision_at_5 value: 10.328 - type: recall_at_1 value: 24.864 - type: recall_at_10 value: 56.52 - type: recall_at_100 value: 81.266 - type: recall_at_1000 value: 94.066 - type: recall_at_20 value: 64.287 - type: recall_at_3 value: 39.894 - type: recall_at_5 value: 45.573 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval (default) type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: main_score value: 36.909 - type: map_at_1 value: 21.662 - type: map_at_10 value: 31.026999999999997 - type: map_at_100 value: 32.367000000000004 - type: map_at_1000 value: 32.478 - type: map_at_20 value: 31.769 - type: map_at_3 value: 28.1 - type: map_at_5 value: 29.622 - type: mrr_at_1 value: 27.114427860696516 - type: mrr_at_10 value: 36.32822988233437 - type: mrr_at_100 value: 37.28018582739568 - type: mrr_at_1000 value: 37.340004953560815 - type: mrr_at_20 value: 36.89249731777514 - type: mrr_at_3 value: 33.76865671641792 - type: mrr_at_5 value: 35.155472636815915 - type: nauc_map_at_1000_diff1 value: 30.999721507980425 - type: nauc_map_at_1000_max value: 13.855009171656762 - type: nauc_map_at_1000_std value: 2.778145928499419 - type: nauc_map_at_100_diff1 value: 30.991818861315114 - type: nauc_map_at_100_max value: 13.851316393129236 - type: nauc_map_at_100_std value: 2.7451346033089066 - type: nauc_map_at_10_diff1 value: 30.939047115922268 - type: nauc_map_at_10_max value: 13.802711678959358 - type: nauc_map_at_10_std value: 1.9338708233328186 - type: 
nauc_map_at_1_diff1 value: 36.97121643357033 - type: nauc_map_at_1_max value: 14.54951802050039 - type: nauc_map_at_1_std value: 2.291757915014077 - type: nauc_map_at_20_diff1 value: 30.949302840984927 - type: nauc_map_at_20_max value: 13.610164030526914 - type: nauc_map_at_20_std value: 2.549163654887918 - type: nauc_map_at_3_diff1 value: 31.637509042806855 - type: nauc_map_at_3_max value: 14.180566076404554 - type: nauc_map_at_3_std value: 1.1865639206189176 - type: nauc_map_at_5_diff1 value: 31.154639339531055 - type: nauc_map_at_5_max value: 13.903608844337809 - type: nauc_map_at_5_std value: 1.6922136449478375 - type: nauc_mrr_at_1000_diff1 value: 32.45389857695208 - type: nauc_mrr_at_1000_max value: 16.11613716157279 - type: nauc_mrr_at_1000_std value: 4.211017248357101 - type: nauc_mrr_at_100_diff1 value: 32.44145015751564 - type: nauc_mrr_at_100_max value: 16.099031300956412 - type: nauc_mrr_at_100_std value: 4.195259378963446 - type: nauc_mrr_at_10_diff1 value: 32.21060187567292 - type: nauc_mrr_at_10_max value: 16.21661026926165 - type: nauc_mrr_at_10_std value: 3.9065782540718437 - type: nauc_mrr_at_1_diff1 value: 37.74549118846171 - type: nauc_mrr_at_1_max value: 16.397209846805332 - type: nauc_mrr_at_1_std value: 3.4889514782297995 - type: nauc_mrr_at_20_diff1 value: 32.453044179618445 - type: nauc_mrr_at_20_max value: 15.909025198763452 - type: nauc_mrr_at_20_std value: 4.098615307812559 - type: nauc_mrr_at_3_diff1 value: 32.484524841794695 - type: nauc_mrr_at_3_max value: 16.648085318378186 - type: nauc_mrr_at_3_std value: 3.509389137710427 - type: nauc_mrr_at_5_diff1 value: 32.299480637422256 - type: nauc_mrr_at_5_max value: 15.951764344928696 - type: nauc_mrr_at_5_std value: 3.7985692901891377 - type: nauc_ndcg_at_1000_diff1 value: 30.008108380199346 - type: nauc_ndcg_at_1000_max value: 14.535374527619723 - type: nauc_ndcg_at_1000_std value: 5.2285948826930575 - type: nauc_ndcg_at_100_diff1 value: 29.666636590107476 - type: nauc_ndcg_at_100_max value: 14.655881610560979 - type: nauc_ndcg_at_100_std value: 5.247239776587783 - type: nauc_ndcg_at_10_diff1 value: 29.432631981617234 - type: nauc_ndcg_at_10_max value: 13.814013850977513 - type: nauc_ndcg_at_10_std value: 2.6684914826901185 - type: nauc_ndcg_at_1_diff1 value: 37.74549118846171 - type: nauc_ndcg_at_1_max value: 16.397209846805332 - type: nauc_ndcg_at_1_std value: 3.4889514782297995 - type: nauc_ndcg_at_20_diff1 value: 29.606523322115635 - type: nauc_ndcg_at_20_max value: 13.192927619815412 - type: nauc_ndcg_at_20_std value: 4.2760688990871625 - type: nauc_ndcg_at_3_diff1 value: 30.35103248525633 - type: nauc_ndcg_at_3_max value: 14.612114097422971 - type: nauc_ndcg_at_3_std value: 1.690897807564605 - type: nauc_ndcg_at_5_diff1 value: 29.77002075263378 - type: nauc_ndcg_at_5_max value: 13.80319417333816 - type: nauc_ndcg_at_5_std value: 2.3198767168618484 - type: nauc_precision_at_1000_diff1 value: -0.8398177965775562 - type: nauc_precision_at_1000_max value: -0.0407907558781753 - type: nauc_precision_at_1000_std value: 1.7501690310307152 - type: nauc_precision_at_100_diff1 value: 5.004646340055046 - type: nauc_precision_at_100_max value: 5.8226386931141745 - type: nauc_precision_at_100_std value: 7.907290393542464 - type: nauc_precision_at_10_diff1 value: 15.09410481986906 - type: nauc_precision_at_10_max value: 9.198280392394562 - type: nauc_precision_at_10_std value: 4.897238581273735 - type: nauc_precision_at_1_diff1 value: 37.74549118846171 - type: nauc_precision_at_1_max value: 16.397209846805332 - type: 
nauc_precision_at_1_std value: 3.4889514782297995 - type: nauc_precision_at_20_diff1 value: 10.882828189218047 - type: nauc_precision_at_20_max value: 5.941972305343443 - type: nauc_precision_at_20_std value: 9.568975893813892 - type: nauc_precision_at_3_diff1 value: 22.65128906145192 - type: nauc_precision_at_3_max value: 15.161912167988072 - type: nauc_precision_at_3_std value: 2.5270971100194406 - type: nauc_precision_at_5_diff1 value: 18.212945309142732 - type: nauc_precision_at_5_max value: 11.761601796714555 - type: nauc_precision_at_5_std value: 4.146114697437408 - type: nauc_recall_at_1000_diff1 value: 10.619530015404953 - type: nauc_recall_at_1000_max value: 15.582991779175732 - type: nauc_recall_at_1000_std value: 34.37725284344572 - type: nauc_recall_at_100_diff1 value: 19.099691165176054 - type: nauc_recall_at_100_max value: 15.862163756010158 - type: nauc_recall_at_100_std value: 16.693656549037005 - type: nauc_recall_at_10_diff1 value: 22.901061297772006 - type: nauc_recall_at_10_max value: 10.819332395187512 - type: nauc_recall_at_10_std value: 2.4935560928879426 - type: nauc_recall_at_1_diff1 value: 36.97121643357033 - type: nauc_recall_at_1_max value: 14.54951802050039 - type: nauc_recall_at_1_std value: 2.291757915014077 - type: nauc_recall_at_20_diff1 value: 22.55772068737252 - type: nauc_recall_at_20_max value: 8.380423773938148 - type: nauc_recall_at_20_std value: 8.008240536599516 - type: nauc_recall_at_3_diff1 value: 25.718074487510656 - type: nauc_recall_at_3_max value: 12.86216888861597 - type: nauc_recall_at_3_std value: -0.18846851122293373 - type: nauc_recall_at_5_diff1 value: 24.617710231237208 - type: nauc_recall_at_5_max value: 11.021226469233289 - type: nauc_recall_at_5_std value: 1.421950056879153 - type: ndcg_at_1 value: 27.114 - type: ndcg_at_10 value: 36.909 - type: ndcg_at_100 value: 42.986000000000004 - type: ndcg_at_1000 value: 45.37 - type: ndcg_at_20 value: 39.330999999999996 - type: ndcg_at_3 value: 31.729000000000003 - type: ndcg_at_5 value: 33.936 - type: precision_at_1 value: 27.114 - type: precision_at_10 value: 6.816 - type: precision_at_100 value: 1.1280000000000001 - type: precision_at_1000 value: 0.146 - type: precision_at_20 value: 4.086 - type: precision_at_3 value: 15.340000000000002 - type: precision_at_5 value: 10.945 - type: recall_at_1 value: 21.662 - type: recall_at_10 value: 49.636 - type: recall_at_100 value: 75.916 - type: recall_at_1000 value: 92.458 - type: recall_at_20 value: 58.416999999999994 - type: recall_at_3 value: 35.28 - type: recall_at_5 value: 40.882000000000005 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval (default) type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: main_score value: 52.868 - type: map_at_1 value: 34.431 - type: map_at_10 value: 46.771 - type: map_at_100 value: 48.174 - type: map_at_1000 value: 48.272 - type: map_at_20 value: 47.581 - type: map_at_3 value: 43.524 - type: map_at_5 value: 45.324 - type: mrr_at_1 value: 41.28970163618864 - type: mrr_at_10 value: 51.932146294513934 - type: mrr_at_100 value: 52.745860705605196 - type: mrr_at_1000 value: 52.77323709159302 - type: mrr_at_20 value: 52.42075382304209 - type: mrr_at_3 value: 49.823548283605994 - type: mrr_at_5 value: 50.90632017965989 - type: nauc_map_at_1000_diff1 value: 49.36352629700335 - type: nauc_map_at_1000_max value: 24.945575911968284 - type: nauc_map_at_1000_std value: -9.797421912891304 - type: nauc_map_at_100_diff1 value: 
49.37074982350282 - type: nauc_map_at_100_max value: 24.932173116830043 - type: nauc_map_at_100_std value: -9.83512316791769 - type: nauc_map_at_10_diff1 value: 49.584826786536695 - type: nauc_map_at_10_max value: 24.5338249313471 - type: nauc_map_at_10_std value: -10.756023024875775 - type: nauc_map_at_1_diff1 value: 54.164579712650564 - type: nauc_map_at_1_max value: 21.27499239151622 - type: nauc_map_at_1_std value: -12.031365028953774 - type: nauc_map_at_20_diff1 value: 49.36242974988498 - type: nauc_map_at_20_max value: 24.80466638063753 - type: nauc_map_at_20_std value: -10.082778211621719 - type: nauc_map_at_3_diff1 value: 49.74393799782767 - type: nauc_map_at_3_max value: 22.515201658656686 - type: nauc_map_at_3_std value: -11.590002353273865 - type: nauc_map_at_5_diff1 value: 49.484910514867195 - type: nauc_map_at_5_max value: 23.869015363628243 - type: nauc_map_at_5_std value: -10.91175087395575 - type: nauc_mrr_at_1000_diff1 value: 48.793281277282496 - type: nauc_mrr_at_1000_max value: 27.470883078226592 - type: nauc_mrr_at_1000_std value: -7.515845591102144 - type: nauc_mrr_at_100_diff1 value: 48.79210444488905 - type: nauc_mrr_at_100_max value: 27.474989242453074 - type: nauc_mrr_at_100_std value: -7.503300141559849 - type: nauc_mrr_at_10_diff1 value: 48.79734357404452 - type: nauc_mrr_at_10_max value: 27.31051724397676 - type: nauc_mrr_at_10_std value: -7.9232399559624564 - type: nauc_mrr_at_1_diff1 value: 51.82497066040549 - type: nauc_mrr_at_1_max value: 27.890233836643652 - type: nauc_mrr_at_1_std value: -7.282474721231649 - type: nauc_mrr_at_20_diff1 value: 48.7030720747066 - type: nauc_mrr_at_20_max value: 27.41000787740253 - type: nauc_mrr_at_20_std value: -7.620818649979551 - type: nauc_mrr_at_3_diff1 value: 48.043966718368594 - type: nauc_mrr_at_3_max value: 26.881430874895994 - type: nauc_mrr_at_3_std value: -8.248267417377043 - type: nauc_mrr_at_5_diff1 value: 48.53255636641247 - type: nauc_mrr_at_5_max value: 27.171688846497688 - type: nauc_mrr_at_5_std value: -8.033774011744846 - type: nauc_ndcg_at_1000_diff1 value: 48.36429784289925 - type: nauc_ndcg_at_1000_max value: 27.118498431732306 - type: nauc_ndcg_at_1000_std value: -6.828545284915606 - type: nauc_ndcg_at_100_diff1 value: 48.46721595475821 - type: nauc_ndcg_at_100_max value: 27.2026549133907 - type: nauc_ndcg_at_100_std value: -6.7602817044158385 - type: nauc_ndcg_at_10_diff1 value: 48.48522449945348 - type: nauc_ndcg_at_10_max value: 25.656768536695516 - type: nauc_ndcg_at_10_std value: -10.211150092283324 - type: nauc_ndcg_at_1_diff1 value: 51.82497066040549 - type: nauc_ndcg_at_1_max value: 27.890233836643652 - type: nauc_ndcg_at_1_std value: -7.282474721231649 - type: nauc_ndcg_at_20_diff1 value: 47.94636399413784 - type: nauc_ndcg_at_20_max value: 26.393241064813427 - type: nauc_ndcg_at_20_std value: -8.42533823516474 - type: nauc_ndcg_at_3_diff1 value: 47.23654567102718 - type: nauc_ndcg_at_3_max value: 23.967083351659305 - type: nauc_ndcg_at_3_std value: -10.018398475022156 - type: nauc_ndcg_at_5_diff1 value: 47.78313461709188 - type: nauc_ndcg_at_5_max value: 25.117360628485525 - type: nauc_ndcg_at_5_std value: -9.835655279120918 - type: nauc_precision_at_1000_diff1 value: -16.88414572159978 - type: nauc_precision_at_1000_max value: 4.80126332522441 - type: nauc_precision_at_1000_std value: 18.224394238644006 - type: nauc_precision_at_100_diff1 value: -10.856565551145168 - type: nauc_precision_at_100_max value: 12.192780995446329 - type: nauc_precision_at_100_std value: 19.46735171751675 - type: 
nauc_precision_at_10_diff1 value: 9.26688991753191 - type: nauc_precision_at_10_max value: 22.825044871843495 - type: nauc_precision_at_10_std value: 7.2009775672302085 - type: nauc_precision_at_1_diff1 value: 51.82497066040549 - type: nauc_precision_at_1_max value: 27.890233836643652 - type: nauc_precision_at_1_std value: -7.282474721231649 - type: nauc_precision_at_20_diff1 value: -0.6494049114218691 - type: nauc_precision_at_20_max value: 19.581420310050493 - type: nauc_precision_at_20_std value: 13.337216187539871 - type: nauc_precision_at_3_diff1 value: 24.62203099425093 - type: nauc_precision_at_3_max value: 23.29826494152431 - type: nauc_precision_at_3_std value: -0.43986407417036705 - type: nauc_precision_at_5_diff1 value: 17.146631960590334 - type: nauc_precision_at_5_max value: 23.885773052175633 - type: nauc_precision_at_5_std value: 3.5595842426515096 - type: nauc_recall_at_1000_diff1 value: 41.93547812229175 - type: nauc_recall_at_1000_max value: 47.53791535010556 - type: nauc_recall_at_1000_std value: 41.22342581752483 - type: nauc_recall_at_100_diff1 value: 43.59789460172138 - type: nauc_recall_at_100_max value: 34.831608717192985 - type: nauc_recall_at_100_std value: 13.861943370323186 - type: nauc_recall_at_10_diff1 value: 43.23845371092903 - type: nauc_recall_at_10_max value: 24.69556725680752 - type: nauc_recall_at_10_std value: -11.163429240944318 - type: nauc_recall_at_1_diff1 value: 54.164579712650564 - type: nauc_recall_at_1_max value: 21.27499239151622 - type: nauc_recall_at_1_std value: -12.031365028953774 - type: nauc_recall_at_20_diff1 value: 40.70544708973454 - type: nauc_recall_at_20_max value: 27.4533684977315 - type: nauc_recall_at_20_std value: -3.875024362887248 - type: nauc_recall_at_3_diff1 value: 43.489599274690924 - type: nauc_recall_at_3_max value: 20.73212895737876 - type: nauc_recall_at_3_std value: -12.437367497680794 - type: nauc_recall_at_5_diff1 value: 42.512319246009426 - type: nauc_recall_at_5_max value: 23.128281152167013 - type: nauc_recall_at_5_std value: -11.278338046867578 - type: ndcg_at_1 value: 41.29 - type: ndcg_at_10 value: 52.868 - type: ndcg_at_100 value: 58.302 - type: ndcg_at_1000 value: 59.768 - type: ndcg_at_20 value: 55.161 - type: ndcg_at_3 value: 48.209999999999994 - type: ndcg_at_5 value: 50.31 - type: precision_at_1 value: 41.29 - type: precision_at_10 value: 9.508999999999999 - type: precision_at_100 value: 1.425 - type: precision_at_1000 value: 0.172 - type: precision_at_20 value: 5.563 - type: precision_at_3 value: 23.067 - type: precision_at_5 value: 16.112000000000002 - type: recall_at_1 value: 34.431 - type: recall_at_10 value: 65.23299999999999 - type: recall_at_100 value: 87.53699999999999 - type: recall_at_1000 value: 96.539 - type: recall_at_20 value: 73.175 - type: recall_at_3 value: 51.895 - type: recall_at_5 value: 57.385 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval (default) type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: main_score value: 51.528 - type: map_at_1 value: 32.505 - type: map_at_10 value: 44.753 - type: map_at_100 value: 46.127 - type: map_at_1000 value: 46.216 - type: map_at_20 value: 45.536 - type: map_at_3 value: 41.065000000000005 - type: map_at_5 value: 43.021 - type: mrr_at_1 value: 40.06849315068493 - type: mrr_at_10 value: 50.586223454374114 - type: mrr_at_100 value: 51.321321223787464 - type: mrr_at_1000 value: 51.356221108008924 - type: mrr_at_20 value: 51.01864577087494 - 
type: mrr_at_3 value: 47.85007610350074 - type: mrr_at_5 value: 49.33980213089797 - type: nauc_map_at_1000_diff1 value: 45.41897322814866 - type: nauc_map_at_1000_max value: 36.43271502796935 - type: nauc_map_at_1000_std value: -3.08747313658398 - type: nauc_map_at_100_diff1 value: 45.40269743916955 - type: nauc_map_at_100_max value: 36.452626381309564 - type: nauc_map_at_100_std value: -3.050272535820317 - type: nauc_map_at_10_diff1 value: 45.34560411925014 - type: nauc_map_at_10_max value: 36.19673788114135 - type: nauc_map_at_10_std value: -3.420683672963469 - type: nauc_map_at_1_diff1 value: 51.36851585971345 - type: nauc_map_at_1_max value: 30.505249602408085 - type: nauc_map_at_1_std value: -10.2416678186349 - type: nauc_map_at_20_diff1 value: 45.42397332309073 - type: nauc_map_at_20_max value: 36.323620365237915 - type: nauc_map_at_20_std value: -3.3179646819375455 - type: nauc_map_at_3_diff1 value: 44.83947078058309 - type: nauc_map_at_3_max value: 34.57416303775605 - type: nauc_map_at_3_std value: -5.3672505555757555 - type: nauc_map_at_5_diff1 value: 45.4323080527899 - type: nauc_map_at_5_max value: 35.475038111215454 - type: nauc_map_at_5_std value: -4.869116944994172 - type: nauc_mrr_at_1000_diff1 value: 44.79133765108195 - type: nauc_mrr_at_1000_max value: 37.8395528431165 - type: nauc_mrr_at_1000_std value: -0.33864135555345787 - type: nauc_mrr_at_100_diff1 value: 44.77053505400072 - type: nauc_mrr_at_100_max value: 37.85131530843396 - type: nauc_mrr_at_100_std value: -0.29376043044872974 - type: nauc_mrr_at_10_diff1 value: 44.54201455010106 - type: nauc_mrr_at_10_max value: 37.87654424224337 - type: nauc_mrr_at_10_std value: -0.2954147213419877 - type: nauc_mrr_at_1_diff1 value: 50.348567221476095 - type: nauc_mrr_at_1_max value: 35.32002716807428 - type: nauc_mrr_at_1_std value: -4.127211615072779 - type: nauc_mrr_at_20_diff1 value: 44.6481957222897 - type: nauc_mrr_at_20_max value: 37.79185181323558 - type: nauc_mrr_at_20_std value: -0.3506304719188533 - type: nauc_mrr_at_3_diff1 value: 44.54540545121934 - type: nauc_mrr_at_3_max value: 37.363282458611835 - type: nauc_mrr_at_3_std value: -1.0802758216988566 - type: nauc_mrr_at_5_diff1 value: 44.669169996080946 - type: nauc_mrr_at_5_max value: 37.79397301528844 - type: nauc_mrr_at_5_std value: -1.1544437067895141 - type: nauc_ndcg_at_1000_diff1 value: 44.23001270649658 - type: nauc_ndcg_at_1000_max value: 38.03938047053798 - type: nauc_ndcg_at_1000_std value: 0.07002094406878062 - type: nauc_ndcg_at_100_diff1 value: 43.83250930342652 - type: nauc_ndcg_at_100_max value: 38.72756266035811 - type: nauc_ndcg_at_100_std value: 1.3255594876912262 - type: nauc_ndcg_at_10_diff1 value: 43.78080205250872 - type: nauc_ndcg_at_10_max value: 38.14614328818464 - type: nauc_ndcg_at_10_std value: 0.2397638259348022 - type: nauc_ndcg_at_1_diff1 value: 50.348567221476095 - type: nauc_ndcg_at_1_max value: 35.32002716807428 - type: nauc_ndcg_at_1_std value: -4.127211615072779 - type: nauc_ndcg_at_20_diff1 value: 43.896996711891255 - type: nauc_ndcg_at_20_max value: 38.23097480664703 - type: nauc_ndcg_at_20_std value: 0.2729387441668037 - type: nauc_ndcg_at_3_diff1 value: 43.069709910022425 - type: nauc_ndcg_at_3_max value: 36.502144221587294 - type: nauc_ndcg_at_3_std value: -2.0622108399705756 - type: nauc_ndcg_at_5_diff1 value: 44.14602577391785 - type: nauc_ndcg_at_5_max value: 37.34237839237288 - type: nauc_ndcg_at_5_std value: -2.2673460175951705 - type: nauc_precision_at_1000_diff1 value: -10.357116971513213 - type: 
nauc_precision_at_1000_max value: 1.2503703047183532 - type: nauc_precision_at_1000_std value: 13.414005506342678 - type: nauc_precision_at_100_diff1 value: -7.681527188131978 - type: nauc_precision_at_100_max value: 10.088834472819892 - type: nauc_precision_at_100_std value: 17.711087598879566 - type: nauc_precision_at_10_diff1 value: 11.003362209750295 - type: nauc_precision_at_10_max value: 27.63252544217027 - type: nauc_precision_at_10_std value: 18.481947258128084 - type: nauc_precision_at_1_diff1 value: 50.348567221476095 - type: nauc_precision_at_1_max value: 35.32002716807428 - type: nauc_precision_at_1_std value: -4.127211615072779 - type: nauc_precision_at_20_diff1 value: 4.111080332759875 - type: nauc_precision_at_20_max value: 20.56749400877946 - type: nauc_precision_at_20_std value: 17.36022034241277 - type: nauc_precision_at_3_diff1 value: 23.942554910273245 - type: nauc_precision_at_3_max value: 35.97877150370803 - type: nauc_precision_at_3_std value: 9.637340410469214 - type: nauc_precision_at_5_diff1 value: 18.700971929254926 - type: nauc_precision_at_5_max value: 31.669251857400287 - type: nauc_precision_at_5_std value: 12.227378057702525 - type: nauc_recall_at_1000_diff1 value: 41.44406357349465 - type: nauc_recall_at_1000_max value: 48.18005939744722 - type: nauc_recall_at_1000_std value: 44.644995141313615 - type: nauc_recall_at_100_diff1 value: 32.922020475894605 - type: nauc_recall_at_100_max value: 48.72963272591548 - type: nauc_recall_at_100_std value: 31.240653608289136 - type: nauc_recall_at_10_diff1 value: 35.86054329362608 - type: nauc_recall_at_10_max value: 39.22254164233072 - type: nauc_recall_at_10_std value: 9.338184712112877 - type: nauc_recall_at_1_diff1 value: 51.36851585971345 - type: nauc_recall_at_1_max value: 30.505249602408085 - type: nauc_recall_at_1_std value: -10.2416678186349 - type: nauc_recall_at_20_diff1 value: 35.70158600895814 - type: nauc_recall_at_20_max value: 40.21115283196768 - type: nauc_recall_at_20_std value: 11.284677731981075 - type: nauc_recall_at_3_diff1 value: 37.02178260667095 - type: nauc_recall_at_3_max value: 35.26557516931743 - type: nauc_recall_at_3_std value: -0.19729756446094432 - type: nauc_recall_at_5_diff1 value: 38.71651291559653 - type: nauc_recall_at_5_max value: 37.57955300059607 - type: nauc_recall_at_5_std value: 0.10272606736436922 - type: ndcg_at_1 value: 40.068 - type: ndcg_at_10 value: 51.528 - type: ndcg_at_100 value: 56.723 - type: ndcg_at_1000 value: 58.239 - type: ndcg_at_20 value: 53.644999999999996 - type: ndcg_at_3 value: 45.755 - type: ndcg_at_5 value: 48.143 - type: precision_at_1 value: 40.068 - type: precision_at_10 value: 9.555 - type: precision_at_100 value: 1.4200000000000002 - type: precision_at_1000 value: 0.16999999999999998 - type: precision_at_20 value: 5.525 - type: precision_at_3 value: 22.032 - type: precision_at_5 value: 15.684999999999999 - type: recall_at_1 value: 32.505 - type: recall_at_10 value: 65.65100000000001 - type: recall_at_100 value: 87.252 - type: recall_at_1000 value: 97.25399999999999 - type: recall_at_20 value: 73.097 - type: recall_at_3 value: 49.097 - type: recall_at_5 value: 55.431 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval (default) type: CQADupstackRetrieval_is_a_combined_dataset config: default split: test revision: CQADupstackRetrieval_is_a_combined_dataset metrics: - type: main_score value: 47.39125 - type: ndcg_at_10 value: 47.39125 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval (default) type: 
mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: main_score value: 40.875 - type: map_at_1 value: 28.319 - type: map_at_10 value: 36.274 - type: map_at_100 value: 37.412 - type: map_at_1000 value: 37.51 - type: map_at_20 value: 36.941 - type: map_at_3 value: 33.916000000000004 - type: map_at_5 value: 35.234 - type: mrr_at_1 value: 31.74846625766871 - type: mrr_at_10 value: 39.24073668322134 - type: mrr_at_100 value: 40.17278239800247 - type: mrr_at_1000 value: 40.2379436505776 - type: mrr_at_20 value: 39.83878886787774 - type: mrr_at_3 value: 37.167689161554215 - type: mrr_at_5 value: 38.31799591002046 - type: nauc_map_at_1000_diff1 value: 49.49404676750026 - type: nauc_map_at_1000_max value: 24.17735387761035 - type: nauc_map_at_1000_std value: -6.968025638141572 - type: nauc_map_at_100_diff1 value: 49.465123378632285 - type: nauc_map_at_100_max value: 24.160270060711657 - type: nauc_map_at_100_std value: -6.9582977504819 - type: nauc_map_at_10_diff1 value: 49.781720703934376 - type: nauc_map_at_10_max value: 24.103919903874065 - type: nauc_map_at_10_std value: -7.314662536396514 - type: nauc_map_at_1_diff1 value: 55.720545221216035 - type: nauc_map_at_1_max value: 21.884565094683943 - type: nauc_map_at_1_std value: -13.63682159495347 - type: nauc_map_at_20_diff1 value: 49.62896582687721 - type: nauc_map_at_20_max value: 24.162549300595533 - type: nauc_map_at_20_std value: -7.081233825119497 - type: nauc_map_at_3_diff1 value: 51.385332716125795 - type: nauc_map_at_3_max value: 24.455742623231256 - type: nauc_map_at_3_std value: -8.3760582847261 - type: nauc_map_at_5_diff1 value: 50.208957179217215 - type: nauc_map_at_5_max value: 24.00675091720163 - type: nauc_map_at_5_std value: -7.613308761551274 - type: nauc_mrr_at_1000_diff1 value: 47.500916234195756 - type: nauc_mrr_at_1000_max value: 23.3590354910963 - type: nauc_mrr_at_1000_std value: -6.478735759902906 - type: nauc_mrr_at_100_diff1 value: 47.469759272651565 - type: nauc_mrr_at_100_max value: 23.344729569544107 - type: nauc_mrr_at_100_std value: -6.474443538104896 - type: nauc_mrr_at_10_diff1 value: 47.564411817507064 - type: nauc_mrr_at_10_max value: 23.321510676719026 - type: nauc_mrr_at_10_std value: -6.6453693344940366 - type: nauc_mrr_at_1_diff1 value: 53.18508134661776 - type: nauc_mrr_at_1_max value: 21.57319946504392 - type: nauc_mrr_at_1_std value: -12.568495954713804 - type: nauc_mrr_at_20_diff1 value: 47.48330579247279 - type: nauc_mrr_at_20_max value: 23.377594092205236 - type: nauc_mrr_at_20_std value: -6.559620372850238 - type: nauc_mrr_at_3_diff1 value: 48.811069225030366 - type: nauc_mrr_at_3_max value: 23.497847725659142 - type: nauc_mrr_at_3_std value: -7.410336000277848 - type: nauc_mrr_at_5_diff1 value: 47.62981354137329 - type: nauc_mrr_at_5_max value: 23.0908258648007 - type: nauc_mrr_at_5_std value: -7.051386215500063 - type: nauc_ndcg_at_1000_diff1 value: 46.60733449217545 - type: nauc_ndcg_at_1000_max value: 25.039484613656448 - type: nauc_ndcg_at_1000_std value: -3.2394363595393556 - type: nauc_ndcg_at_100_diff1 value: 45.31095520474484 - type: nauc_ndcg_at_100_max value: 24.578976351335413 - type: nauc_ndcg_at_100_std value: -3.036265774651672 - type: nauc_ndcg_at_10_diff1 value: 46.63697058056525 - type: nauc_ndcg_at_10_max value: 24.413653146109695 - type: nauc_ndcg_at_10_std value: -4.944504662117886 - type: nauc_ndcg_at_1_diff1 value: 53.18508134661776 - type: nauc_ndcg_at_1_max value: 21.57319946504392 - type: nauc_ndcg_at_1_std 
value: -12.568495954713804 - type: nauc_ndcg_at_20_diff1 value: 46.03439421346578 - type: nauc_ndcg_at_20_max value: 24.662945449517622 - type: nauc_ndcg_at_20_std value: -3.9914141082400856 - type: nauc_ndcg_at_3_diff1 value: 48.8952101806421 - type: nauc_ndcg_at_3_max value: 24.69438869549227 - type: nauc_ndcg_at_3_std value: -6.72590864779706 - type: nauc_ndcg_at_5_diff1 value: 46.96218364320126 - type: nauc_ndcg_at_5_max value: 23.840619604798928 - type: nauc_ndcg_at_5_std value: -5.911631557623504 - type: nauc_precision_at_1000_diff1 value: -5.259769046120934 - type: nauc_precision_at_1000_max value: 7.77398082268069 - type: nauc_precision_at_1000_std value: 7.725023686139027 - type: nauc_precision_at_100_diff1 value: 0.3924024674134793 - type: nauc_precision_at_100_max value: 12.127926352704103 - type: nauc_precision_at_100_std value: 11.61834703672354 - type: nauc_precision_at_10_diff1 value: 21.700387677953874 - type: nauc_precision_at_10_max value: 19.29513949184622 - type: nauc_precision_at_10_std value: 6.76455697187228 - type: nauc_precision_at_1_diff1 value: 53.18508134661776 - type: nauc_precision_at_1_max value: 21.57319946504392 - type: nauc_precision_at_1_std value: -12.568495954713804 - type: nauc_precision_at_20_diff1 value: 14.121601618575566 - type: nauc_precision_at_20_max value: 17.77278160909811 - type: nauc_precision_at_20_std value: 9.485065181651544 - type: nauc_precision_at_3_diff1 value: 38.470442649709234 - type: nauc_precision_at_3_max value: 24.501832724342997 - type: nauc_precision_at_3_std value: 1.257862017122565 - type: nauc_precision_at_5_diff1 value: 29.04902895925557 - type: nauc_precision_at_5_max value: 20.37995942681232 - type: nauc_precision_at_5_std value: 3.9785670157785153 - type: nauc_recall_at_1000_diff1 value: 29.180134104376016 - type: nauc_recall_at_1000_max value: 39.58508196111224 - type: nauc_recall_at_1000_std value: 49.640447720927014 - type: nauc_recall_at_100_diff1 value: 25.015242287605478 - type: nauc_recall_at_100_max value: 25.41142412711157 - type: nauc_recall_at_100_std value: 17.01713705545484 - type: nauc_recall_at_10_diff1 value: 38.41834403630272 - type: nauc_recall_at_10_max value: 25.456355090308207 - type: nauc_recall_at_10_std value: 1.6016826682218543 - type: nauc_recall_at_1_diff1 value: 55.720545221216035 - type: nauc_recall_at_1_max value: 21.884565094683943 - type: nauc_recall_at_1_std value: -13.63682159495347 - type: nauc_recall_at_20_diff1 value: 34.11058809840644 - type: nauc_recall_at_20_max value: 26.171187577249295 - type: nauc_recall_at_20_std value: 6.257854847772304 - type: nauc_recall_at_3_diff1 value: 44.85525802097949 - type: nauc_recall_at_3_max value: 25.69790963257969 - type: nauc_recall_at_3_std value: -3.324058300190332 - type: nauc_recall_at_5_diff1 value: 40.23014704198238 - type: nauc_recall_at_5_max value: 23.656548188532724 - type: nauc_recall_at_5_std value: -1.2559137136259997 - type: ndcg_at_1 value: 31.747999999999998 - type: ndcg_at_10 value: 40.875 - type: ndcg_at_100 value: 46.045 - type: ndcg_at_1000 value: 48.42 - type: ndcg_at_20 value: 43.077 - type: ndcg_at_3 value: 36.537 - type: ndcg_at_5 value: 38.608 - type: precision_at_1 value: 31.747999999999998 - type: precision_at_10 value: 6.426 - type: precision_at_100 value: 0.9820000000000001 - type: precision_at_1000 value: 0.126 - type: precision_at_20 value: 3.773 - type: precision_at_3 value: 15.542 - type: precision_at_5 value: 10.798 - type: recall_at_1 value: 28.319 - type: recall_at_10 value: 51.919000000000004 - type: 
recall_at_100 value: 74.936 - type: recall_at_1000 value: 92.427 - type: recall_at_20 value: 60.143 - type: recall_at_3 value: 40.062 - type: recall_at_5 value: 45.054 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval (default) type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: main_score value: 34.2 - type: map_at_1 value: 20.092 - type: map_at_10 value: 28.712 - type: map_at_100 value: 29.921 - type: map_at_1000 value: 30.041 - type: map_at_20 value: 29.332 - type: map_at_3 value: 25.777 - type: map_at_5 value: 27.428 - type: mrr_at_1 value: 24.50103234686855 - type: mrr_at_10 value: 32.96439222626432 - type: mrr_at_100 value: 33.88721950310386 - type: mrr_at_1000 value: 33.95367795392008 - type: mrr_at_20 value: 33.45683056028936 - type: mrr_at_3 value: 30.391144757972093 - type: mrr_at_5 value: 31.862239045652814 - type: nauc_map_at_1000_diff1 value: 38.801227014503354 - type: nauc_map_at_1000_max value: 16.46796294205988 - type: nauc_map_at_1000_std value: -0.908142016099861 - type: nauc_map_at_100_diff1 value: 38.78957944215079 - type: nauc_map_at_100_max value: 16.50619731580655 - type: nauc_map_at_100_std value: -0.926462803246373 - type: nauc_map_at_10_diff1 value: 38.88120477365666 - type: nauc_map_at_10_max value: 16.410694924626107 - type: nauc_map_at_10_std value: -1.3751589416775003 - type: nauc_map_at_1_diff1 value: 44.55793750345654 - type: nauc_map_at_1_max value: 13.70418129051785 - type: nauc_map_at_1_std value: -3.694894814523306 - type: nauc_map_at_20_diff1 value: 38.75834963539743 - type: nauc_map_at_20_max value: 16.46548307725419 - type: nauc_map_at_20_std value: -1.1365836446579263 - type: nauc_map_at_3_diff1 value: 39.67246188177096 - type: nauc_map_at_3_max value: 15.922647336168257 - type: nauc_map_at_3_std value: -2.0893896556203697 - type: nauc_map_at_5_diff1 value: 39.159630741375295 - type: nauc_map_at_5_max value: 15.990267953177318 - type: nauc_map_at_5_std value: -1.9327557417769199 - type: nauc_mrr_at_1000_diff1 value: 39.41374781251273 - type: nauc_mrr_at_1000_max value: 16.23733662056267 - type: nauc_mrr_at_1000_std value: -0.789244388066649 - type: nauc_mrr_at_100_diff1 value: 39.398404217138996 - type: nauc_mrr_at_100_max value: 16.253672412822844 - type: nauc_mrr_at_100_std value: -0.7867600591234057 - type: nauc_mrr_at_10_diff1 value: 39.49536833660625 - type: nauc_mrr_at_10_max value: 16.09754737159101 - type: nauc_mrr_at_10_std value: -1.0499409433596656 - type: nauc_mrr_at_1_diff1 value: 44.69124979406609 - type: nauc_mrr_at_1_max value: 13.868640405541807 - type: nauc_mrr_at_1_std value: -2.8447162730313504 - type: nauc_mrr_at_20_diff1 value: 39.346756835821175 - type: nauc_mrr_at_20_max value: 16.152912289746453 - type: nauc_mrr_at_20_std value: -0.889143863344697 - type: nauc_mrr_at_3_diff1 value: 39.955156271984485 - type: nauc_mrr_at_3_max value: 15.96558750579031 - type: nauc_mrr_at_3_std value: -1.4914025339569823 - type: nauc_mrr_at_5_diff1 value: 39.70804365174353 - type: nauc_mrr_at_5_max value: 16.029697481995036 - type: nauc_mrr_at_5_std value: -1.4882483683367165 - type: nauc_ndcg_at_1000_diff1 value: 37.02046985751414 - type: nauc_ndcg_at_1000_max value: 17.563661846904715 - type: nauc_ndcg_at_1000_std value: 1.7951711284228404 - type: nauc_ndcg_at_100_diff1 value: 36.72051225526829 - type: nauc_ndcg_at_100_max value: 18.135735423154557 - type: nauc_ndcg_at_100_std value: 2.110250082343289 - type: nauc_ndcg_at_10_diff1 value: 36.99150581443631 
- type: nauc_ndcg_at_10_max value: 17.39070636654169 - type: nauc_ndcg_at_10_std value: -0.05841298889148948 - type: nauc_ndcg_at_1_diff1 value: 44.69124979406609 - type: nauc_ndcg_at_1_max value: 13.868640405541807 - type: nauc_ndcg_at_1_std value: -2.8447162730313504 - type: nauc_ndcg_at_20_diff1 value: 36.414872205816216 - type: nauc_ndcg_at_20_max value: 17.586459197482483 - type: nauc_ndcg_at_20_std value: 0.7391668449676375 - type: nauc_ndcg_at_3_diff1 value: 38.38645136739687 - type: nauc_ndcg_at_3_max value: 16.34575680544228 - type: nauc_ndcg_at_3_std value: -1.3994988799204304 - type: nauc_ndcg_at_5_diff1 value: 37.74165837892759 - type: nauc_ndcg_at_5_max value: 16.608751402224897 - type: nauc_ndcg_at_5_std value: -1.257572415110036 - type: nauc_precision_at_1000_diff1 value: -0.7240380303030971 - type: nauc_precision_at_1000_max value: -3.5605303285911027 - type: nauc_precision_at_1000_std value: 8.125281566316204 - type: nauc_precision_at_100_diff1 value: 9.010538792969484 - type: nauc_precision_at_100_max value: 7.450960760684124 - type: nauc_precision_at_100_std value: 12.920657697122673 - type: nauc_precision_at_10_diff1 value: 22.147149186948635 - type: nauc_precision_at_10_max value: 15.630431972920569 - type: nauc_precision_at_10_std value: 5.4243370504055 - type: nauc_precision_at_1_diff1 value: 44.69124979406609 - type: nauc_precision_at_1_max value: 13.868640405541807 - type: nauc_precision_at_1_std value: -2.8447162730313504 - type: nauc_precision_at_20_diff1 value: 18.26160944313167 - type: nauc_precision_at_20_max value: 13.158574673294945 - type: nauc_precision_at_20_std value: 7.779571305755091 - type: nauc_precision_at_3_diff1 value: 31.986352691060247 - type: nauc_precision_at_3_max value: 17.41017664660374 - type: nauc_precision_at_3_std value: 1.023372093667892 - type: nauc_precision_at_5_diff1 value: 28.34452353578217 - type: nauc_precision_at_5_max value: 16.854077957561437 - type: nauc_precision_at_5_std value: 2.1330345009761507 - type: nauc_recall_at_1000_diff1 value: 22.088636874333005 - type: nauc_recall_at_1000_max value: 24.455861461567714 - type: nauc_recall_at_1000_std value: 26.92484429539892 - type: nauc_recall_at_100_diff1 value: 25.76058818863046 - type: nauc_recall_at_100_max value: 24.4436267552323 - type: nauc_recall_at_100_std value: 16.649562153485977 - type: nauc_recall_at_10_diff1 value: 29.478028454664805 - type: nauc_recall_at_10_max value: 19.394729057696786 - type: nauc_recall_at_10_std value: 2.9975150306525613 - type: nauc_recall_at_1_diff1 value: 44.55793750345654 - type: nauc_recall_at_1_max value: 13.70418129051785 - type: nauc_recall_at_1_std value: -3.694894814523306 - type: nauc_recall_at_20_diff1 value: 26.56532137638919 - type: nauc_recall_at_20_max value: 19.987133789175047 - type: nauc_recall_at_20_std value: 6.108194576328286 - type: nauc_recall_at_3_diff1 value: 33.38390307648489 - type: nauc_recall_at_3_max value: 17.77954593170338 - type: nauc_recall_at_3_std value: -0.669865037316041 - type: nauc_recall_at_5_diff1 value: 31.665576731111596 - type: nauc_recall_at_5_max value: 17.638679384754298 - type: nauc_recall_at_5_std value: -0.2623201148382 - type: ndcg_at_1 value: 24.501 - type: ndcg_at_10 value: 34.2 - type: ndcg_at_100 value: 39.806000000000004 - type: ndcg_at_1000 value: 42.359 - type: ndcg_at_20 value: 36.125 - type: ndcg_at_3 value: 29.093999999999998 - type: ndcg_at_5 value: 31.52 - type: precision_at_1 value: 24.501 - type: precision_at_10 value: 6.370000000000001 - type: precision_at_100 value: 1.076 - 
type: precision_at_1000 value: 0.148 - type: precision_at_20 value: 3.7900000000000005 - type: precision_at_3 value: 13.844999999999999 - type: precision_at_5 value: 10.151 - type: recall_at_1 value: 20.092 - type: recall_at_10 value: 46.457 - type: recall_at_100 value: 71.533 - type: recall_at_1000 value: 89.357 - type: recall_at_20 value: 53.410999999999994 - type: recall_at_3 value: 32.255 - type: recall_at_5 value: 38.474000000000004 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval (default) type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: main_score value: 49.925000000000004 - type: map_at_1 value: 32.965 - type: map_at_10 value: 43.951 - type: map_at_100 value: 45.144 - type: map_at_1000 value: 45.236 - type: map_at_20 value: 44.611000000000004 - type: map_at_3 value: 40.605999999999995 - type: map_at_5 value: 42.337 - type: mrr_at_1 value: 38.61940298507462 - type: mrr_at_10 value: 48.268664119876746 - type: mrr_at_100 value: 49.03962456559843 - type: mrr_at_1000 value: 49.08668338220201 - type: mrr_at_20 value: 48.723583454385846 - type: mrr_at_3 value: 45.69340796019895 - type: mrr_at_5 value: 47.06934079601983 - type: nauc_map_at_1000_diff1 value: 46.45044356860005 - type: nauc_map_at_1000_max value: 28.909610021231103 - type: nauc_map_at_1000_std value: -5.131074407893264 - type: nauc_map_at_100_diff1 value: 46.44234825841308 - type: nauc_map_at_100_max value: 28.90990148383219 - type: nauc_map_at_100_std value: -5.141434819001627 - type: nauc_map_at_10_diff1 value: 46.41373398076794 - type: nauc_map_at_10_max value: 28.558920730315734 - type: nauc_map_at_10_std value: -5.2528915713220865 - type: nauc_map_at_1_diff1 value: 51.98405236793469 - type: nauc_map_at_1_max value: 29.053460554358857 - type: nauc_map_at_1_std value: -5.2231285946480845 - type: nauc_map_at_20_diff1 value: 46.39242318241138 - type: nauc_map_at_20_max value: 28.796578425591363 - type: nauc_map_at_20_std value: -5.375091161868357 - type: nauc_map_at_3_diff1 value: 47.914252259189794 - type: nauc_map_at_3_max value: 28.18579536237961 - type: nauc_map_at_3_std value: -5.131658811230181 - type: nauc_map_at_5_diff1 value: 47.09619838991972 - type: nauc_map_at_5_max value: 28.422434919442928 - type: nauc_map_at_5_std value: -5.281011542114002 - type: nauc_mrr_at_1000_diff1 value: 46.81819041166582 - type: nauc_mrr_at_1000_max value: 30.148415140259104 - type: nauc_mrr_at_1000_std value: -4.821255276443201 - type: nauc_mrr_at_100_diff1 value: 46.79842646994142 - type: nauc_mrr_at_100_max value: 30.139669483277242 - type: nauc_mrr_at_100_std value: -4.816513089984477 - type: nauc_mrr_at_10_diff1 value: 46.711364929371236 - type: nauc_mrr_at_10_max value: 30.037443736174556 - type: nauc_mrr_at_10_std value: -4.717691978604977 - type: nauc_mrr_at_1_diff1 value: 51.440759829408854 - type: nauc_mrr_at_1_max value: 30.223550820620837 - type: nauc_mrr_at_1_std value: -5.087318776296894 - type: nauc_mrr_at_20_diff1 value: 46.793483872253226 - type: nauc_mrr_at_20_max value: 30.16268873819652 - type: nauc_mrr_at_20_std value: -5.01215299865215 - type: nauc_mrr_at_3_diff1 value: 47.6792606048565 - type: nauc_mrr_at_3_max value: 30.252407504386003 - type: nauc_mrr_at_3_std value: -4.951328714622018 - type: nauc_mrr_at_5_diff1 value: 47.08068341617418 - type: nauc_mrr_at_5_max value: 30.010955994145842 - type: nauc_mrr_at_5_std value: -4.8006009530708695 - type: nauc_ndcg_at_1000_diff1 value: 45.09722902533752 - type: 
nauc_ndcg_at_1000_max value: 29.781175279602127 - type: nauc_ndcg_at_1000_std value: -4.260098124856812 - type: nauc_ndcg_at_100_diff1 value: 44.614299417231415 - type: nauc_ndcg_at_100_max value: 29.744449124699358 - type: nauc_ndcg_at_100_std value: -4.087042833418112 - type: nauc_ndcg_at_10_diff1 value: 44.2584878876689 - type: nauc_ndcg_at_10_max value: 28.586996394544194 - type: nauc_ndcg_at_10_std value: -5.178666682774291 - type: nauc_ndcg_at_1_diff1 value: 51.440759829408854 - type: nauc_ndcg_at_1_max value: 30.223550820620837 - type: nauc_ndcg_at_1_std value: -5.087318776296894 - type: nauc_ndcg_at_20_diff1 value: 44.281589674181625 - type: nauc_ndcg_at_20_max value: 29.36490051594007 - type: nauc_ndcg_at_20_std value: -5.682904047831825 - type: nauc_ndcg_at_3_diff1 value: 46.43618085964397 - type: nauc_ndcg_at_3_max value: 28.411951238312618 - type: nauc_ndcg_at_3_std value: -5.350564037791816 - type: nauc_ndcg_at_5_diff1 value: 45.44136119207941 - type: nauc_ndcg_at_5_max value: 28.265958681020614 - type: nauc_ndcg_at_5_std value: -5.412966841119856 - type: nauc_precision_at_1000_diff1 value: -15.119149939557817 - type: nauc_precision_at_1000_max value: -3.784690736781482 - type: nauc_precision_at_1000_std value: -0.0813183769707101 - type: nauc_precision_at_100_diff1 value: -4.5616868446163945 - type: nauc_precision_at_100_max value: 8.135454448790497 - type: nauc_precision_at_100_std value: 3.819831209731323 - type: nauc_precision_at_10_diff1 value: 13.204205281825043 - type: nauc_precision_at_10_max value: 16.406917431583654 - type: nauc_precision_at_10_std value: -4.82389517876021 - type: nauc_precision_at_1_diff1 value: 51.440759829408854 - type: nauc_precision_at_1_max value: 30.223550820620837 - type: nauc_precision_at_1_std value: -5.087318776296894 - type: nauc_precision_at_20_diff1 value: 6.8274935971270185 - type: nauc_precision_at_20_max value: 15.317043149767647 - type: nauc_precision_at_20_std value: -4.066331816936127 - type: nauc_precision_at_3_diff1 value: 32.06782113672368 - type: nauc_precision_at_3_max value: 23.87593843126193 - type: nauc_precision_at_3_std value: -4.212494828749518 - type: nauc_precision_at_5_diff1 value: 23.978658995152873 - type: nauc_precision_at_5_max value: 21.532720008223542 - type: nauc_precision_at_5_std value: -4.3052844712664236 - type: nauc_recall_at_1000_diff1 value: 35.9864247724948 - type: nauc_recall_at_1000_max value: 47.06513336878949 - type: nauc_recall_at_1000_std value: 30.273808634870708 - type: nauc_recall_at_100_diff1 value: 32.10192474481891 - type: nauc_recall_at_100_max value: 30.669264116528367 - type: nauc_recall_at_100_std value: 5.15787243359946 - type: nauc_recall_at_10_diff1 value: 34.104862759569485 - type: nauc_recall_at_10_max value: 24.84533402808042 - type: nauc_recall_at_10_std value: -4.853992316710289 - type: nauc_recall_at_1_diff1 value: 51.98405236793469 - type: nauc_recall_at_1_max value: 29.053460554358857 - type: nauc_recall_at_1_std value: -5.2231285946480845 - type: nauc_recall_at_20_diff1 value: 33.179237838086536 - type: nauc_recall_at_20_max value: 27.528148034429883 - type: nauc_recall_at_20_std value: -7.801302133684411 - type: nauc_recall_at_3_diff1 value: 43.042762158279935 - type: nauc_recall_at_3_max value: 25.969613842446375 - type: nauc_recall_at_3_std value: -5.221121416249563 - type: nauc_recall_at_5_diff1 value: 39.479797215381254 - type: nauc_recall_at_5_max value: 25.367693620484733 - type: nauc_recall_at_5_std value: -5.201233398741834 - type: ndcg_at_1 value: 38.619 - type: 
ndcg_at_10 value: 49.925000000000004 - type: ndcg_at_100 value: 54.900000000000006 - type: ndcg_at_1000 value: 56.71000000000001 - type: ndcg_at_20 value: 51.9 - type: ndcg_at_3 value: 44.383 - type: ndcg_at_5 value: 46.697 - type: precision_at_1 value: 38.619 - type: precision_at_10 value: 8.526 - type: precision_at_100 value: 1.2229999999999999 - type: precision_at_1000 value: 0.148 - type: precision_at_20 value: 4.841 - type: precision_at_3 value: 20.336000000000002 - type: precision_at_5 value: 14.030000000000001 - type: recall_at_1 value: 32.965 - type: recall_at_10 value: 63.56 - type: recall_at_100 value: 84.621 - type: recall_at_1000 value: 96.74000000000001 - type: recall_at_20 value: 70.48100000000001 - type: recall_at_3 value: 48.191 - type: recall_at_5 value: 54.205999999999996 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval (default) type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: main_score value: 47.074 - type: map_at_1 value: 29.403000000000002 - type: map_at_10 value: 40.573 - type: map_at_100 value: 42.484 - type: map_at_1000 value: 42.716 - type: map_at_20 value: 41.599000000000004 - type: map_at_3 value: 37.339 - type: map_at_5 value: 39.193 - type: mrr_at_1 value: 36.36363636363637 - type: mrr_at_10 value: 45.757183637618425 - type: mrr_at_100 value: 46.770766133898064 - type: mrr_at_1000 value: 46.80980211476349 - type: mrr_at_20 value: 46.423621646963234 - type: mrr_at_3 value: 43.28063241106722 - type: mrr_at_5 value: 44.565217391304365 - type: nauc_map_at_1000_diff1 value: 49.50553138260385 - type: nauc_map_at_1000_max value: 19.358051251729698 - type: nauc_map_at_1000_std value: -2.011789122501399 - type: nauc_map_at_100_diff1 value: 49.44915583782217 - type: nauc_map_at_100_max value: 19.60901120148682 - type: nauc_map_at_100_std value: -2.41751136217268 - type: nauc_map_at_10_diff1 value: 49.51594423618221 - type: nauc_map_at_10_max value: 19.619242889635906 - type: nauc_map_at_10_std value: -3.8273122325339166 - type: nauc_map_at_1_diff1 value: 54.215223726227 - type: nauc_map_at_1_max value: 17.977025381159507 - type: nauc_map_at_1_std value: -9.062375244843237 - type: nauc_map_at_20_diff1 value: 49.304332449390536 - type: nauc_map_at_20_max value: 19.716118401380907 - type: nauc_map_at_20_std value: -3.4025172547840516 - type: nauc_map_at_3_diff1 value: 49.68092213465047 - type: nauc_map_at_3_max value: 18.97116061804881 - type: nauc_map_at_3_std value: -5.829584927881651 - type: nauc_map_at_5_diff1 value: 49.324006273400514 - type: nauc_map_at_5_max value: 19.112945761695528 - type: nauc_map_at_5_std value: -4.314461382420533 - type: nauc_mrr_at_1000_diff1 value: 49.92775633194418 - type: nauc_mrr_at_1000_max value: 19.49081993588585 - type: nauc_mrr_at_1000_std value: -0.7332025146393024 - type: nauc_mrr_at_100_diff1 value: 49.9260204058528 - type: nauc_mrr_at_100_max value: 19.484679786390437 - type: nauc_mrr_at_100_std value: -0.7212280673024297 - type: nauc_mrr_at_10_diff1 value: 50.148096993162575 - type: nauc_mrr_at_10_max value: 19.608544665563997 - type: nauc_mrr_at_10_std value: -0.8945343475848919 - type: nauc_mrr_at_1_diff1 value: 52.42661281910124 - type: nauc_mrr_at_1_max value: 18.7638434458577 - type: nauc_mrr_at_1_std value: -3.301275325073689 - type: nauc_mrr_at_20_diff1 value: 49.928792615111725 - type: nauc_mrr_at_20_max value: 19.49421015513326 - type: nauc_mrr_at_20_std value: -0.8668232758964624 - type: nauc_mrr_at_3_diff1 value: 
50.024454026256436 - type: nauc_mrr_at_3_max value: 19.396056559912353 - type: nauc_mrr_at_3_std value: -1.0587164638878859 - type: nauc_mrr_at_5_diff1 value: 49.616273296977376 - type: nauc_mrr_at_5_max value: 19.71000744166205 - type: nauc_mrr_at_5_std value: -0.6138430434879517 - type: nauc_ndcg_at_1000_diff1 value: 48.83183056294745 - type: nauc_ndcg_at_1000_max value: 20.353466850695327 - type: nauc_ndcg_at_1000_std value: 1.3370177783952768 - type: nauc_ndcg_at_100_diff1 value: 48.317913647798306 - type: nauc_ndcg_at_100_max value: 20.486233597321966 - type: nauc_ndcg_at_100_std value: 1.6024818316441263 - type: nauc_ndcg_at_10_diff1 value: 49.429974356858 - type: nauc_ndcg_at_10_max value: 19.98920775777258 - type: nauc_ndcg_at_10_std value: 0.13520294668378868 - type: nauc_ndcg_at_1_diff1 value: 52.42661281910124 - type: nauc_ndcg_at_1_max value: 18.7638434458577 - type: nauc_ndcg_at_1_std value: -3.301275325073689 - type: nauc_ndcg_at_20_diff1 value: 48.35717513159501 - type: nauc_ndcg_at_20_max value: 19.79290006404417 - type: nauc_ndcg_at_20_std value: -0.21211495034087513 - type: nauc_ndcg_at_3_diff1 value: 48.94049508726953 - type: nauc_ndcg_at_3_max value: 19.754511844072315 - type: nauc_ndcg_at_3_std value: -1.239268184711068 - type: nauc_ndcg_at_5_diff1 value: 48.68008869717381 - type: nauc_ndcg_at_5_max value: 19.834309152953693 - type: nauc_ndcg_at_5_std value: 0.45890567595906867 - type: nauc_precision_at_1000_diff1 value: 1.528748334837327 - type: nauc_precision_at_1000_max value: -16.835799237789626 - type: nauc_precision_at_1000_std value: 34.62519837993304 - type: nauc_precision_at_100_diff1 value: 5.696350053173634 - type: nauc_precision_at_100_max value: -5.710692318003089 - type: nauc_precision_at_100_std value: 34.02642773668347 - type: nauc_precision_at_10_diff1 value: 20.45272065905327 - type: nauc_precision_at_10_max value: 13.6845586511117 - type: nauc_precision_at_10_std value: 19.996180339815485 - type: nauc_precision_at_1_diff1 value: 52.42661281910124 - type: nauc_precision_at_1_max value: 18.7638434458577 - type: nauc_precision_at_1_std value: -3.301275325073689 - type: nauc_precision_at_20_diff1 value: 12.379225968221377 - type: nauc_precision_at_20_max value: 7.784360168570556 - type: nauc_precision_at_20_std value: 24.002184594605836 - type: nauc_precision_at_3_diff1 value: 35.06666393942347 - type: nauc_precision_at_3_max value: 19.1969737141229 - type: nauc_precision_at_3_std value: 7.035106494502798 - type: nauc_precision_at_5_diff1 value: 26.452797864599155 - type: nauc_precision_at_5_max value: 16.996610036767233 - type: nauc_precision_at_5_std value: 14.13488894394845 - type: nauc_recall_at_1000_diff1 value: 23.629309076650372 - type: nauc_recall_at_1000_max value: 42.887014532907784 - type: nauc_recall_at_1000_std value: 63.21580949416875 - type: nauc_recall_at_100_diff1 value: 32.834443660172994 - type: nauc_recall_at_100_max value: 24.560781544131917 - type: nauc_recall_at_100_std value: 26.79732543660201 - type: nauc_recall_at_10_diff1 value: 43.460970105956974 - type: nauc_recall_at_10_max value: 19.78367858184654 - type: nauc_recall_at_10_std value: 2.6444312947437942 - type: nauc_recall_at_1_diff1 value: 54.215223726227 - type: nauc_recall_at_1_max value: 17.977025381159507 - type: nauc_recall_at_1_std value: -9.062375244843237 - type: nauc_recall_at_20_diff1 value: 37.448338600203414 - type: nauc_recall_at_20_max value: 19.563467772733254 - type: nauc_recall_at_20_std value: 3.2582843244021595 - type: nauc_recall_at_3_diff1 value: 
44.31564553420772 - type: nauc_recall_at_3_max value: 18.579411558999265 - type: nauc_recall_at_3_std value: -1.1280366390946754 - type: nauc_recall_at_5_diff1 value: 42.83689602925977 - type: nauc_recall_at_5_max value: 18.765879640964766 - type: nauc_recall_at_5_std value: 2.0808172197592643 - type: ndcg_at_1 value: 36.364000000000004 - type: ndcg_at_10 value: 47.074 - type: ndcg_at_100 value: 53.307 - type: ndcg_at_1000 value: 55.043 - type: ndcg_at_20 value: 49.658 - type: ndcg_at_3 value: 42.105 - type: ndcg_at_5 value: 44.348 - type: precision_at_1 value: 36.364000000000004 - type: precision_at_10 value: 9.15 - type: precision_at_100 value: 1.814 - type: precision_at_1000 value: 0.254 - type: precision_at_20 value: 5.800000000000001 - type: precision_at_3 value: 19.829 - type: precision_at_5 value: 14.347999999999999 - type: recall_at_1 value: 29.403000000000002 - type: recall_at_10 value: 58.984 - type: recall_at_100 value: 86.32300000000001 - type: recall_at_1000 value: 96.58 - type: recall_at_20 value: 68.732 - type: recall_at_3 value: 44.994 - type: recall_at_5 value: 50.763999999999996 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval (default) type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: main_score value: 37.698 - type: map_at_1 value: 24.407999999999998 - type: map_at_10 value: 32.778 - type: map_at_100 value: 33.906 - type: map_at_1000 value: 33.994 - type: map_at_20 value: 33.366 - type: map_at_3 value: 29.938 - type: map_at_5 value: 31.488 - type: mrr_at_1 value: 27.171903881700555 - type: mrr_at_10 value: 35.30535750960891 - type: mrr_at_100 value: 36.27455343485727 - type: mrr_at_1000 value: 36.33637412393027 - type: mrr_at_20 value: 35.80660552098608 - type: mrr_at_3 value: 32.8712261244609 - type: mrr_at_5 value: 34.17436845348122 - type: nauc_map_at_1000_diff1 value: 41.79130051151335 - type: nauc_map_at_1000_max value: 15.250449334225705 - type: nauc_map_at_1000_std value: -5.6234146471967765 - type: nauc_map_at_100_diff1 value: 41.82119879391254 - type: nauc_map_at_100_max value: 15.260249318276955 - type: nauc_map_at_100_std value: -5.639782460411048 - type: nauc_map_at_10_diff1 value: 41.721172949121275 - type: nauc_map_at_10_max value: 15.343655917007446 - type: nauc_map_at_10_std value: -6.3555379684374556 - type: nauc_map_at_1_diff1 value: 48.96263429221846 - type: nauc_map_at_1_max value: 13.267518186717433 - type: nauc_map_at_1_std value: -5.684641885998431 - type: nauc_map_at_20_diff1 value: 41.752248673504624 - type: nauc_map_at_20_max value: 15.290823323080769 - type: nauc_map_at_20_std value: -5.961260903195126 - type: nauc_map_at_3_diff1 value: 43.24524104365662 - type: nauc_map_at_3_max value: 15.338745716345509 - type: nauc_map_at_3_std value: -7.764073533482427 - type: nauc_map_at_5_diff1 value: 42.46453798349899 - type: nauc_map_at_5_max value: 15.155487409682902 - type: nauc_map_at_5_std value: -6.9294813277791425 - type: nauc_mrr_at_1000_diff1 value: 41.88993733381641 - type: nauc_mrr_at_1000_max value: 14.380317334991677 - type: nauc_mrr_at_1000_std value: -5.493497006316366 - type: nauc_mrr_at_100_diff1 value: 41.90155799714766 - type: nauc_mrr_at_100_max value: 14.393105389960365 - type: nauc_mrr_at_100_std value: -5.497732224764066 - type: nauc_mrr_at_10_diff1 value: 41.7378563303707 - type: nauc_mrr_at_10_max value: 14.519730688823824 - type: nauc_mrr_at_10_std value: -6.151769146963003 - type: nauc_mrr_at_1_diff1 value: 48.59336779471185 
- type: nauc_mrr_at_1_max value: 11.835618866086927 - type: nauc_mrr_at_1_std value: -5.363816756961067 - type: nauc_mrr_at_20_diff1 value: 41.76639087883366 - type: nauc_mrr_at_20_max value: 14.386536877009048 - type: nauc_mrr_at_20_std value: -5.76729589265512 - type: nauc_mrr_at_3_diff1 value: 42.83069407175199 - type: nauc_mrr_at_3_max value: 14.40817310712037 - type: nauc_mrr_at_3_std value: -6.592772453987663 - type: nauc_mrr_at_5_diff1 value: 42.54301828369426 - type: nauc_mrr_at_5_max value: 14.342399211173499 - type: nauc_mrr_at_5_std value: -6.3321746980792595 - type: nauc_ndcg_at_1000_diff1 value: 39.40808457780407 - type: nauc_ndcg_at_1000_max value: 15.145857892041088 - type: nauc_ndcg_at_1000_std value: -2.7042348869395165 - type: nauc_ndcg_at_100_diff1 value: 39.55061510297866 - type: nauc_ndcg_at_100_max value: 15.398423830498615 - type: nauc_ndcg_at_100_std value: -2.3882074074350785 - type: nauc_ndcg_at_10_diff1 value: 38.853209168604295 - type: nauc_ndcg_at_10_max value: 15.586851775746844 - type: nauc_ndcg_at_10_std value: -5.991925052638258 - type: nauc_ndcg_at_1_diff1 value: 48.59336779471185 - type: nauc_ndcg_at_1_max value: 11.835618866086927 - type: nauc_ndcg_at_1_std value: -5.363816756961067 - type: nauc_ndcg_at_20_diff1 value: 38.93226271515869 - type: nauc_ndcg_at_20_max value: 15.296458747902639 - type: nauc_ndcg_at_20_std value: -4.497710162570065 - type: nauc_ndcg_at_3_diff1 value: 41.49750925284279 - type: nauc_ndcg_at_3_max value: 15.584012047259218 - type: nauc_ndcg_at_3_std value: -8.132156848664613 - type: nauc_ndcg_at_5_diff1 value: 40.66048340734899 - type: nauc_ndcg_at_5_max value: 15.235394896999058 - type: nauc_ndcg_at_5_std value: -7.014796420860427 - type: nauc_precision_at_1000_diff1 value: -8.359237580495055 - type: nauc_precision_at_1000_max value: -11.584395503763362 - type: nauc_precision_at_1000_std value: 2.5543652208263263 - type: nauc_precision_at_100_diff1 value: 8.200754749985318 - type: nauc_precision_at_100_max value: 7.720780751540077 - type: nauc_precision_at_100_std value: 17.971083174211202 - type: nauc_precision_at_10_diff1 value: 21.257328495191853 - type: nauc_precision_at_10_max value: 14.344312062245008 - type: nauc_precision_at_10_std value: -0.9364852661474388 - type: nauc_precision_at_1_diff1 value: 48.59336779471185 - type: nauc_precision_at_1_max value: 11.835618866086927 - type: nauc_precision_at_1_std value: -5.363816756961067 - type: nauc_precision_at_20_diff1 value: 18.779172747828625 - type: nauc_precision_at_20_max value: 11.834784068449874 - type: nauc_precision_at_20_std value: 4.75265567501331 - type: nauc_precision_at_3_diff1 value: 35.68627977314586 - type: nauc_precision_at_3_max value: 16.09945138219868 - type: nauc_precision_at_3_std value: -9.141703662048604 - type: nauc_precision_at_5_diff1 value: 29.08631278869783 - type: nauc_precision_at_5_max value: 14.729434184074591 - type: nauc_precision_at_5_std value: -5.440719877914668 - type: nauc_recall_at_1000_diff1 value: 16.747172572611 - type: nauc_recall_at_1000_max value: 12.79769669067411 - type: nauc_recall_at_1000_std value: 37.13714022788794 - type: nauc_recall_at_100_diff1 value: 28.633698519131812 - type: nauc_recall_at_100_max value: 15.169044663120046 - type: nauc_recall_at_100_std value: 16.82928212478329 - type: nauc_recall_at_10_diff1 value: 29.30601441948013 - type: nauc_recall_at_10_max value: 16.436996254205454 - type: nauc_recall_at_10_std value: -4.785626170786634 - type: nauc_recall_at_1_diff1 value: 48.96263429221846 - type: 
nauc_recall_at_1_max value: 13.267518186717433 - type: nauc_recall_at_1_std value: -5.684641885998431 - type: nauc_recall_at_20_diff1 value: 28.941113801073513 - type: nauc_recall_at_20_max value: 15.03690157531021 - type: nauc_recall_at_20_std value: 0.952082171289431 - type: nauc_recall_at_3_diff1 value: 37.16097228939549 - type: nauc_recall_at_3_max value: 16.791181013520706 - type: nauc_recall_at_3_std value: -10.07433254635823 - type: nauc_recall_at_5_diff1 value: 34.819091777714114 - type: nauc_recall_at_5_max value: 15.563789505647332 - type: nauc_recall_at_5_std value: -7.539816172515026 - type: ndcg_at_1 value: 27.172 - type: ndcg_at_10 value: 37.698 - type: ndcg_at_100 value: 43.267 - type: ndcg_at_1000 value: 45.421 - type: ndcg_at_20 value: 39.661 - type: ndcg_at_3 value: 32.439 - type: ndcg_at_5 value: 34.867 - type: precision_at_1 value: 27.172 - type: precision_at_10 value: 5.970000000000001 - type: precision_at_100 value: 0.9299999999999999 - type: precision_at_1000 value: 0.124 - type: precision_at_20 value: 3.447 - type: precision_at_3 value: 13.555 - type: precision_at_5 value: 9.722999999999999 - type: recall_at_1 value: 24.407999999999998 - type: recall_at_10 value: 50.354 - type: recall_at_100 value: 76.347 - type: recall_at_1000 value: 92.06400000000001 - type: recall_at_20 value: 57.757000000000005 - type: recall_at_3 value: 36.503 - type: recall_at_5 value: 42.129 - task: type: Retrieval dataset: name: MTEB ClimateFEVER (default) type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: main_score value: 44.617000000000004 - type: map_at_1 value: 19.104 - type: map_at_10 value: 33.802 - type: map_at_100 value: 36.079 - type: map_at_1000 value: 36.248999999999995 - type: map_at_20 value: 35.131 - type: map_at_3 value: 27.991 - type: map_at_5 value: 31.118000000000002 - type: mrr_at_1 value: 43.583061889250814 - type: mrr_at_10 value: 56.87275735484211 - type: mrr_at_100 value: 57.39889418793922 - type: mrr_at_1000 value: 57.41794142620278 - type: mrr_at_20 value: 57.22771005837437 - type: mrr_at_3 value: 53.97394136807826 - type: mrr_at_5 value: 55.73615635179164 - type: nauc_map_at_1000_diff1 value: 26.630926110315777 - type: nauc_map_at_1000_max value: 39.630984824231405 - type: nauc_map_at_1000_std value: 12.021698649336102 - type: nauc_map_at_100_diff1 value: 26.65686177968633 - type: nauc_map_at_100_max value: 39.66934127737377 - type: nauc_map_at_100_std value: 12.012347759378558 - type: nauc_map_at_10_diff1 value: 26.5456237749527 - type: nauc_map_at_10_max value: 38.686799553195236 - type: nauc_map_at_10_std value: 10.436328337171096 - type: nauc_map_at_1_diff1 value: 37.70092410090826 - type: nauc_map_at_1_max value: 32.706727035298314 - type: nauc_map_at_1_std value: 3.5831967427511726 - type: nauc_map_at_20_diff1 value: 26.681209225651724 - type: nauc_map_at_20_max value: 39.284145121514825 - type: nauc_map_at_20_std value: 11.222202700116116 - type: nauc_map_at_3_diff1 value: 28.59667648558309 - type: nauc_map_at_3_max value: 36.25805661700928 - type: nauc_map_at_3_std value: 6.464598115667347 - type: nauc_map_at_5_diff1 value: 27.430543578903876 - type: nauc_map_at_5_max value: 37.364734786367364 - type: nauc_map_at_5_std value: 8.655542089806918 - type: nauc_mrr_at_1000_diff1 value: 28.479201441412773 - type: nauc_mrr_at_1000_max value: 39.804512389170306 - type: nauc_mrr_at_1000_std value: 15.975422602158526 - type: nauc_mrr_at_100_diff1 value: 28.472760240011453 - type: 
nauc_mrr_at_100_max value: 39.817824846642495 - type: nauc_mrr_at_100_std value: 15.995615089727696 - type: nauc_mrr_at_10_diff1 value: 28.30497282698149 - type: nauc_mrr_at_10_max value: 39.8750223929803 - type: nauc_mrr_at_10_std value: 16.056752560910738 - type: nauc_mrr_at_1_diff1 value: 32.14744915860195 - type: nauc_mrr_at_1_max value: 36.1460298415032 - type: nauc_mrr_at_1_std value: 11.071199685007258 - type: nauc_mrr_at_20_diff1 value: 28.4669940506914 - type: nauc_mrr_at_20_max value: 39.8689906588194 - type: nauc_mrr_at_20_std value: 16.063644389310987 - type: nauc_mrr_at_3_diff1 value: 27.763789833382084 - type: nauc_mrr_at_3_max value: 38.90694044105185 - type: nauc_mrr_at_3_std value: 14.930859774821496 - type: nauc_mrr_at_5_diff1 value: 28.518810945056806 - type: nauc_mrr_at_5_max value: 39.44652822929369 - type: nauc_mrr_at_5_std value: 15.601758278493936 - type: nauc_ndcg_at_1000_diff1 value: 24.917943936491202 - type: nauc_ndcg_at_1000_max value: 42.41689422016377 - type: nauc_ndcg_at_1000_std value: 18.56325105262615 - type: nauc_ndcg_at_100_diff1 value: 24.660217650539824 - type: nauc_ndcg_at_100_max value: 43.0556058064233 - type: nauc_ndcg_at_100_std value: 18.72349248160942 - type: nauc_ndcg_at_10_diff1 value: 24.946942918886105 - type: nauc_ndcg_at_10_max value: 40.95725387267496 - type: nauc_ndcg_at_10_std value: 14.517400662235858 - type: nauc_ndcg_at_1_diff1 value: 32.14744915860195 - type: nauc_ndcg_at_1_max value: 36.1460298415032 - type: nauc_ndcg_at_1_std value: 11.071199685007258 - type: nauc_ndcg_at_20_diff1 value: 25.188740918902763 - type: nauc_ndcg_at_20_max value: 42.00683229592938 - type: nauc_ndcg_at_20_std value: 16.240449187324334 - type: nauc_ndcg_at_3_diff1 value: 25.96917668130959 - type: nauc_ndcg_at_3_max value: 37.30870453176644 - type: nauc_ndcg_at_3_std value: 10.242190353093983 - type: nauc_ndcg_at_5_diff1 value: 26.21449841406056 - type: nauc_ndcg_at_5_max value: 38.92176118293679 - type: nauc_ndcg_at_5_std value: 11.725909337459568 - type: nauc_precision_at_1000_diff1 value: -16.01323462215506 - type: nauc_precision_at_1000_max value: 0.5840156344246599 - type: nauc_precision_at_1000_std value: 21.984332405839837 - type: nauc_precision_at_100_diff1 value: -9.61411849608862 - type: nauc_precision_at_100_max value: 17.504216512517146 - type: nauc_precision_at_100_std value: 28.629825648562633 - type: nauc_precision_at_10_diff1 value: 1.2355527780976312 - type: nauc_precision_at_10_max value: 29.790400398455517 - type: nauc_precision_at_10_std value: 23.76365693234943 - type: nauc_precision_at_1_diff1 value: 32.14744915860195 - type: nauc_precision_at_1_max value: 36.1460298415032 - type: nauc_precision_at_1_std value: 11.071199685007258 - type: nauc_precision_at_20_diff1 value: -0.9865443887144156 - type: nauc_precision_at_20_max value: 26.54154724938248 - type: nauc_precision_at_20_std value: 25.754372573612518 - type: nauc_precision_at_3_diff1 value: 10.765411595295998 - type: nauc_precision_at_3_max value: 34.70130346122421 - type: nauc_precision_at_3_std value: 16.215878443176408 - type: nauc_precision_at_5_diff1 value: 7.299668818561885 - type: nauc_precision_at_5_max value: 32.31206228488871 - type: nauc_precision_at_5_std value: 19.51441846964699 - type: nauc_recall_at_1000_diff1 value: 8.901785484325925 - type: nauc_recall_at_1000_max value: 43.24767978941463 - type: nauc_recall_at_1000_std value: 40.29307286192429 - type: nauc_recall_at_100_diff1 value: 11.465889365702624 - type: nauc_recall_at_100_max value: 43.24309504524636 - 
type: nauc_recall_at_100_std value: 30.118511303782835 - type: nauc_recall_at_10_diff1 value: 16.52685883767623 - type: nauc_recall_at_10_max value: 37.23567264572587 - type: nauc_recall_at_10_std value: 15.22055626936892 - type: nauc_recall_at_1_diff1 value: 37.70092410090826 - type: nauc_recall_at_1_max value: 32.706727035298314 - type: nauc_recall_at_1_std value: 3.5831967427511726 - type: nauc_recall_at_20_diff1 value: 15.927701447484194 - type: nauc_recall_at_20_max value: 38.26123449652692 - type: nauc_recall_at_20_std value: 18.992297231330056 - type: nauc_recall_at_3_diff1 value: 22.625663839144014 - type: nauc_recall_at_3_max value: 35.0258971497311 - type: nauc_recall_at_3_std value: 6.906428909460032 - type: nauc_recall_at_5_diff1 value: 20.52738451430066 - type: nauc_recall_at_5_max value: 35.50121476875723 - type: nauc_recall_at_5_std value: 10.743371408711585 - type: ndcg_at_1 value: 43.583 - type: ndcg_at_10 value: 44.617000000000004 - type: ndcg_at_100 value: 51.849999999999994 - type: ndcg_at_1000 value: 54.383 - type: ndcg_at_20 value: 47.751 - type: ndcg_at_3 value: 37.474000000000004 - type: ndcg_at_5 value: 39.967999999999996 - type: precision_at_1 value: 43.583 - type: precision_at_10 value: 13.966999999999999 - type: precision_at_100 value: 2.191 - type: precision_at_1000 value: 0.267 - type: precision_at_20 value: 8.391 - type: precision_at_3 value: 28.404 - type: precision_at_5 value: 21.694 - type: recall_at_1 value: 19.104 - type: recall_at_10 value: 51.498999999999995 - type: recall_at_100 value: 75.32 - type: recall_at_1000 value: 89.036 - type: recall_at_20 value: 60.089000000000006 - type: recall_at_3 value: 33.672999999999995 - type: recall_at_5 value: 41.306 - task: type: Retrieval dataset: name: MTEB DBPedia (default) type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: main_score value: 50.096 - type: map_at_1 value: 9.366 - type: map_at_10 value: 23.432 - type: map_at_100 value: 34.835 - type: map_at_1000 value: 36.675000000000004 - type: map_at_20 value: 28.027 - type: map_at_3 value: 15.825 - type: map_at_5 value: 18.94 - type: mrr_at_1 value: 71.75 - type: mrr_at_10 value: 79.6294642857143 - type: mrr_at_100 value: 79.92590099939845 - type: mrr_at_1000 value: 79.93391140733975 - type: mrr_at_20 value: 79.85458152958155 - type: mrr_at_3 value: 78.25000000000001 - type: mrr_at_5 value: 79.28750000000001 - type: nauc_map_at_1000_diff1 value: 25.472104119169913 - type: nauc_map_at_1000_max value: 41.56911605863819 - type: nauc_map_at_1000_std value: 26.96384045959376 - type: nauc_map_at_100_diff1 value: 26.776312077270624 - type: nauc_map_at_100_max value: 39.7545285058008 - type: nauc_map_at_100_std value: 23.490115682443395 - type: nauc_map_at_10_diff1 value: 29.688988181768742 - type: nauc_map_at_10_max value: 23.599686148007372 - type: nauc_map_at_10_std value: -2.743236101939126 - type: nauc_map_at_1_diff1 value: 40.19961160931266 - type: nauc_map_at_1_max value: 11.484500166682448 - type: nauc_map_at_1_std value: -20.998662627418653 - type: nauc_map_at_20_diff1 value: 28.589067878029685 - type: nauc_map_at_20_max value: 30.617557831619052 - type: nauc_map_at_20_std value: 8.40108528296231 - type: nauc_map_at_3_diff1 value: 33.289088478610246 - type: nauc_map_at_3_max value: 14.779109597775575 - type: nauc_map_at_3_std value: -15.48554361705479 - type: nauc_map_at_5_diff1 value: 31.37536261149692 - type: nauc_map_at_5_max value: 17.420722132646357 - type: nauc_map_at_5_std value: 
-11.533685762302074 - type: nauc_mrr_at_1000_diff1 value: 51.65831287063808 - type: nauc_mrr_at_1000_max value: 68.60039697252385 - type: nauc_mrr_at_1000_std value: 47.101684802168755 - type: nauc_mrr_at_100_diff1 value: 51.62318787423693 - type: nauc_mrr_at_100_max value: 68.6016051934096 - type: nauc_mrr_at_100_std value: 47.084125499520056 - type: nauc_mrr_at_10_diff1 value: 51.58457220757248 - type: nauc_mrr_at_10_max value: 68.52135361188292 - type: nauc_mrr_at_10_std value: 47.09512630117651 - type: nauc_mrr_at_1_diff1 value: 56.12302389553575 - type: nauc_mrr_at_1_max value: 67.25359948108763 - type: nauc_mrr_at_1_std value: 44.65155697383184 - type: nauc_mrr_at_20_diff1 value: 51.557612960835066 - type: nauc_mrr_at_20_max value: 68.62925036486892 - type: nauc_mrr_at_20_std value: 47.23452793919026 - type: nauc_mrr_at_3_diff1 value: 53.111622365148456 - type: nauc_mrr_at_3_max value: 68.96353991501803 - type: nauc_mrr_at_3_std value: 47.21923770237274 - type: nauc_mrr_at_5_diff1 value: 51.49932506601612 - type: nauc_mrr_at_5_max value: 68.47321777065385 - type: nauc_mrr_at_5_std value: 47.02157292074972 - type: nauc_ndcg_at_1000_diff1 value: 34.063946651439196 - type: nauc_ndcg_at_1000_max value: 56.37662421606667 - type: nauc_ndcg_at_1000_std value: 43.79623286366516 - type: nauc_ndcg_at_100_diff1 value: 34.621015007290914 - type: nauc_ndcg_at_100_max value: 51.10262571522196 - type: nauc_ndcg_at_100_std value: 33.99194547177918 - type: nauc_ndcg_at_10_diff1 value: 32.20831767471151 - type: nauc_ndcg_at_10_max value: 52.421069203710665 - type: nauc_ndcg_at_10_std value: 31.34328336300653 - type: nauc_ndcg_at_1_diff1 value: 55.233112065599386 - type: nauc_ndcg_at_1_max value: 57.40609137055842 - type: nauc_ndcg_at_1_std value: 33.612846544318614 - type: nauc_ndcg_at_20_diff1 value: 34.38267610887372 - type: nauc_ndcg_at_20_max value: 51.27192996137325 - type: nauc_ndcg_at_20_std value: 29.490347416111018 - type: nauc_ndcg_at_3_diff1 value: 35.56589169628291 - type: nauc_ndcg_at_3_max value: 51.063647622751475 - type: nauc_ndcg_at_3_std value: 31.597143875818784 - type: nauc_ndcg_at_5_diff1 value: 33.096556103749776 - type: nauc_ndcg_at_5_max value: 52.684632250399055 - type: nauc_ndcg_at_5_std value: 31.94245475071079 - type: nauc_precision_at_1000_diff1 value: -23.30986038644832 - type: nauc_precision_at_1000_max value: 7.747092580070645 - type: nauc_precision_at_1000_std value: 19.187233987218818 - type: nauc_precision_at_100_diff1 value: -10.321698436669498 - type: nauc_precision_at_100_max value: 30.042614796744584 - type: nauc_precision_at_100_std value: 43.69817919859801 - type: nauc_precision_at_10_diff1 value: -3.4879804241496686 - type: nauc_precision_at_10_max value: 39.952270729206084 - type: nauc_precision_at_10_std value: 47.57201846870389 - type: nauc_precision_at_1_diff1 value: 56.12302389553575 - type: nauc_precision_at_1_max value: 67.25359948108763 - type: nauc_precision_at_1_std value: 44.65155697383184 - type: nauc_precision_at_20_diff1 value: -4.21774580806289 - type: nauc_precision_at_20_max value: 39.45950542146115 - type: nauc_precision_at_20_std value: 49.38702305013535 - type: nauc_precision_at_3_diff1 value: 11.039231236525476 - type: nauc_precision_at_3_max value: 45.333325600850166 - type: nauc_precision_at_3_std value: 41.939828715832725 - type: nauc_precision_at_5_diff1 value: 1.8456345569611392 - type: nauc_precision_at_5_max value: 43.11574070733236 - type: nauc_precision_at_5_std value: 44.90121015752974 - type: nauc_recall_at_1000_diff1 value: 
23.838392673402637 - type: nauc_recall_at_1000_max value: 40.22494505597155 - type: nauc_recall_at_1000_std value: 50.059930290604 - type: nauc_recall_at_100_diff1 value: 23.568582606097046 - type: nauc_recall_at_100_max value: 32.41458448276608 - type: nauc_recall_at_100_std value: 25.09362121206938 - type: nauc_recall_at_10_diff1 value: 24.640435183950647 - type: nauc_recall_at_10_max value: 16.76741621891125 - type: nauc_recall_at_10_std value: -6.568863340739497 - type: nauc_recall_at_1_diff1 value: 40.19961160931266 - type: nauc_recall_at_1_max value: 11.484500166682448 - type: nauc_recall_at_1_std value: -20.998662627418653 - type: nauc_recall_at_20_diff1 value: 26.120527114451036 - type: nauc_recall_at_20_max value: 24.44767559629039 - type: nauc_recall_at_20_std value: 4.470254335170874 - type: nauc_recall_at_3_diff1 value: 28.810186428560264 - type: nauc_recall_at_3_max value: 10.53337981630349 - type: nauc_recall_at_3_std value: -17.974352774667004 - type: nauc_recall_at_5_diff1 value: 26.026910831426207 - type: nauc_recall_at_5_max value: 11.363004529751835 - type: nauc_recall_at_5_std value: -15.15116848181691 - type: ndcg_at_1 value: 60.5 - type: ndcg_at_10 value: 50.096 - type: ndcg_at_100 value: 54.769999999999996 - type: ndcg_at_1000 value: 61.514 - type: ndcg_at_20 value: 49.234 - type: ndcg_at_3 value: 54.065 - type: ndcg_at_5 value: 52.053000000000004 - type: precision_at_1 value: 71.75 - type: precision_at_10 value: 41.6 - type: precision_at_100 value: 13.13 - type: precision_at_1000 value: 2.2929999999999997 - type: precision_at_20 value: 31.862000000000002 - type: precision_at_3 value: 58.333 - type: precision_at_5 value: 51.15 - type: recall_at_1 value: 9.366 - type: recall_at_10 value: 28.716 - type: recall_at_100 value: 61.72 - type: recall_at_1000 value: 84.068 - type: recall_at_20 value: 37.822 - type: recall_at_3 value: 17.268 - type: recall_at_5 value: 21.714 - task: type: Classification dataset: name: MTEB EmotionClassification (default) type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 92.32999999999998 - type: f1 value: 88.85700702856039 - type: f1_weighted value: 92.5429163779549 - type: main_score value: 92.32999999999998 - task: type: Retrieval dataset: name: MTEB FEVER (default) type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: main_score value: 92.37100000000001 - type: map_at_1 value: 82.069 - type: map_at_10 value: 89.774 - type: map_at_100 value: 89.956 - type: map_at_1000 value: 89.96499999999999 - type: map_at_20 value: 89.884 - type: map_at_3 value: 88.874 - type: map_at_5 value: 89.46499999999999 - type: mrr_at_1 value: 88.47884788478848 - type: mrr_at_10 value: 93.24922373189693 - type: mrr_at_100 value: 93.27576071279056 - type: mrr_at_1000 value: 93.27598653822768 - type: mrr_at_20 value: 93.26572152369198 - type: mrr_at_3 value: 92.9642964296429 - type: mrr_at_5 value: 93.18031803180308 - type: nauc_map_at_1000_diff1 value: 53.44800282062128 - type: nauc_map_at_1000_max value: 25.692315057440663 - type: nauc_map_at_1000_std value: -8.499624248136191 - type: nauc_map_at_100_diff1 value: 53.40850939797177 - type: nauc_map_at_100_max value: 25.69149215182398 - type: nauc_map_at_100_std value: -8.478801691853757 - type: nauc_map_at_10_diff1 value: 52.77776630751276 - type: nauc_map_at_10_max value: 25.47185015678825 - type: nauc_map_at_10_std value: -8.650753717531858 - type: nauc_map_at_1_diff1 value: 
60.26354815835753 - type: nauc_map_at_1_max value: 22.74462336067897 - type: nauc_map_at_1_std value: -16.108392412428454 - type: nauc_map_at_20_diff1 value: 53.1486265423326 - type: nauc_map_at_20_max value: 25.61664977000182 - type: nauc_map_at_20_std value: -8.439815052390303 - type: nauc_map_at_3_diff1 value: 52.076993516597994 - type: nauc_map_at_3_max value: 25.42590262106662 - type: nauc_map_at_3_std value: -9.278602622044712 - type: nauc_map_at_5_diff1 value: 52.4000583320808 - type: nauc_map_at_5_max value: 25.598725240878334 - type: nauc_map_at_5_std value: -9.133132016496823 - type: nauc_mrr_at_1000_diff1 value: 72.07226829644607 - type: nauc_mrr_at_1000_max value: 26.824687617477917 - type: nauc_mrr_at_1000_std value: -15.031219990840263 - type: nauc_mrr_at_100_diff1 value: 72.0722639914847 - type: nauc_mrr_at_100_max value: 26.827181857499184 - type: nauc_mrr_at_100_std value: -15.0272990581751 - type: nauc_mrr_at_10_diff1 value: 71.88186000146027 - type: nauc_mrr_at_10_max value: 26.866725052241648 - type: nauc_mrr_at_10_std value: -14.880144349709168 - type: nauc_mrr_at_1_diff1 value: 74.99261641490762 - type: nauc_mrr_at_1_max value: 24.577652209089802 - type: nauc_mrr_at_1_std value: -17.962989923113483 - type: nauc_mrr_at_20_diff1 value: 72.00705741047372 - type: nauc_mrr_at_20_max value: 26.845088961240588 - type: nauc_mrr_at_20_std value: -14.951590639028053 - type: nauc_mrr_at_3_diff1 value: 71.72981635442622 - type: nauc_mrr_at_3_max value: 27.60475378976304 - type: nauc_mrr_at_3_std value: -14.267663080088363 - type: nauc_mrr_at_5_diff1 value: 71.75159172925191 - type: nauc_mrr_at_5_max value: 27.18216122597638 - type: nauc_mrr_at_5_std value: -14.880763833075017 - type: nauc_ndcg_at_1000_diff1 value: 56.012708886338515 - type: nauc_ndcg_at_1000_max value: 26.6685187848308 - type: nauc_ndcg_at_1000_std value: -7.9204231247691 - type: nauc_ndcg_at_100_diff1 value: 55.07605611733334 - type: nauc_ndcg_at_100_max value: 26.66544320914918 - type: nauc_ndcg_at_100_std value: -7.3329739415918835 - type: nauc_ndcg_at_10_diff1 value: 52.38279200045294 - type: nauc_ndcg_at_10_max value: 26.158254031690486 - type: nauc_ndcg_at_10_std value: -6.917412962602917 - type: nauc_ndcg_at_1_diff1 value: 74.99261641490762 - type: nauc_ndcg_at_1_max value: 24.577652209089802 - type: nauc_ndcg_at_1_std value: -17.962989923113483 - type: nauc_ndcg_at_20_diff1 value: 53.55480506425911 - type: nauc_ndcg_at_20_max value: 26.44888968883975 - type: nauc_ndcg_at_20_std value: -6.689374217452845 - type: nauc_ndcg_at_3_diff1 value: 52.99829218384083 - type: nauc_ndcg_at_3_max value: 27.195877668865897 - type: nauc_ndcg_at_3_std value: -7.631243613468632 - type: nauc_ndcg_at_5_diff1 value: 52.093433279185454 - type: nauc_ndcg_at_5_max value: 26.875927407667096 - type: nauc_ndcg_at_5_std value: -7.672851999155562 - type: nauc_precision_at_1000_diff1 value: -14.088579730952224 - type: nauc_precision_at_1000_max value: -7.709742274489245 - type: nauc_precision_at_1000_std value: 10.896362369744665 - type: nauc_precision_at_100_diff1 value: -15.966065119243305 - type: nauc_precision_at_100_max value: -6.7544255700500875 - type: nauc_precision_at_100_std value: 14.006504085813082 - type: nauc_precision_at_10_diff1 value: -19.197833304284874 - type: nauc_precision_at_10_max value: -6.195782167441997 - type: nauc_precision_at_10_std value: 16.738029717682736 - type: nauc_precision_at_1_diff1 value: 74.99261641490762 - type: nauc_precision_at_1_max value: 24.577652209089802 - type: nauc_precision_at_1_std 
value: -17.962989923113483 - type: nauc_precision_at_20_diff1 value: -17.730920041303605 - type: nauc_precision_at_20_max value: -6.282642825602588 - type: nauc_precision_at_20_std value: 16.760589186930645 - type: nauc_precision_at_3_diff1 value: -12.82096325616898 - type: nauc_precision_at_3_max value: 0.5738252973481384 - type: nauc_precision_at_3_std value: 15.711283966445086 - type: nauc_precision_at_5_diff1 value: -18.160752260997064 - type: nauc_precision_at_5_max value: -3.2988677909840636 - type: nauc_precision_at_5_std value: 15.908059262820377 - type: nauc_recall_at_1000_diff1 value: -1.2226334843233062 - type: nauc_recall_at_1000_max value: 45.75344815857464 - type: nauc_recall_at_1000_std value: 49.9547310437849 - type: nauc_recall_at_100_diff1 value: 4.712867741103275 - type: nauc_recall_at_100_max value: 33.31506548135591 - type: nauc_recall_at_100_std value: 33.367671550361266 - type: nauc_recall_at_10_diff1 value: 13.139120724433717 - type: nauc_recall_at_10_max value: 26.526014011664007 - type: nauc_recall_at_10_std value: 15.180542333855318 - type: nauc_recall_at_1_diff1 value: 60.26354815835753 - type: nauc_recall_at_1_max value: 22.74462336067897 - type: nauc_recall_at_1_std value: -16.108392412428454 - type: nauc_recall_at_20_diff1 value: 11.605408151649485 - type: nauc_recall_at_20_max value: 28.818790457107845 - type: nauc_recall_at_20_std value: 23.189835498467282 - type: nauc_recall_at_3_diff1 value: 28.978376255351453 - type: nauc_recall_at_3_max value: 28.709312217507023 - type: nauc_recall_at_3_std value: 3.3468081584960694 - type: nauc_recall_at_5_diff1 value: 20.66333263411127 - type: nauc_recall_at_5_max value: 29.29110697161188 - type: nauc_recall_at_5_std value: 6.216755665132335 - type: ndcg_at_1 value: 88.479 - type: ndcg_at_10 value: 92.37100000000001 - type: ndcg_at_100 value: 92.914 - type: ndcg_at_1000 value: 93.053 - type: ndcg_at_20 value: 92.617 - type: ndcg_at_3 value: 91.281 - type: ndcg_at_5 value: 91.919 - type: precision_at_1 value: 88.479 - type: precision_at_10 value: 10.972 - type: precision_at_100 value: 1.149 - type: precision_at_1000 value: 0.11800000000000001 - type: precision_at_20 value: 5.584 - type: precision_at_3 value: 34.703 - type: precision_at_5 value: 21.41 - type: recall_at_1 value: 82.069 - type: recall_at_10 value: 96.75399999999999 - type: recall_at_100 value: 98.729 - type: recall_at_1000 value: 99.536 - type: recall_at_20 value: 97.512 - type: recall_at_3 value: 93.821 - type: recall_at_5 value: 95.486 - task: type: Retrieval dataset: name: MTEB FiQA2018 (default) type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: main_score value: 60.42999999999999 - type: map_at_1 value: 30.766 - type: map_at_10 value: 52.068000000000005 - type: map_at_100 value: 54.2 - type: map_at_1000 value: 54.308 - type: map_at_20 value: 53.288999999999994 - type: map_at_3 value: 45.641 - type: map_at_5 value: 49.442 - type: mrr_at_1 value: 58.95061728395061 - type: mrr_at_10 value: 67.74832206545169 - type: mrr_at_100 value: 68.25571872540776 - type: mrr_at_1000 value: 68.26964825240877 - type: mrr_at_20 value: 68.03892810907128 - type: mrr_at_3 value: 65.94650205761315 - type: mrr_at_5 value: 66.9032921810699 - type: nauc_map_at_1000_diff1 value: 53.25138886960556 - type: nauc_map_at_1000_max value: 33.83714322021819 - type: nauc_map_at_1000_std value: -9.055870848544616 - type: nauc_map_at_100_diff1 value: 53.248601161572964 - type: nauc_map_at_100_max value: 33.78714143967792 - 
type: nauc_map_at_100_std value: -8.983994394402767 - type: nauc_map_at_10_diff1 value: 52.85062870865907 - type: nauc_map_at_10_max value: 32.25777960858311 - type: nauc_map_at_10_std value: -10.5817219579484 - type: nauc_map_at_1_diff1 value: 57.07597952448575 - type: nauc_map_at_1_max value: 22.47576453804885 - type: nauc_map_at_1_std value: -11.138477677983802 - type: nauc_map_at_20_diff1 value: 53.28560745224268 - type: nauc_map_at_20_max value: 33.31293437760806 - type: nauc_map_at_20_std value: -9.68022013019077 - type: nauc_map_at_3_diff1 value: 53.83817068427451 - type: nauc_map_at_3_max value: 28.226029979555946 - type: nauc_map_at_3_std value: -10.511596854053773 - type: nauc_map_at_5_diff1 value: 53.54419976556406 - type: nauc_map_at_5_max value: 30.884976067620812 - type: nauc_map_at_5_std value: -10.883740710308967 - type: nauc_mrr_at_1000_diff1 value: 63.23864183325618 - type: nauc_mrr_at_1000_max value: 42.06744031582232 - type: nauc_mrr_at_1000_std value: -4.348234033944706 - type: nauc_mrr_at_100_diff1 value: 63.23118000086403 - type: nauc_mrr_at_100_max value: 42.06993881716349 - type: nauc_mrr_at_100_std value: -4.326734696101004 - type: nauc_mrr_at_10_diff1 value: 63.21554979549312 - type: nauc_mrr_at_10_max value: 42.32026148394012 - type: nauc_mrr_at_10_std value: -4.329477060956577 - type: nauc_mrr_at_1_diff1 value: 66.19833424508124 - type: nauc_mrr_at_1_max value: 41.93477154197192 - type: nauc_mrr_at_1_std value: -5.585740476007292 - type: nauc_mrr_at_20_diff1 value: 63.180624307827124 - type: nauc_mrr_at_20_max value: 42.14306310699489 - type: nauc_mrr_at_20_std value: -4.2116656149704115 - type: nauc_mrr_at_3_diff1 value: 63.54697496826244 - type: nauc_mrr_at_3_max value: 42.098758868920015 - type: nauc_mrr_at_3_std value: -5.329540121219919 - type: nauc_mrr_at_5_diff1 value: 63.17114683933978 - type: nauc_mrr_at_5_max value: 41.89940589437386 - type: nauc_mrr_at_5_std value: -5.251542190078123 - type: nauc_ndcg_at_1000_diff1 value: 55.18103415433243 - type: nauc_ndcg_at_1000_max value: 37.91951492655493 - type: nauc_ndcg_at_1000_std value: -5.404512479926153 - type: nauc_ndcg_at_100_diff1 value: 55.19455786554701 - type: nauc_ndcg_at_100_max value: 37.590709019932476 - type: nauc_ndcg_at_100_std value: -3.8032018105475434 - type: nauc_ndcg_at_10_diff1 value: 54.279922825158465 - type: nauc_ndcg_at_10_max value: 34.81622507536537 - type: nauc_ndcg_at_10_std value: -7.999546114277306 - type: nauc_ndcg_at_1_diff1 value: 66.19833424508124 - type: nauc_ndcg_at_1_max value: 41.93477154197192 - type: nauc_ndcg_at_1_std value: -5.585740476007292 - type: nauc_ndcg_at_20_diff1 value: 54.80576412827867 - type: nauc_ndcg_at_20_max value: 36.17913890066836 - type: nauc_ndcg_at_20_std value: -6.12502064111656 - type: nauc_ndcg_at_3_diff1 value: 54.2817506224585 - type: nauc_ndcg_at_3_max value: 37.06639697981944 - type: nauc_ndcg_at_3_std value: -6.891280077636147 - type: nauc_ndcg_at_5_diff1 value: 54.571252643462145 - type: nauc_ndcg_at_5_max value: 35.69460683404712 - type: nauc_ndcg_at_5_std value: -9.036434403536218 - type: nauc_precision_at_1000_diff1 value: -13.500194207099536 - type: nauc_precision_at_1000_max value: 18.740652839335294 - type: nauc_precision_at_1000_std value: 9.072398289027925 - type: nauc_precision_at_100_diff1 value: -7.592976565996694 - type: nauc_precision_at_100_max value: 23.613915598372913 - type: nauc_precision_at_100_std value: 13.90071301009494 - type: nauc_precision_at_10_diff1 value: 7.365592183215444 - type: nauc_precision_at_10_max 
value: 29.800185256342587 - type: nauc_precision_at_10_std value: 3.744510273082381 - type: nauc_precision_at_1_diff1 value: 66.19833424508124 - type: nauc_precision_at_1_max value: 41.93477154197192 - type: nauc_precision_at_1_std value: -5.585740476007292 - type: nauc_precision_at_20_diff1 value: 2.9850912031223027 - type: nauc_precision_at_20_max value: 28.444251916249858 - type: nauc_precision_at_20_std value: 8.710112231041764 - type: nauc_precision_at_3_diff1 value: 26.468793636735395 - type: nauc_precision_at_3_max value: 34.12528658732306 - type: nauc_precision_at_3_std value: 1.0607476235257753 - type: nauc_precision_at_5_diff1 value: 17.428849864167322 - type: nauc_precision_at_5_max value: 32.54536063105265 - type: nauc_precision_at_5_std value: 0.5060864305275099 - type: nauc_recall_at_1000_diff1 value: 12.950063661735578 - type: nauc_recall_at_1000_max value: 47.36046143913833 - type: nauc_recall_at_1000_std value: 49.79519393795783 - type: nauc_recall_at_100_diff1 value: 39.55288657109844 - type: nauc_recall_at_100_max value: 31.082583029607243 - type: nauc_recall_at_100_std value: 28.12812468144137 - type: nauc_recall_at_10_diff1 value: 43.38913585480907 - type: nauc_recall_at_10_max value: 25.76872144337921 - type: nauc_recall_at_10_std value: -6.201327950535028 - type: nauc_recall_at_1_diff1 value: 57.07597952448575 - type: nauc_recall_at_1_max value: 22.47576453804885 - type: nauc_recall_at_1_std value: -11.138477677983802 - type: nauc_recall_at_20_diff1 value: 41.98286618167725 - type: nauc_recall_at_20_max value: 28.781076750132904 - type: nauc_recall_at_20_std value: 1.541293472651189 - type: nauc_recall_at_3_diff1 value: 47.23473958532499 - type: nauc_recall_at_3_max value: 24.40930398363932 - type: nauc_recall_at_3_std value: -8.745405558227192 - type: nauc_recall_at_5_diff1 value: 46.34649884272527 - type: nauc_recall_at_5_max value: 25.46104522262028 - type: nauc_recall_at_5_std value: -9.245514565573428 - type: ndcg_at_1 value: 58.951 - type: ndcg_at_10 value: 60.42999999999999 - type: ndcg_at_100 value: 66.648 - type: ndcg_at_1000 value: 68.122 - type: ndcg_at_20 value: 63.037 - type: ndcg_at_3 value: 56.279 - type: ndcg_at_5 value: 57.75 - type: precision_at_1 value: 58.951 - type: precision_at_10 value: 16.543 - type: precision_at_100 value: 2.318 - type: precision_at_1000 value: 0.259 - type: precision_at_20 value: 9.46 - type: precision_at_3 value: 37.551 - type: precision_at_5 value: 27.468999999999998 - type: recall_at_1 value: 30.766 - type: recall_at_10 value: 67.881 - type: recall_at_100 value: 89.97 - type: recall_at_1000 value: 98.42699999999999 - type: recall_at_20 value: 75.8 - type: recall_at_3 value: 51.664 - type: recall_at_5 value: 59.146 - task: type: Retrieval dataset: name: MTEB HotpotQA (default) type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: main_score value: 85.06899999999999 - type: map_at_1 value: 44.646 - type: map_at_10 value: 79.423 - type: map_at_100 value: 80.023 - type: map_at_1000 value: 80.05 - type: map_at_20 value: 79.815 - type: map_at_3 value: 76.40400000000001 - type: map_at_5 value: 78.40700000000001 - type: mrr_at_1 value: 89.29101958136394 - type: mrr_at_10 value: 92.83030020470923 - type: mrr_at_100 value: 92.89619922624517 - type: mrr_at_1000 value: 92.8982900364215 - type: mrr_at_20 value: 92.87809305530763 - type: mrr_at_3 value: 92.4217870808011 - type: mrr_at_5 value: 92.69660139545331 - type: nauc_map_at_1000_diff1 value: 11.476753852342698 - type: 
nauc_map_at_1000_max value: 37.1657630288012 - type: nauc_map_at_1000_std value: -3.7230839618420775 - type: nauc_map_at_100_diff1 value: 11.45987221301917 - type: nauc_map_at_100_max value: 37.17835533633194 - type: nauc_map_at_100_std value: -3.6753524305543226 - type: nauc_map_at_10_diff1 value: 11.121152245183044 - type: nauc_map_at_10_max value: 37.15389218586682 - type: nauc_map_at_10_std value: -3.974058008911142 - type: nauc_map_at_1_diff1 value: 69.17067611683119 - type: nauc_map_at_1_max value: 54.248581019963346 - type: nauc_map_at_1_std value: -17.406304778196827 - type: nauc_map_at_20_diff1 value: 11.366094255795083 - type: nauc_map_at_20_max value: 37.16465497648482 - type: nauc_map_at_20_std value: -3.743886201207907 - type: nauc_map_at_3_diff1 value: 8.771079390164775 - type: nauc_map_at_3_max value: 34.45003060836015 - type: nauc_map_at_3_std value: -6.387997670782977 - type: nauc_map_at_5_diff1 value: 10.444404966316284 - type: nauc_map_at_5_max value: 36.24043817560293 - type: nauc_map_at_5_std value: -5.290879700364108 - type: nauc_mrr_at_1000_diff1 value: 69.31182134007688 - type: nauc_mrr_at_1000_max value: 58.10794883688435 - type: nauc_mrr_at_1000_std value: -14.437763262320157 - type: nauc_mrr_at_100_diff1 value: 69.31321132810844 - type: nauc_mrr_at_100_max value: 58.11528031674926 - type: nauc_mrr_at_100_std value: -14.418418407245253 - type: nauc_mrr_at_10_diff1 value: 69.33058951672848 - type: nauc_mrr_at_10_max value: 58.23233090153042 - type: nauc_mrr_at_10_std value: -14.459635862635956 - type: nauc_mrr_at_1_diff1 value: 69.17067611683119 - type: nauc_mrr_at_1_max value: 54.248581019963346 - type: nauc_mrr_at_1_std value: -17.406304778196827 - type: nauc_mrr_at_20_diff1 value: 69.31865474824586 - type: nauc_mrr_at_20_max value: 58.18045383095476 - type: nauc_mrr_at_20_std value: -14.424502166417044 - type: nauc_mrr_at_3_diff1 value: 69.34758038136334 - type: nauc_mrr_at_3_max value: 58.686180523896546 - type: nauc_mrr_at_3_std value: -13.927532406967845 - type: nauc_mrr_at_5_diff1 value: 69.36483170710724 - type: nauc_mrr_at_5_max value: 58.484546807832686 - type: nauc_mrr_at_5_std value: -14.302123574689945 - type: nauc_ndcg_at_1000_diff1 value: 19.316858213200383 - type: nauc_ndcg_at_1000_max value: 41.65496813740413 - type: nauc_ndcg_at_1000_std value: -1.8237712154543186 - type: nauc_ndcg_at_100_diff1 value: 18.835417436212417 - type: nauc_ndcg_at_100_max value: 42.003405491546374 - type: nauc_ndcg_at_100_std value: -0.5039819051709964 - type: nauc_ndcg_at_10_diff1 value: 17.10161831308711 - type: nauc_ndcg_at_10_max value: 41.65636177933502 - type: nauc_ndcg_at_10_std value: -1.8940003609702956 - type: nauc_ndcg_at_1_diff1 value: 69.17067611683119 - type: nauc_ndcg_at_1_max value: 54.248581019963346 - type: nauc_ndcg_at_1_std value: -17.406304778196827 - type: nauc_ndcg_at_20_diff1 value: 17.919100102088148 - type: nauc_ndcg_at_20_max value: 41.83808380933462 - type: nauc_ndcg_at_20_std value: -0.9834118681730851 - type: nauc_ndcg_at_3_diff1 value: 13.580700957247146 - type: nauc_ndcg_at_3_max value: 37.427835255465794 - type: nauc_ndcg_at_3_std value: -6.036599818188543 - type: nauc_ndcg_at_5_diff1 value: 15.7394452739053 - type: nauc_ndcg_at_5_max value: 39.831631744584726 - type: nauc_ndcg_at_5_std value: -4.565403203776222 - type: nauc_precision_at_1000_diff1 value: 9.235701933899772 - type: nauc_precision_at_1000_max value: 57.686852406131074 - type: nauc_precision_at_1000_std value: 62.58999953276888 - type: nauc_precision_at_100_diff1 value: 
7.6147702038230065 - type: nauc_precision_at_100_max value: 50.50811678765654 - type: nauc_precision_at_100_std value: 40.352220780618 - type: nauc_precision_at_10_diff1 value: 6.150235928002104 - type: nauc_precision_at_10_max value: 43.58417791580419 - type: nauc_precision_at_10_std value: 10.657139747169161 - type: nauc_precision_at_1_diff1 value: 69.17067611683119 - type: nauc_precision_at_1_max value: 54.248581019963346 - type: nauc_precision_at_1_std value: -17.406304778196827 - type: nauc_precision_at_20_diff1 value: 6.702558645159402 - type: nauc_precision_at_20_max value: 45.473374190530286 - type: nauc_precision_at_20_std value: 18.88950984539904 - type: nauc_precision_at_3_diff1 value: 3.3627793700917166 - type: nauc_precision_at_3_max value: 35.118907155393146 - type: nauc_precision_at_3_std value: -2.6939749063973712 - type: nauc_precision_at_5_diff1 value: 5.219236477058579 - type: nauc_precision_at_5_max value: 38.8780249665403 - type: nauc_precision_at_5_std value: 0.9525242312426645 - type: nauc_recall_at_1000_diff1 value: 9.235701933900325 - type: nauc_recall_at_1000_max value: 57.68685240613232 - type: nauc_recall_at_1000_std value: 62.58999953276827 - type: nauc_recall_at_100_diff1 value: 7.614770203822959 - type: nauc_recall_at_100_max value: 50.50811678765649 - type: nauc_recall_at_100_std value: 40.35222078061809 - type: nauc_recall_at_10_diff1 value: 6.1502359280022505 - type: nauc_recall_at_10_max value: 43.58417791580417 - type: nauc_recall_at_10_std value: 10.65713974716921 - type: nauc_recall_at_1_diff1 value: 69.17067611683119 - type: nauc_recall_at_1_max value: 54.248581019963346 - type: nauc_recall_at_1_std value: -17.406304778196827 - type: nauc_recall_at_20_diff1 value: 6.70255864515986 - type: nauc_recall_at_20_max value: 45.473374190530464 - type: nauc_recall_at_20_std value: 18.889509845399168 - type: nauc_recall_at_3_diff1 value: 3.3627793700916224 - type: nauc_recall_at_3_max value: 35.11890715539309 - type: nauc_recall_at_3_std value: -2.6939749063974934 - type: nauc_recall_at_5_diff1 value: 5.219236477058641 - type: nauc_recall_at_5_max value: 38.878024966540394 - type: nauc_recall_at_5_std value: 0.9525242312426386 - type: ndcg_at_1 value: 89.291 - type: ndcg_at_10 value: 85.06899999999999 - type: ndcg_at_100 value: 86.92800000000001 - type: ndcg_at_1000 value: 87.396 - type: ndcg_at_20 value: 85.98400000000001 - type: ndcg_at_3 value: 81.142 - type: ndcg_at_5 value: 83.482 - type: precision_at_1 value: 89.291 - type: precision_at_10 value: 17.721999999999998 - type: precision_at_100 value: 1.913 - type: precision_at_1000 value: 0.197 - type: precision_at_20 value: 9.154 - type: precision_at_3 value: 53.374 - type: precision_at_5 value: 33.858 - type: recall_at_1 value: 44.646 - type: recall_at_10 value: 88.60900000000001 - type: recall_at_100 value: 95.652 - type: recall_at_1000 value: 98.677 - type: recall_at_20 value: 91.53999999999999 - type: recall_at_3 value: 80.061 - type: recall_at_5 value: 84.646 - task: type: Classification dataset: name: MTEB ImdbClassification (default) type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 97.116 - type: ap value: 95.61597246771862 - type: ap_weighted value: 95.61597246771862 - type: f1 value: 97.11581660865501 - type: f1_weighted value: 97.11581660865501 - type: main_score value: 97.116 - task: type: Retrieval dataset: name: MTEB MSMARCO (default) type: mteb/msmarco config: default split: dev revision: 
c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: main_score value: 46.953 - type: map_at_1 value: 25.959 - type: map_at_10 value: 39.51 - type: map_at_100 value: 40.609 - type: map_at_1000 value: 40.644999999999996 - type: map_at_20 value: 40.224 - type: map_at_3 value: 35.392 - type: map_at_5 value: 37.766 - type: mrr_at_1 value: 26.690544412607448 - type: mrr_at_10 value: 40.106966616637266 - type: mrr_at_100 value: 41.13755847370217 - type: mrr_at_1000 value: 41.16777847481098 - type: mrr_at_20 value: 40.783076766593986 - type: mrr_at_3 value: 36.11270296084052 - type: mrr_at_5 value: 38.40926456542512 - type: nauc_map_at_1000_diff1 value: 39.6710067056517 - type: nauc_map_at_1000_max value: 10.87483897704713 - type: nauc_map_at_1000_std value: -20.730869459512995 - type: nauc_map_at_100_diff1 value: 39.66563776358951 - type: nauc_map_at_100_max value: 10.886783544343782 - type: nauc_map_at_100_std value: -20.696514188474595 - type: nauc_map_at_10_diff1 value: 39.54848621356586 - type: nauc_map_at_10_max value: 10.782501977081461 - type: nauc_map_at_10_std value: -21.339251906153176 - type: nauc_map_at_1_diff1 value: 42.46125655190777 - type: nauc_map_at_1_max value: 9.516075109194649 - type: nauc_map_at_1_std value: -19.320865814866934 - type: nauc_map_at_20_diff1 value: 39.63763950480564 - type: nauc_map_at_20_max value: 10.908897979009476 - type: nauc_map_at_20_std value: -20.809764811321074 - type: nauc_map_at_3_diff1 value: 39.624950980846016 - type: nauc_map_at_3_max value: 10.144965056588857 - type: nauc_map_at_3_std value: -21.70567699834146 - type: nauc_map_at_5_diff1 value: 39.493819680266576 - type: nauc_map_at_5_max value: 10.543659965042384 - type: nauc_map_at_5_std value: -21.9436321207301 - type: nauc_mrr_at_1000_diff1 value: 39.555522822191925 - type: nauc_mrr_at_1000_max value: 10.882072789273344 - type: nauc_mrr_at_1000_std value: -20.367806652930685 - type: nauc_mrr_at_100_diff1 value: 39.551190743623195 - type: nauc_mrr_at_100_max value: 10.894696967303437 - type: nauc_mrr_at_100_std value: -20.33697245843275 - type: nauc_mrr_at_10_diff1 value: 39.4294463675503 - type: nauc_mrr_at_10_max value: 10.836505973867053 - type: nauc_mrr_at_10_std value: -20.905185948930928 - type: nauc_mrr_at_1_diff1 value: 42.34067329761878 - type: nauc_mrr_at_1_max value: 9.456565176636124 - type: nauc_mrr_at_1_std value: -19.185583377889582 - type: nauc_mrr_at_20_diff1 value: 39.51157053032385 - type: nauc_mrr_at_20_max value: 10.941992137373491 - type: nauc_mrr_at_20_std value: -20.39093359912575 - type: nauc_mrr_at_3_diff1 value: 39.461678432819255 - type: nauc_mrr_at_3_max value: 10.068035799968815 - type: nauc_mrr_at_3_std value: -21.453050719235225 - type: nauc_mrr_at_5_diff1 value: 39.37721854572811 - type: nauc_mrr_at_5_max value: 10.605120811071991 - type: nauc_mrr_at_5_std value: -21.497967828146017 - type: nauc_ndcg_at_1000_diff1 value: 39.09202192742238 - type: nauc_ndcg_at_1000_max value: 11.761638529928815 - type: nauc_ndcg_at_1000_std value: -19.444831289565442 - type: nauc_ndcg_at_100_diff1 value: 38.97152012838735 - type: nauc_ndcg_at_100_max value: 12.214050153970273 - type: nauc_ndcg_at_100_std value: -18.26732665014131 - type: nauc_ndcg_at_10_diff1 value: 38.52063032636739 - type: nauc_ndcg_at_10_max value: 11.849852212561581 - type: nauc_ndcg_at_10_std value: -21.097994229230267 - type: nauc_ndcg_at_1_diff1 value: 42.34067329761878 - type: nauc_ndcg_at_1_max value: 9.456565176636124 - type: nauc_ndcg_at_1_std value: -19.185583377889582 - type: 
nauc_ndcg_at_20_diff1 value: 38.767174897150305 - type: nauc_ndcg_at_20_max value: 12.40151859998878 - type: nauc_ndcg_at_20_std value: -19.035740590846835 - type: nauc_ndcg_at_3_diff1 value: 38.71388245401873 - type: nauc_ndcg_at_3_max value: 10.312874860273876 - type: nauc_ndcg_at_3_std value: -22.27404790838238 - type: nauc_ndcg_at_5_diff1 value: 38.492038959591866 - type: nauc_ndcg_at_5_max value: 11.149342404425768 - type: nauc_ndcg_at_5_std value: -22.61234546512237 - type: nauc_precision_at_1000_diff1 value: -5.486841447788213 - type: nauc_precision_at_1000_max value: 6.615718354544881 - type: nauc_precision_at_1000_std value: 10.04243842006635 - type: nauc_precision_at_100_diff1 value: 11.232939816771065 - type: nauc_precision_at_100_max value: 17.67576270524247 - type: nauc_precision_at_100_std value: 19.047573617399472 - type: nauc_precision_at_10_diff1 value: 30.676072013454835 - type: nauc_precision_at_10_max value: 14.75809562209961 - type: nauc_precision_at_10_std value: -17.50476619193468 - type: nauc_precision_at_1_diff1 value: 42.34067329761878 - type: nauc_precision_at_1_max value: 9.456565176636124 - type: nauc_precision_at_1_std value: -19.185583377889582 - type: nauc_precision_at_20_diff1 value: 26.920657296632438 - type: nauc_precision_at_20_max value: 18.094903531906745 - type: nauc_precision_at_20_std value: -4.18147102678863 - type: nauc_precision_at_3_diff1 value: 35.519174964589084 - type: nauc_precision_at_3_max value: 10.714660070402955 - type: nauc_precision_at_3_std value: -23.38311934318837 - type: nauc_precision_at_5_diff1 value: 33.433375991740064 - type: nauc_precision_at_5_max value: 12.530653255416915 - type: nauc_precision_at_5_std value: -23.477121059826484 - type: nauc_recall_at_1000_diff1 value: 15.038131020872218 - type: nauc_recall_at_1000_max value: 52.333218716630604 - type: nauc_recall_at_1000_std value: 70.13549930950583 - type: nauc_recall_at_100_diff1 value: 32.31373486930844 - type: nauc_recall_at_100_max value: 30.66821001579242 - type: nauc_recall_at_100_std value: 27.438512640941344 - type: nauc_recall_at_10_diff1 value: 34.46014073487322 - type: nauc_recall_at_10_max value: 16.008148715322253 - type: nauc_recall_at_10_std value: -19.644412273123198 - type: nauc_recall_at_1_diff1 value: 42.46125655190777 - type: nauc_recall_at_1_max value: 9.516075109194649 - type: nauc_recall_at_1_std value: -19.320865814866934 - type: nauc_recall_at_20_diff1 value: 34.50974688479106 - type: nauc_recall_at_20_max value: 20.71543588804981 - type: nauc_recall_at_20_std value: -7.0665364765778085 - type: nauc_recall_at_3_diff1 value: 36.00358545418893 - type: nauc_recall_at_3_max value: 10.757101630957122 - type: nauc_recall_at_3_std value: -23.925615346200278 - type: nauc_recall_at_5_diff1 value: 35.27203980660536 - type: nauc_recall_at_5_max value: 12.885225206795074 - type: nauc_recall_at_5_std value: -24.826504608491927 - type: ndcg_at_1 value: 26.691 - type: ndcg_at_10 value: 46.953 - type: ndcg_at_100 value: 52.064 - type: ndcg_at_1000 value: 52.884 - type: ndcg_at_20 value: 49.453 - type: ndcg_at_3 value: 38.635000000000005 - type: ndcg_at_5 value: 42.845 - type: precision_at_1 value: 26.691 - type: precision_at_10 value: 7.3069999999999995 - type: precision_at_100 value: 0.985 - type: precision_at_1000 value: 0.106 - type: precision_at_20 value: 4.1739999999999995 - type: precision_at_3 value: 16.366 - type: precision_at_5 value: 11.968 - type: recall_at_1 value: 25.959 - type: recall_at_10 value: 69.827 - type: recall_at_100 value: 93.106 - type: 
recall_at_1000 value: 99.202 - type: recall_at_20 value: 79.47800000000001 - type: recall_at_3 value: 47.291 - type: recall_at_5 value: 57.410000000000004 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 97.4874601003192 - type: f1 value: 97.36031471758784 - type: f1_weighted value: 97.49998375560376 - type: main_score value: 97.4874601003192 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 92.5923392612859 - type: f1 value: 74.64775105973234 - type: f1_weighted value: 93.15766481161462 - type: main_score value: 92.5923392612859 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 79.64694014794888 - type: f1 value: 78.00188559408035 - type: f1_weighted value: 78.42211161866344 - type: main_score value: 79.64694014794888 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 81.97377269670477 - type: f1 value: 81.03593934499202 - type: f1_weighted value: 81.75486881920237 - type: main_score value: 81.97377269670477 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P (default) type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: main_score value: 46.35330699948092 - type: v_measure value: 46.35330699948092 - type: v_measure_std value: 1.206330851326003 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S (default) type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: main_score value: 44.544395689448606 - type: v_measure value: 44.544395689448606 - type: v_measure_std value: 1.4379792647567593 - task: type: Reranking dataset: name: MTEB MindSmallReranking (default) type: mteb/mind_small config: default split: test revision: 59042f120c80e8afa9cdbb224f67076cec0fc9a7 metrics: - type: main_score value: 31.915803712485374 - type: map value: 31.915803712485374 - type: mrr value: 33.09027679883215 - type: nAUC_map_diff1 value: 13.456673452431945 - type: nAUC_map_max value: -23.651825938757067 - type: nAUC_map_std value: 0.3375407219503167 - type: nAUC_mrr_diff1 value: 12.463118803762608 - type: nAUC_mrr_max value: -18.09342354713543 - type: nAUC_mrr_std value: 1.9293270887518033 - task: type: Retrieval dataset: name: MTEB NFCorpus (default) type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: main_score value: 41.637 - type: map_at_1 value: 7.53 - type: map_at_10 value: 16.336000000000002 - type: map_at_100 value: 20.443 - type: map_at_1000 value: 22.101000000000003 - type: map_at_20 value: 17.980999999999998 - type: map_at_3 value: 12.195 - type: map_at_5 value: 14.08 - type: mrr_at_1 value: 55.72755417956656 - type: mrr_at_10 value: 63.73077301095875 - type: mrr_at_100 value: 64.18980496826816 - type: mrr_at_1000 value: 64.21556725608919 - type: mrr_at_20 value: 63.982696837775464 - type: mrr_at_3 value: 
61.764705882352956 - type: mrr_at_5 value: 62.9876160990712 - type: nauc_map_at_1000_diff1 value: 25.27684971161249 - type: nauc_map_at_1000_max value: 25.971058540375907 - type: nauc_map_at_1000_std value: 11.417157393562036 - type: nauc_map_at_100_diff1 value: 26.90424278756609 - type: nauc_map_at_100_max value: 25.92415379799161 - type: nauc_map_at_100_std value: 8.467096693052966 - type: nauc_map_at_10_diff1 value: 30.5684894966239 - type: nauc_map_at_10_max value: 20.655144100299378 - type: nauc_map_at_10_std value: -3.294957382654768 - type: nauc_map_at_1_diff1 value: 38.20412667064694 - type: nauc_map_at_1_max value: 6.517051875945019 - type: nauc_map_at_1_std value: -18.92343889282965 - type: nauc_map_at_20_diff1 value: 28.816861447370396 - type: nauc_map_at_20_max value: 23.253143352754986 - type: nauc_map_at_20_std value: 1.5352481409482799 - type: nauc_map_at_3_diff1 value: 34.120468697280565 - type: nauc_map_at_3_max value: 11.9666393349414 - type: nauc_map_at_3_std value: -12.952508225591602 - type: nauc_map_at_5_diff1 value: 32.62576277583343 - type: nauc_map_at_5_max value: 16.473606487319906 - type: nauc_map_at_5_std value: -9.576801793019232 - type: nauc_mrr_at_1000_diff1 value: 28.448466196490195 - type: nauc_mrr_at_1000_max value: 43.81412526376393 - type: nauc_mrr_at_1000_std value: 28.42843028964146 - type: nauc_mrr_at_100_diff1 value: 28.460224532667095 - type: nauc_mrr_at_100_max value: 43.84593594974866 - type: nauc_mrr_at_100_std value: 28.46912827253933 - type: nauc_mrr_at_10_diff1 value: 28.690985631444615 - type: nauc_mrr_at_10_max value: 43.974770782378386 - type: nauc_mrr_at_10_std value: 28.51956491816473 - type: nauc_mrr_at_1_diff1 value: 27.3086209200488 - type: nauc_mrr_at_1_max value: 38.07303248469795 - type: nauc_mrr_at_1_std value: 20.709240613906527 - type: nauc_mrr_at_20_diff1 value: 28.552065964998235 - type: nauc_mrr_at_20_max value: 43.744458103169315 - type: nauc_mrr_at_20_std value: 28.402506194184905 - type: nauc_mrr_at_3_diff1 value: 27.730403059259608 - type: nauc_mrr_at_3_max value: 42.20585003585133 - type: nauc_mrr_at_3_std value: 26.718571419601084 - type: nauc_mrr_at_5_diff1 value: 29.33310055666175 - type: nauc_mrr_at_5_max value: 44.111733784327164 - type: nauc_mrr_at_5_std value: 28.29506653590657 - type: nauc_ndcg_at_1000_diff1 value: 21.26014183072658 - type: nauc_ndcg_at_1000_max value: 41.195088206883675 - type: nauc_ndcg_at_1000_std value: 30.278135548842954 - type: nauc_ndcg_at_100_diff1 value: 22.69557663224945 - type: nauc_ndcg_at_100_max value: 35.60418790750368 - type: nauc_ndcg_at_100_std value: 24.941317286797968 - type: nauc_ndcg_at_10_diff1 value: 20.20455534684691 - type: nauc_ndcg_at_10_max value: 34.770019783887086 - type: nauc_ndcg_at_10_std value: 25.044817711794632 - type: nauc_ndcg_at_1_diff1 value: 27.831501669232427 - type: nauc_ndcg_at_1_max value: 35.02366104222839 - type: nauc_ndcg_at_1_std value: 18.878543560031463 - type: nauc_ndcg_at_20_diff1 value: 19.95892426656778 - type: nauc_ndcg_at_20_max value: 33.56557032663233 - type: nauc_ndcg_at_20_std value: 24.58541457944349 - type: nauc_ndcg_at_3_diff1 value: 19.483573161365637 - type: nauc_ndcg_at_3_max value: 33.974627645090656 - type: nauc_ndcg_at_3_std value: 22.70199646954241 - type: nauc_ndcg_at_5_diff1 value: 20.12828926706299 - type: nauc_ndcg_at_5_max value: 35.99555106126075 - type: nauc_ndcg_at_5_std value: 23.96895850122589 - type: nauc_precision_at_1000_diff1 value: -16.228443380387436 - type: nauc_precision_at_1000_max value: -3.467317554835451 
- type: nauc_precision_at_1000_std value: 28.504803419031216 - type: nauc_precision_at_100_diff1 value: -11.262295635836242 - type: nauc_precision_at_100_max value: 12.097064077938553 - type: nauc_precision_at_100_std value: 41.28475878388585 - type: nauc_precision_at_10_diff1 value: 0.7650314640190137 - type: nauc_precision_at_10_max value: 33.9585164733529 - type: nauc_precision_at_10_std value: 40.61329518683339 - type: nauc_precision_at_1_diff1 value: 27.3086209200488 - type: nauc_precision_at_1_max value: 38.07303248469795 - type: nauc_precision_at_1_std value: 20.709240613906527 - type: nauc_precision_at_20_diff1 value: -4.558564624271216 - type: nauc_precision_at_20_max value: 27.13856332906664 - type: nauc_precision_at_20_std value: 42.12140274064272 - type: nauc_precision_at_3_diff1 value: 9.588685288153918 - type: nauc_precision_at_3_max value: 35.29163422596201 - type: nauc_precision_at_3_std value: 29.995548006354767 - type: nauc_precision_at_5_diff1 value: 6.038684642831916 - type: nauc_precision_at_5_max value: 36.99198222019991 - type: nauc_precision_at_5_std value: 34.521935782921574 - type: nauc_recall_at_1000_diff1 value: 0.32576663148125407 - type: nauc_recall_at_1000_max value: 15.490494023869328 - type: nauc_recall_at_1000_std value: 11.569907814538599 - type: nauc_recall_at_100_diff1 value: 17.776482055874833 - type: nauc_recall_at_100_max value: 23.55537598931242 - type: nauc_recall_at_100_std value: 13.631439042523663 - type: nauc_recall_at_10_diff1 value: 27.501346681795606 - type: nauc_recall_at_10_max value: 20.1455575998851 - type: nauc_recall_at_10_std value: -2.442745213398243 - type: nauc_recall_at_1_diff1 value: 38.20412667064694 - type: nauc_recall_at_1_max value: 6.517051875945019 - type: nauc_recall_at_1_std value: -18.92343889282965 - type: nauc_recall_at_20_diff1 value: 22.327269466837716 - type: nauc_recall_at_20_max value: 20.64376748172875 - type: nauc_recall_at_20_std value: 1.935128530794907 - type: nauc_recall_at_3_diff1 value: 33.024923789813734 - type: nauc_recall_at_3_max value: 12.533910144337563 - type: nauc_recall_at_3_std value: -11.842839534832267 - type: nauc_recall_at_5_diff1 value: 30.470552340593475 - type: nauc_recall_at_5_max value: 16.84504072062648 - type: nauc_recall_at_5_std value: -9.11188909013892 - type: ndcg_at_1 value: 53.715 - type: ndcg_at_10 value: 41.637 - type: ndcg_at_100 value: 37.804 - type: ndcg_at_1000 value: 46.601 - type: ndcg_at_20 value: 38.717 - type: ndcg_at_3 value: 48.449999999999996 - type: ndcg_at_5 value: 45.457 - type: precision_at_1 value: 55.728 - type: precision_at_10 value: 30.248 - type: precision_at_100 value: 9.241000000000001 - type: precision_at_1000 value: 2.249 - type: precision_at_20 value: 21.873 - type: precision_at_3 value: 44.995000000000005 - type: precision_at_5 value: 38.638 - type: recall_at_1 value: 7.53 - type: recall_at_10 value: 20.596 - type: recall_at_100 value: 37.551 - type: recall_at_1000 value: 69.704 - type: recall_at_20 value: 24.898 - type: recall_at_3 value: 13.142999999999999 - type: recall_at_5 value: 16.273 - task: type: Retrieval dataset: name: MTEB NQ (default) type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: main_score value: 73.126 - type: map_at_1 value: 50.007000000000005 - type: map_at_10 value: 66.554 - type: map_at_100 value: 67.109 - type: map_at_1000 value: 67.116 - type: map_at_20 value: 66.971 - type: map_at_3 value: 63.007999999999996 - type: map_at_5 value: 65.265 - type: mrr_at_1 value: 
55.64889918887601 - type: mrr_at_10 value: 68.83588303996768 - type: mrr_at_100 value: 69.1872068620609 - type: mrr_at_1000 value: 69.19179569860044 - type: mrr_at_20 value: 69.10463699451512 - type: mrr_at_3 value: 66.53147933565081 - type: mrr_at_5 value: 68.02481653147939 - type: nauc_map_at_1000_diff1 value: 47.18669570613092 - type: nauc_map_at_1000_max value: 18.68424064265635 - type: nauc_map_at_1000_std value: -11.863286002547126 - type: nauc_map_at_100_diff1 value: 47.18836757991195 - type: nauc_map_at_100_max value: 18.69174474288196 - type: nauc_map_at_100_std value: -11.856291584521376 - type: nauc_map_at_10_diff1 value: 47.039109334504936 - type: nauc_map_at_10_max value: 18.63000255416953 - type: nauc_map_at_10_std value: -12.078537835276322 - type: nauc_map_at_1_diff1 value: 50.754376398540025 - type: nauc_map_at_1_max value: 14.414068509351408 - type: nauc_map_at_1_std value: -12.056006802164365 - type: nauc_map_at_20_diff1 value: 47.165869662723814 - type: nauc_map_at_20_max value: 18.679726182048565 - type: nauc_map_at_20_std value: -11.882813446994122 - type: nauc_map_at_3_diff1 value: 47.17614567484428 - type: nauc_map_at_3_max value: 17.463092926606926 - type: nauc_map_at_3_std value: -13.948902704214555 - type: nauc_map_at_5_diff1 value: 46.72801881916766 - type: nauc_map_at_5_max value: 18.449893630335772 - type: nauc_map_at_5_std value: -12.841742668606646 - type: nauc_mrr_at_1000_diff1 value: 47.21047474253297 - type: nauc_mrr_at_1000_max value: 20.210274696727364 - type: nauc_mrr_at_1000_std value: -9.265366681393246 - type: nauc_mrr_at_100_diff1 value: 47.21127150996105 - type: nauc_mrr_at_100_max value: 20.214126957060877 - type: nauc_mrr_at_100_std value: -9.261994363035797 - type: nauc_mrr_at_10_diff1 value: 47.06402290476898 - type: nauc_mrr_at_10_max value: 20.31796371272899 - type: nauc_mrr_at_10_std value: -9.152764728116985 - type: nauc_mrr_at_1_diff1 value: 49.8307631122254 - type: nauc_mrr_at_1_max value: 17.47598417771661 - type: nauc_mrr_at_1_std value: -9.49674057336471 - type: nauc_mrr_at_20_diff1 value: 47.18513007335988 - type: nauc_mrr_at_20_max value: 20.252358573018245 - type: nauc_mrr_at_20_std value: -9.214801839221886 - type: nauc_mrr_at_3_diff1 value: 46.64614768046736 - type: nauc_mrr_at_3_max value: 19.87143239314646 - type: nauc_mrr_at_3_std value: -10.34232693967581 - type: nauc_mrr_at_5_diff1 value: 46.74884993767054 - type: nauc_mrr_at_5_max value: 20.574339243146493 - type: nauc_mrr_at_5_std value: -9.266854509630672 - type: nauc_ndcg_at_1000_diff1 value: 46.89639718474923 - type: nauc_ndcg_at_1000_max value: 20.133464082440813 - type: nauc_ndcg_at_1000_std value: -10.136403176776762 - type: nauc_ndcg_at_100_diff1 value: 46.94604670075998 - type: nauc_ndcg_at_100_max value: 20.30481020840327 - type: nauc_ndcg_at_100_std value: -9.992008548452375 - type: nauc_ndcg_at_10_diff1 value: 46.226034698994 - type: nauc_ndcg_at_10_max value: 20.372991777536704 - type: nauc_ndcg_at_10_std value: -10.330637856640887 - type: nauc_ndcg_at_1_diff1 value: 49.8307631122254 - type: nauc_ndcg_at_1_max value: 17.47598417771661 - type: nauc_ndcg_at_1_std value: -9.49674057336471 - type: nauc_ndcg_at_20_diff1 value: 46.78659214860835 - type: nauc_ndcg_at_20_max value: 20.48509335588056 - type: nauc_ndcg_at_20_std value: -9.898813769306736 - type: nauc_ndcg_at_3_diff1 value: 46.01631244510983 - type: nauc_ndcg_at_3_max value: 18.547006064547897 - type: nauc_ndcg_at_3_std value: -13.713131545462975 - type: nauc_ndcg_at_5_diff1 value: 45.46699495623331 - 
type: nauc_ndcg_at_5_max value: 20.19455543242537 - type: nauc_ndcg_at_5_std value: -11.735785669665546 - type: nauc_precision_at_1000_diff1 value: -19.6725027340972 - type: nauc_precision_at_1000_max value: 8.239500230778967 - type: nauc_precision_at_1000_std value: 18.185396232087044 - type: nauc_precision_at_100_diff1 value: -17.384676147750394 - type: nauc_precision_at_100_max value: 9.811312357772447 - type: nauc_precision_at_100_std value: 18.262922138203074 - type: nauc_precision_at_10_diff1 value: -4.565096979365089 - type: nauc_precision_at_10_max value: 14.723637734577657 - type: nauc_precision_at_10_std value: 11.738270443190999 - type: nauc_precision_at_1_diff1 value: 49.8307631122254 - type: nauc_precision_at_1_max value: 17.47598417771661 - type: nauc_precision_at_1_std value: -9.49674057336471 - type: nauc_precision_at_20_diff1 value: -11.093040956072567 - type: nauc_precision_at_20_max value: 12.812652426198307 - type: nauc_precision_at_20_std value: 16.31190423150337 - type: nauc_precision_at_3_diff1 value: 17.742178440333838 - type: nauc_precision_at_3_max value: 18.244307263178587 - type: nauc_precision_at_3_std value: -4.52095614953577 - type: nauc_precision_at_5_diff1 value: 4.2407791176053 - type: nauc_precision_at_5_max value: 17.730073855982013 - type: nauc_precision_at_5_std value: 4.421737575346646 - type: nauc_recall_at_1000_diff1 value: 64.75884284672502 - type: nauc_recall_at_1000_max value: 81.39134841048885 - type: nauc_recall_at_1000_std value: 68.94995040037654 - type: nauc_recall_at_100_diff1 value: 54.86836249976742 - type: nauc_recall_at_100_max value: 51.18334605792757 - type: nauc_recall_at_100_std value: 23.53425608322261 - type: nauc_recall_at_10_diff1 value: 39.74286222080068 - type: nauc_recall_at_10_max value: 27.07630794036662 - type: nauc_recall_at_10_std value: -4.704598869436552 - type: nauc_recall_at_1_diff1 value: 50.754376398540025 - type: nauc_recall_at_1_max value: 14.414068509351408 - type: nauc_recall_at_1_std value: -12.056006802164365 - type: nauc_recall_at_20_diff1 value: 44.47057728218783 - type: nauc_recall_at_20_max value: 34.00822507406391 - type: nauc_recall_at_20_std value: 3.8543138118661564 - type: nauc_recall_at_3_diff1 value: 41.26016286195595 - type: nauc_recall_at_3_max value: 18.702034417473406 - type: nauc_recall_at_3_std value: -16.646799273512976 - type: nauc_recall_at_5_diff1 value: 38.215211778009014 - type: nauc_recall_at_5_max value: 23.799941809149665 - type: nauc_recall_at_5_std value: -11.748013163677792 - type: ndcg_at_1 value: 55.649 - type: ndcg_at_10 value: 73.126 - type: ndcg_at_100 value: 75.149 - type: ndcg_at_1000 value: 75.298 - type: ndcg_at_20 value: 74.383 - type: ndcg_at_3 value: 67.092 - type: ndcg_at_5 value: 70.551 - type: precision_at_1 value: 55.649 - type: precision_at_10 value: 10.872 - type: precision_at_100 value: 1.2 - type: precision_at_1000 value: 0.121 - type: precision_at_20 value: 5.747 - type: precision_at_3 value: 29.297 - type: precision_at_5 value: 19.78 - type: recall_at_1 value: 50.007000000000005 - type: recall_at_10 value: 90.283 - type: recall_at_100 value: 98.581 - type: recall_at_1000 value: 99.667 - type: recall_at_20 value: 94.841 - type: recall_at_3 value: 75.285 - type: recall_at_5 value: 83.024 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval (default) type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: main_score value: 90.839 - type: map_at_1 value: 73.421 - type: map_at_10 value: 87.639 - type: 
map_at_100 value: 88.221 - type: map_at_1000 value: 88.232 - type: map_at_20 value: 88.034 - type: map_at_3 value: 84.83000000000001 - type: map_at_5 value: 86.629 - type: mrr_at_1 value: 84.54 - type: mrr_at_10 value: 90.01742460317452 - type: mrr_at_100 value: 90.08279099064099 - type: mrr_at_1000 value: 90.08329964638419 - type: mrr_at_20 value: 90.06798960490823 - type: mrr_at_3 value: 89.27166666666653 - type: mrr_at_5 value: 89.80566666666653 - type: nauc_map_at_1000_diff1 value: 78.94450302318565 - type: nauc_map_at_1000_max value: 21.67625162872836 - type: nauc_map_at_1000_std value: -63.29675000755327 - type: nauc_map_at_100_diff1 value: 78.94646726364348 - type: nauc_map_at_100_max value: 21.6551828156032 - type: nauc_map_at_100_std value: -63.3829114435698 - type: nauc_map_at_10_diff1 value: 79.07130514924289 - type: nauc_map_at_10_max value: 21.029474388630167 - type: nauc_map_at_10_std value: -66.16145419623864 - type: nauc_map_at_1_diff1 value: 82.88901067630324 - type: nauc_map_at_1_max value: 12.573604392527388 - type: nauc_map_at_1_std value: -52.749186733215225 - type: nauc_map_at_20_diff1 value: 78.97298021230225 - type: nauc_map_at_20_max value: 21.43310499210088 - type: nauc_map_at_20_std value: -64.55215744604995 - type: nauc_map_at_3_diff1 value: 79.55992375077174 - type: nauc_map_at_3_max value: 17.83922225961089 - type: nauc_map_at_3_std value: -67.60995960703734 - type: nauc_map_at_5_diff1 value: 79.21164758703671 - type: nauc_map_at_5_max value: 19.59527815422847 - type: nauc_map_at_5_std value: -67.9287301041756 - type: nauc_mrr_at_1000_diff1 value: 80.01626963351147 - type: nauc_mrr_at_1000_max value: 24.919579847819673 - type: nauc_mrr_at_1000_std value: -58.43670285108221 - type: nauc_mrr_at_100_diff1 value: 80.01596090999094 - type: nauc_mrr_at_100_max value: 24.921581361750793 - type: nauc_mrr_at_100_std value: -58.43657210943033 - type: nauc_mrr_at_10_diff1 value: 80.01464640225531 - type: nauc_mrr_at_10_max value: 24.91237413825385 - type: nauc_mrr_at_10_std value: -58.71735238132195 - type: nauc_mrr_at_1_diff1 value: 80.84755775565024 - type: nauc_mrr_at_1_max value: 24.048296551820155 - type: nauc_mrr_at_1_std value: -53.884030276206516 - type: nauc_mrr_at_20_diff1 value: 79.99741034772767 - type: nauc_mrr_at_20_max value: 24.943728519243756 - type: nauc_mrr_at_20_std value: -58.51751322910784 - type: nauc_mrr_at_3_diff1 value: 79.7653220164068 - type: nauc_mrr_at_3_max value: 25.025848848269156 - type: nauc_mrr_at_3_std value: -59.493163015268316 - type: nauc_mrr_at_5_diff1 value: 79.96533560568444 - type: nauc_mrr_at_5_max value: 24.938296862422455 - type: nauc_mrr_at_5_std value: -59.26421531550765 - type: nauc_ndcg_at_1000_diff1 value: 79.0157385513832 - type: nauc_ndcg_at_1000_max value: 23.485590713985207 - type: nauc_ndcg_at_1000_std value: -61.02018587192127 - type: nauc_ndcg_at_100_diff1 value: 79.03718804775596 - type: nauc_ndcg_at_100_max value: 23.461097497821058 - type: nauc_ndcg_at_100_std value: -61.437170125531026 - type: nauc_ndcg_at_10_diff1 value: 79.03778030952117 - type: nauc_ndcg_at_10_max value: 22.306247124002667 - type: nauc_ndcg_at_10_std value: -66.37655652467825 - type: nauc_ndcg_at_1_diff1 value: 80.78698638087498 - type: nauc_ndcg_at_1_max value: 24.143135601982355 - type: nauc_ndcg_at_1_std value: -53.77140852744596 - type: nauc_ndcg_at_20_diff1 value: 78.95638373678379 - type: nauc_ndcg_at_20_max value: 23.041116927862166 - type: nauc_ndcg_at_20_std value: -64.2045609779128 - type: nauc_ndcg_at_3_diff1 value: 
78.2352967181823 - type: nauc_ndcg_at_3_max value: 20.83099937618778 - type: nauc_ndcg_at_3_std value: -66.13364999506068 - type: nauc_ndcg_at_5_diff1 value: 78.79567409841862 - type: nauc_ndcg_at_5_max value: 21.21973275803562 - type: nauc_ndcg_at_5_std value: -67.8610915215582 - type: nauc_precision_at_1000_diff1 value: -45.759236311035494 - type: nauc_precision_at_1000_max value: 1.624346826365352 - type: nauc_precision_at_1000_std value: 50.35054240700859 - type: nauc_precision_at_100_diff1 value: -45.61750464531647 - type: nauc_precision_at_100_max value: 1.6882992676795647 - type: nauc_precision_at_100_std value: 48.51482129194453 - type: nauc_precision_at_10_diff1 value: -42.67864438722293 - type: nauc_precision_at_10_max value: 3.611102354794688 - type: nauc_precision_at_10_std value: 32.63229368884846 - type: nauc_precision_at_1_diff1 value: 80.78698638087498 - type: nauc_precision_at_1_max value: 24.143135601982355 - type: nauc_precision_at_1_std value: -53.77140852744596 - type: nauc_precision_at_20_diff1 value: -44.71362663840423 - type: nauc_precision_at_20_max value: 2.2677130284710976 - type: nauc_precision_at_20_std value: 40.43971067749938 - type: nauc_precision_at_3_diff1 value: -26.861947543051734 - type: nauc_precision_at_3_max value: 7.134339421476951 - type: nauc_precision_at_3_std value: 7.008861396866532 - type: nauc_precision_at_5_diff1 value: -37.10691793810955 - type: nauc_precision_at_5_max value: 5.040683622641268 - type: nauc_precision_at_5_std value: 20.608599055818505 - type: nauc_recall_at_1000_diff1 value: 6.192329873055151 - type: nauc_recall_at_1000_max value: -36.96483495363618 - type: nauc_recall_at_1000_std value: -41.34776459607992 - type: nauc_recall_at_100_diff1 value: 77.37809186979416 - type: nauc_recall_at_100_max value: 31.55427918142737 - type: nauc_recall_at_100_std value: -96.51410111206182 - type: nauc_recall_at_10_diff1 value: 76.0312700074355 - type: nauc_recall_at_10_max value: 16.91669426208751 - type: nauc_recall_at_10_std value: -106.12372635024161 - type: nauc_recall_at_1_diff1 value: 82.88901067630324 - type: nauc_recall_at_1_max value: 12.573604392527388 - type: nauc_recall_at_1_std value: -52.749186733215225 - type: nauc_recall_at_20_diff1 value: 73.49587098335563 - type: nauc_recall_at_20_max value: 22.323653643240327 - type: nauc_recall_at_20_std value: -111.38327429874822 - type: nauc_recall_at_3_diff1 value: 76.03399643505598 - type: nauc_recall_at_3_max value: 13.886956219033063 - type: nauc_recall_at_3_std value: -81.9281750750836 - type: nauc_recall_at_5_diff1 value: 75.17555824290534 - type: nauc_recall_at_5_max value: 14.122281249673318 - type: nauc_recall_at_5_std value: -94.53943602513391 - type: ndcg_at_1 value: 84.57000000000001 - type: ndcg_at_10 value: 90.839 - type: ndcg_at_100 value: 91.757 - type: ndcg_at_1000 value: 91.809 - type: ndcg_at_20 value: 91.36999999999999 - type: ndcg_at_3 value: 88.5 - type: ndcg_at_5 value: 89.838 - type: precision_at_1 value: 84.57000000000001 - type: precision_at_10 value: 13.758999999999999 - type: precision_at_100 value: 1.544 - type: precision_at_1000 value: 0.157 - type: precision_at_20 value: 7.268 - type: precision_at_3 value: 38.84 - type: precision_at_5 value: 25.428 - type: recall_at_1 value: 73.421 - type: recall_at_10 value: 96.808 - type: recall_at_100 value: 99.785 - type: recall_at_1000 value: 99.995 - type: recall_at_20 value: 98.482 - type: recall_at_3 value: 89.87 - type: recall_at_5 value: 93.813 - task: type: Clustering dataset: name: MTEB RedditClustering 
(default) type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: main_score value: 72.31956703115198 - type: v_measure value: 72.31956703115198 - type: v_measure_std value: 2.6728641413421994 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P (default) type: mteb/reddit-clustering-p2p config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: main_score value: 73.19857589812344 - type: v_measure value: 73.19857589812344 - type: v_measure_std value: 12.845755797705918 - task: type: Retrieval dataset: name: MTEB SCIDOCS (default) type: mteb/scidocs config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: main_score value: 27.511999999999997 - type: map_at_1 value: 6.273 - type: map_at_10 value: 17.108 - type: map_at_100 value: 20.195 - type: map_at_1000 value: 20.589 - type: map_at_20 value: 18.683 - type: map_at_3 value: 11.855 - type: map_at_5 value: 14.457 - type: mrr_at_1 value: 30.9 - type: mrr_at_10 value: 44.24674603174597 - type: mrr_at_100 value: 45.32635060201483 - type: mrr_at_1000 value: 45.347831033779634 - type: mrr_at_20 value: 45.026240806836675 - type: mrr_at_3 value: 40.48333333333329 - type: mrr_at_5 value: 42.83333333333325 - type: nauc_map_at_1000_diff1 value: 13.747655770236225 - type: nauc_map_at_1000_max value: 31.223661693790223 - type: nauc_map_at_1000_std value: 15.886563579045221 - type: nauc_map_at_100_diff1 value: 13.71435331929968 - type: nauc_map_at_100_max value: 31.080863571394453 - type: nauc_map_at_100_std value: 15.785267773740896 - type: nauc_map_at_10_diff1 value: 13.790060894019742 - type: nauc_map_at_10_max value: 30.03655524565939 - type: nauc_map_at_10_std value: 12.280310681648675 - type: nauc_map_at_1_diff1 value: 22.3814913947547 - type: nauc_map_at_1_max value: 23.719991394973757 - type: nauc_map_at_1_std value: 2.8049800493956516 - type: nauc_map_at_20_diff1 value: 13.712619579289667 - type: nauc_map_at_20_max value: 30.324598336820223 - type: nauc_map_at_20_std value: 14.121946680951606 - type: nauc_map_at_3_diff1 value: 16.992084439290416 - type: nauc_map_at_3_max value: 27.358138408688493 - type: nauc_map_at_3_std value: 5.786253517779604 - type: nauc_map_at_5_diff1 value: 14.738933140636526 - type: nauc_map_at_5_max value: 27.825328120128432 - type: nauc_map_at_5_std value: 9.041135537664314 - type: nauc_mrr_at_1000_diff1 value: 19.83087365514557 - type: nauc_mrr_at_1000_max value: 25.801065652005573 - type: nauc_mrr_at_1000_std value: 7.3384785848646645 - type: nauc_mrr_at_100_diff1 value: 19.8286668140047 - type: nauc_mrr_at_100_max value: 25.813986643191488 - type: nauc_mrr_at_100_std value: 7.3750422568877445 - type: nauc_mrr_at_10_diff1 value: 19.78000542708269 - type: nauc_mrr_at_10_max value: 25.778614758390695 - type: nauc_mrr_at_10_std value: 7.394908840787731 - type: nauc_mrr_at_1_diff1 value: 22.802033352031128 - type: nauc_mrr_at_1_max value: 24.21876156001524 - type: nauc_mrr_at_1_std value: 2.98142461087729 - type: nauc_mrr_at_20_diff1 value: 19.8846401290781 - type: nauc_mrr_at_20_max value: 25.84776690911097 - type: nauc_mrr_at_20_std value: 7.421879871925152 - type: nauc_mrr_at_3_diff1 value: 18.925200162278294 - type: nauc_mrr_at_3_max value: 25.145957384682287 - type: nauc_mrr_at_3_std value: 6.257065754774556 - type: nauc_mrr_at_5_diff1 value: 19.941778349778893 - type: nauc_mrr_at_5_max value: 25.381438123852814 - type: nauc_mrr_at_5_std value: 6.610135974208344 
- type: nauc_ndcg_at_1000_diff1 value: 15.060522593908921 - type: nauc_ndcg_at_1000_max value: 33.040413676455096 - type: nauc_ndcg_at_1000_std value: 20.529145075296498 - type: nauc_ndcg_at_100_diff1 value: 14.93838154527601 - type: nauc_ndcg_at_100_max value: 32.84354243075032 - type: nauc_ndcg_at_100_std value: 21.496012772659228 - type: nauc_ndcg_at_10_diff1 value: 14.785241848843627 - type: nauc_ndcg_at_10_max value: 30.08554427695474 - type: nauc_ndcg_at_10_std value: 14.269404725478992 - type: nauc_ndcg_at_1_diff1 value: 22.802033352031128 - type: nauc_ndcg_at_1_max value: 24.21876156001524 - type: nauc_ndcg_at_1_std value: 2.98142461087729 - type: nauc_ndcg_at_20_diff1 value: 15.01656763549395 - type: nauc_ndcg_at_20_max value: 30.883627008565284 - type: nauc_ndcg_at_20_std value: 16.94912353681998 - type: nauc_ndcg_at_3_diff1 value: 17.297499190613213 - type: nauc_ndcg_at_3_max value: 27.357890164110664 - type: nauc_ndcg_at_3_std value: 6.893804534662216 - type: nauc_ndcg_at_5_diff1 value: 15.924309842520637 - type: nauc_ndcg_at_5_max value: 27.479136064733765 - type: nauc_ndcg_at_5_std value: 9.948267317903682 - type: nauc_precision_at_1000_diff1 value: 2.9505514993324202 - type: nauc_precision_at_1000_max value: 28.097522763631076 - type: nauc_precision_at_1000_std value: 34.87676966934099 - type: nauc_precision_at_100_diff1 value: 8.102514216525794 - type: nauc_precision_at_100_max value: 31.104482194200216 - type: nauc_precision_at_100_std value: 35.09394894296658 - type: nauc_precision_at_10_diff1 value: 9.973864747113952 - type: nauc_precision_at_10_max value: 29.806997016747637 - type: nauc_precision_at_10_std value: 19.687557911796002 - type: nauc_precision_at_1_diff1 value: 22.802033352031128 - type: nauc_precision_at_1_max value: 24.21876156001524 - type: nauc_precision_at_1_std value: 2.98142461087729 - type: nauc_precision_at_20_diff1 value: 10.181594464083945 - type: nauc_precision_at_20_max value: 30.011941337125787 - type: nauc_precision_at_20_std value: 24.349813617177965 - type: nauc_precision_at_3_diff1 value: 15.133902637180615 - type: nauc_precision_at_3_max value: 27.96188889214405 - type: nauc_precision_at_3_std value: 8.460528750892308 - type: nauc_precision_at_5_diff1 value: 12.936142554150104 - type: nauc_precision_at_5_max value: 27.411606756811253 - type: nauc_precision_at_5_std value: 13.169657188017908 - type: nauc_recall_at_1000_diff1 value: 2.512433310192269 - type: nauc_recall_at_1000_max value: 30.177030038941073 - type: nauc_recall_at_1000_std value: 38.312954102427724 - type: nauc_recall_at_100_diff1 value: 7.823448451909615 - type: nauc_recall_at_100_max value: 31.19432389386968 - type: nauc_recall_at_100_std value: 35.52197719733696 - type: nauc_recall_at_10_diff1 value: 9.827206383532387 - type: nauc_recall_at_10_max value: 29.537065984308487 - type: nauc_recall_at_10_std value: 19.695443424011145 - type: nauc_recall_at_1_diff1 value: 22.3814913947547 - type: nauc_recall_at_1_max value: 23.719991394973757 - type: nauc_recall_at_1_std value: 2.8049800493956516 - type: nauc_recall_at_20_diff1 value: 10.030101302198451 - type: nauc_recall_at_20_max value: 29.624570420528862 - type: nauc_recall_at_20_std value: 24.383550437133433 - type: nauc_recall_at_3_diff1 value: 14.694309974964243 - type: nauc_recall_at_3_max value: 27.534902291293147 - type: nauc_recall_at_3_std value: 8.299178907366707 - type: nauc_recall_at_5_diff1 value: 12.701200029350348 - type: nauc_recall_at_5_max value: 26.96005349769535 - type: nauc_recall_at_5_std value: 
13.083449511827958 - type: ndcg_at_1 value: 30.9 - type: ndcg_at_10 value: 27.511999999999997 - type: ndcg_at_100 value: 38.072 - type: ndcg_at_1000 value: 43.501 - type: ndcg_at_20 value: 31.517 - type: ndcg_at_3 value: 25.804 - type: ndcg_at_5 value: 22.836000000000002 - type: precision_at_1 value: 30.9 - type: precision_at_10 value: 14.360000000000001 - type: precision_at_100 value: 2.94 - type: precision_at_1000 value: 0.422 - type: precision_at_20 value: 9.504999999999999 - type: precision_at_3 value: 24.166999999999998 - type: precision_at_5 value: 20.22 - type: recall_at_1 value: 6.273 - type: recall_at_10 value: 29.095 - type: recall_at_100 value: 59.667 - type: recall_at_1000 value: 85.68 - type: recall_at_20 value: 38.512 - type: recall_at_3 value: 14.703 - type: recall_at_5 value: 20.52 - task: type: STS dataset: name: MTEB SICK-R (default) type: mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: cosine_pearson value: 85.22165753437119 - type: cosine_spearman value: 83.8127407315392 - type: euclidean_pearson value: 82.44103477882439 - type: euclidean_spearman value: 83.81273507696754 - type: main_score value: 83.8127407315392 - type: manhattan_pearson value: 81.92652274443019 - type: manhattan_spearman value: 82.3715754389135 - type: pearson value: 85.22165753437119 - type: spearman value: 83.8127407315392 - task: type: STS dataset: name: MTEB STS12 (default) type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cosine_pearson value: 85.37993195563598 - type: cosine_spearman value: 79.06871408688198 - type: euclidean_pearson value: 80.69951951053761 - type: euclidean_spearman value: 79.06873064755126 - type: main_score value: 79.06871408688198 - type: manhattan_pearson value: 77.95412896760531 - type: manhattan_spearman value: 75.49651289323124 - type: pearson value: 85.37993195563598 - type: spearman value: 79.06871408688198 - task: type: STS dataset: name: MTEB STS13 (default) type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cosine_pearson value: 86.17030892013604 - type: cosine_spearman value: 86.54428612066569 - type: euclidean_pearson value: 86.42021459238435 - type: euclidean_spearman value: 86.54428612066569 - type: main_score value: 86.54428612066569 - type: manhattan_pearson value: 84.64899940139117 - type: manhattan_spearman value: 84.37528077160499 - type: pearson value: 86.17030892013604 - type: spearman value: 86.54428612066569 - task: type: STS dataset: name: MTEB STS14 (default) type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cosine_pearson value: 84.46097652298906 - type: cosine_spearman value: 84.31789230181545 - type: euclidean_pearson value: 83.53652229013105 - type: euclidean_spearman value: 84.31787771751202 - type: main_score value: 84.31789230181545 - type: manhattan_pearson value: 82.40679358381392 - type: manhattan_spearman value: 82.56529092906449 - type: pearson value: 84.46097652298906 - type: spearman value: 84.31789230181545 - task: type: STS dataset: name: MTEB STS15 (default) type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cosine_pearson value: 89.580101203536 - type: cosine_spearman value: 89.69254612113068 - type: euclidean_pearson value: 88.78501564809129 - type: euclidean_spearman value: 89.69254607130148 - type: 
main_score value: 89.69254612113068 - type: manhattan_pearson value: 87.37048209358335 - type: manhattan_spearman value: 87.3836150196757 - type: pearson value: 89.580101203536 - type: spearman value: 89.69254612113068 - task: type: STS dataset: name: MTEB STS16 (default) type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cosine_pearson value: 86.34409195642276 - type: cosine_spearman value: 87.22893647955566 - type: euclidean_pearson value: 86.47233799859978 - type: euclidean_spearman value: 87.22893647955566 - type: main_score value: 87.22893647955566 - type: manhattan_pearson value: 86.28871397722244 - type: manhattan_spearman value: 86.54681756151196 - type: pearson value: 86.34409195642276 - type: spearman value: 87.22893647955566 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 91.6309842235666 - type: cosine_spearman value: 91.55107309513775 - type: euclidean_pearson value: 91.66305652923727 - type: euclidean_spearman value: 91.55107309513775 - type: main_score value: 91.55107309513775 - type: manhattan_pearson value: 92.34412264807419 - type: manhattan_spearman value: 91.76106893098941 - type: pearson value: 91.6309842235666 - type: spearman value: 91.55107309513775 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 69.34620080035205 - type: cosine_spearman value: 68.68546006466259 - type: euclidean_pearson value: 68.92323864900831 - type: euclidean_spearman value: 68.68546006466259 - type: main_score value: 68.68546006466259 - type: manhattan_pearson value: 69.50252696626819 - type: manhattan_spearman value: 68.6026900249137 - type: pearson value: 69.34620080035205 - type: spearman value: 68.68546006466259 - task: type: STS dataset: name: MTEB STSBenchmark (default) type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cosine_pearson value: 87.72211485964012 - type: cosine_spearman value: 88.21684368202543 - type: euclidean_pearson value: 87.4152174509492 - type: euclidean_spearman value: 88.21684358110474 - type: main_score value: 88.21684368202543 - type: manhattan_pearson value: 86.18736905144627 - type: manhattan_spearman value: 86.2967005957272 - type: pearson value: 87.72211485964012 - type: spearman value: 88.21684368202543 - task: type: Reranking dataset: name: MTEB SciDocsRR (default) type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: main_score value: 87.91887485668568 - type: map value: 87.91887485668568 - type: mrr value: 96.79923108844677 - type: nAUC_map_diff1 value: -9.544019533700576 - type: nAUC_map_max value: 51.305518546271486 - type: nAUC_map_std value: 68.93338639531362 - type: nAUC_mrr_diff1 value: 28.20896050152944 - type: nAUC_mrr_max value: 84.08480139020106 - type: nAUC_mrr_std value: 81.66707142756775 - task: type: Retrieval dataset: name: MTEB SciFact (default) type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: main_score value: 78.385 - type: map_at_1 value: 65.261 - type: map_at_10 value: 74.265 - type: map_at_100 value: 74.68900000000001 - type: map_at_1000 value: 74.7 - type: map_at_20 
value: 74.61 - type: map_at_3 value: 71.485 - type: map_at_5 value: 73.07000000000001 - type: mrr_at_1 value: 68.33333333333333 - type: mrr_at_10 value: 75.00052910052908 - type: mrr_at_100 value: 75.36791603863972 - type: mrr_at_1000 value: 75.37836387206892 - type: mrr_at_20 value: 75.28900989361513 - type: mrr_at_3 value: 73.16666666666667 - type: mrr_at_5 value: 74.06666666666666 - type: nauc_map_at_1000_diff1 value: 71.95557264143025 - type: nauc_map_at_1000_max value: 57.668494594642375 - type: nauc_map_at_1000_std value: 0.9160722241006062 - type: nauc_map_at_100_diff1 value: 71.96131086694861 - type: nauc_map_at_100_max value: 57.67394651480537 - type: nauc_map_at_100_std value: 0.9111248974804423 - type: nauc_map_at_10_diff1 value: 71.8005197158061 - type: nauc_map_at_10_max value: 57.926225509350296 - type: nauc_map_at_10_std value: 1.0289591605730695 - type: nauc_map_at_1_diff1 value: 74.04117350009464 - type: nauc_map_at_1_max value: 46.01270356681121 - type: nauc_map_at_1_std value: -12.34453479186478 - type: nauc_map_at_20_diff1 value: 71.79288203065293 - type: nauc_map_at_20_max value: 57.748067223890466 - type: nauc_map_at_20_std value: 1.0471868877436754 - type: nauc_map_at_3_diff1 value: 73.12655880469308 - type: nauc_map_at_3_max value: 53.170175466998955 - type: nauc_map_at_3_std value: -2.841120496331886 - type: nauc_map_at_5_diff1 value: 72.37537625825152 - type: nauc_map_at_5_max value: 57.22646320702063 - type: nauc_map_at_5_std value: 0.08993845130894543 - type: nauc_mrr_at_1000_diff1 value: 72.33151450517484 - type: nauc_mrr_at_1000_max value: 59.05887764321693 - type: nauc_mrr_at_1000_std value: 2.978447313200519 - type: nauc_mrr_at_100_diff1 value: 72.3371689393142 - type: nauc_mrr_at_100_max value: 59.063748264607554 - type: nauc_mrr_at_100_std value: 2.9724134206438007 - type: nauc_mrr_at_10_diff1 value: 72.15441848985677 - type: nauc_mrr_at_10_max value: 59.323659507427315 - type: nauc_mrr_at_10_std value: 3.202392266950175 - type: nauc_mrr_at_1_diff1 value: 74.70791175021019 - type: nauc_mrr_at_1_max value: 54.890557504421224 - type: nauc_mrr_at_1_std value: -3.1003391992577676 - type: nauc_mrr_at_20_diff1 value: 72.16447875028192 - type: nauc_mrr_at_20_max value: 59.13406185965151 - type: nauc_mrr_at_20_std value: 3.1032769225166454 - type: nauc_mrr_at_3_diff1 value: 72.74517143863574 - type: nauc_mrr_at_3_max value: 58.78449780863764 - type: nauc_mrr_at_3_std value: 3.1947844580560276 - type: nauc_mrr_at_5_diff1 value: 72.55041655786376 - type: nauc_mrr_at_5_max value: 59.379628404843956 - type: nauc_mrr_at_5_std value: 3.0807485088011655 - type: nauc_ndcg_at_1000_diff1 value: 71.3780675347069 - type: nauc_ndcg_at_1000_max value: 59.48945646166557 - type: nauc_ndcg_at_1000_std value: 3.4914501826426503 - type: nauc_ndcg_at_100_diff1 value: 71.53734704134561 - type: nauc_ndcg_at_100_max value: 59.745110507117275 - type: nauc_ndcg_at_100_std value: 3.783265578398072 - type: nauc_ndcg_at_10_diff1 value: 69.96639696430987 - type: nauc_ndcg_at_10_max value: 60.93159115976958 - type: nauc_ndcg_at_10_std value: 4.90530364691378 - type: nauc_ndcg_at_1_diff1 value: 74.70791175021019 - type: nauc_ndcg_at_1_max value: 54.890557504421224 - type: nauc_ndcg_at_1_std value: -3.1003391992577676 - type: nauc_ndcg_at_20_diff1 value: 69.89569028363886 - type: nauc_ndcg_at_20_max value: 60.270211929349834 - type: nauc_ndcg_at_20_std value: 4.838097933264383 - type: nauc_ndcg_at_3_diff1 value: 71.97085051507173 - type: nauc_ndcg_at_3_max value: 57.05247760108673 - type: 
nauc_ndcg_at_3_std value: 1.342308002922158 - type: nauc_ndcg_at_5_diff1 value: 71.34405011749429 - type: nauc_ndcg_at_5_max value: 60.15875062308923 - type: nauc_ndcg_at_5_std value: 3.0796119978456793 - type: nauc_precision_at_1000_diff1 value: -29.157292935130375 - type: nauc_precision_at_1000_max value: 26.889021898412864 - type: nauc_precision_at_1000_std value: 49.35914635404835 - type: nauc_precision_at_100_diff1 value: -18.882174284520445 - type: nauc_precision_at_100_max value: 31.615184568467097 - type: nauc_precision_at_100_std value: 47.60363461742358 - type: nauc_precision_at_10_diff1 value: -2.8344355852237415 - type: nauc_precision_at_10_max value: 44.568061478871776 - type: nauc_precision_at_10_std value: 43.421513484558055 - type: nauc_precision_at_1_diff1 value: 74.70791175021019 - type: nauc_precision_at_1_max value: 54.890557504421224 - type: nauc_precision_at_1_std value: -3.1003391992577676 - type: nauc_precision_at_20_diff1 value: -15.053027139874736 - type: nauc_precision_at_20_max value: 36.35184411969381 - type: nauc_precision_at_20_std value: 46.455671537926236 - type: nauc_precision_at_3_diff1 value: 36.35406984818856 - type: nauc_precision_at_3_max value: 51.10573379058357 - type: nauc_precision_at_3_std value: 24.525389143510285 - type: nauc_precision_at_5_diff1 value: 17.185063562924082 - type: nauc_precision_at_5_max value: 51.988011211557364 - type: nauc_precision_at_5_std value: 34.07769395557144 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: 80.72562358276696 - type: nauc_recall_at_100_max value: 77.43097238895595 - type: nauc_recall_at_100_std value: 30.585567560357074 - type: nauc_recall_at_10_diff1 value: 55.594191699668386 - type: nauc_recall_at_10_max value: 73.93911587623553 - type: nauc_recall_at_10_std value: 22.56028848320937 - type: nauc_recall_at_1_diff1 value: 74.04117350009464 - type: nauc_recall_at_1_max value: 46.01270356681121 - type: nauc_recall_at_1_std value: -12.34453479186478 - type: nauc_recall_at_20_diff1 value: 44.765406162464785 - type: nauc_recall_at_20_max value: 76.48517740429489 - type: nauc_recall_at_20_std value: 34.141573295985225 - type: nauc_recall_at_3_diff1 value: 69.04055599707765 - type: nauc_recall_at_3_max value: 57.037557037556965 - type: nauc_recall_at_3_std value: 3.6347123303645557 - type: nauc_recall_at_5_diff1 value: 66.13492482259224 - type: nauc_recall_at_5_max value: 67.49828930893953 - type: nauc_recall_at_5_std value: 9.62641835622894 - type: ndcg_at_1 value: 68.333 - type: ndcg_at_10 value: 78.385 - type: ndcg_at_100 value: 80.097 - type: ndcg_at_1000 value: 80.382 - type: ndcg_at_20 value: 79.532 - type: ndcg_at_3 value: 73.96000000000001 - type: ndcg_at_5 value: 75.922 - type: precision_at_1 value: 68.333 - type: precision_at_10 value: 10.267 - type: precision_at_100 value: 1.107 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_20 value: 5.383 - type: precision_at_3 value: 28.666999999999998 - type: precision_at_5 value: 18.733 - type: recall_at_1 value: 65.261 - type: recall_at_10 value: 90.333 - type: recall_at_100 value: 97.667 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 94.667 - type: recall_at_3 value: 78.35 - type: recall_at_5 value: 83.217 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions (default) type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: 
d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cosine_accuracy value: 99.86435643564356 - type: cosine_accuracy_threshold value: 75.08841156959534 - type: cosine_ap value: 96.9830495771001 - type: cosine_f1 value: 93.24790537210448 - type: cosine_f1_threshold value: 73.8122820854187 - type: cosine_precision value: 91.93391642371235 - type: cosine_recall value: 94.6 - type: dot_accuracy value: 99.86435643564356 - type: dot_accuracy_threshold value: 75.08841156959534 - type: dot_ap value: 96.9830495771001 - type: dot_f1 value: 93.24790537210448 - type: dot_f1_threshold value: 73.81229400634766 - type: dot_precision value: 91.93391642371235 - type: dot_recall value: 94.6 - type: euclidean_accuracy value: 99.86435643564356 - type: euclidean_accuracy_threshold value: 70.58552503585815 - type: euclidean_ap value: 96.9830495771001 - type: euclidean_f1 value: 93.24790537210448 - type: euclidean_f1_threshold value: 72.37086296081543 - type: euclidean_precision value: 91.93391642371235 - type: euclidean_recall value: 94.6 - type: main_score value: 96.9830495771001 - type: manhattan_accuracy value: 99.85544554455446 - type: manhattan_accuracy_threshold value: 2102.3300170898438 - type: manhattan_ap value: 96.69996535175346 - type: manhattan_f1 value: 92.60385005065855 - type: manhattan_f1_threshold value: 2106.606674194336 - type: manhattan_precision value: 93.83983572895276 - type: manhattan_recall value: 91.4 - type: max_accuracy value: 99.86435643564356 - type: max_ap value: 96.9830495771001 - type: max_f1 value: 93.24790537210448 - type: max_precision value: 93.83983572895276 - type: max_recall value: 94.6 - type: similarity_accuracy value: 99.86435643564356 - type: similarity_accuracy_threshold value: 75.08841156959534 - type: similarity_ap value: 96.9830495771001 - type: similarity_f1 value: 93.24790537210448 - type: similarity_f1_threshold value: 73.8122820854187 - type: similarity_precision value: 91.93391642371235 - type: similarity_recall value: 94.6 - task: type: Clustering dataset: name: MTEB StackExchangeClustering (default) type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: main_score value: 81.69710673313244 - type: v_measure value: 81.69710673313244 - type: v_measure_std value: 2.655167436381706 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P (default) type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: main_score value: 43.72862700989394 - type: v_measure value: 43.72862700989394 - type: v_measure_std value: 1.3902399715070008 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions (default) type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: main_score value: 58.147527904806864 - type: map value: 58.147527904806864 - type: mrr value: 59.21841842797725 - type: nAUC_map_diff1 value: 41.30339453892422 - type: nAUC_map_max value: 12.414607439479719 - type: nAUC_map_std value: 7.9053349557289 - type: nAUC_mrr_diff1 value: 41.419127589177954 - type: nAUC_mrr_max value: 13.51513956670511 - type: nAUC_mrr_std value: 8.670528870027399 - task: type: Summarization dataset: name: MTEB SummEval (default) type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cosine_pearson value: 31.491060079270405 - type: cosine_spearman value: 
31.554541555025118 - type: dot_pearson value: 31.491058716856347 - type: dot_spearman value: 31.554541555025118 - type: main_score value: 31.554541555025118 - type: pearson value: 31.491060079270405 - type: spearman value: 31.554541555025118 - task: type: Retrieval dataset: name: MTEB TRECCOVID (default) type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: main_score value: 69.733 - type: map_at_1 value: 0.213 - type: map_at_10 value: 1.737 - type: map_at_100 value: 10.327 - type: map_at_1000 value: 28.267999999999997 - type: map_at_20 value: 3.0020000000000002 - type: map_at_3 value: 0.5950000000000001 - type: map_at_5 value: 0.9369999999999999 - type: mrr_at_1 value: 80.0 - type: mrr_at_10 value: 89.0 - type: mrr_at_100 value: 89.0 - type: mrr_at_1000 value: 89.0 - type: mrr_at_20 value: 89.0 - type: mrr_at_3 value: 88.66666666666667 - type: mrr_at_5 value: 88.66666666666667 - type: nauc_map_at_1000_diff1 value: 7.530064816522635 - type: nauc_map_at_1000_max value: 55.23519081314714 - type: nauc_map_at_1000_std value: 69.88336126732227 - type: nauc_map_at_100_diff1 value: -11.58802769875123 - type: nauc_map_at_100_max value: 45.65439990209477 - type: nauc_map_at_100_std value: 54.44015403236353 - type: nauc_map_at_10_diff1 value: -33.33082523706407 - type: nauc_map_at_10_max value: 13.817336112350398 - type: nauc_map_at_10_std value: 11.79843765110203 - type: nauc_map_at_1_diff1 value: -24.172683196985325 - type: nauc_map_at_1_max value: 0.812998404669278 - type: nauc_map_at_1_std value: -5.722318547866086 - type: nauc_map_at_20_diff1 value: -26.665749957105188 - type: nauc_map_at_20_max value: 21.228342017724675 - type: nauc_map_at_20_std value: 22.263210043854528 - type: nauc_map_at_3_diff1 value: -29.596285662079545 - type: nauc_map_at_3_max value: 9.991968954179255 - type: nauc_map_at_3_std value: 3.42447296457675 - type: nauc_map_at_5_diff1 value: -29.475843534692352 - type: nauc_map_at_5_max value: 14.459021545403486 - type: nauc_map_at_5_std value: 7.995420002675463 - type: nauc_mrr_at_1000_diff1 value: -26.337875642992593 - type: nauc_mrr_at_1000_max value: 29.579159522305375 - type: nauc_mrr_at_1000_std value: 28.12622206178732 - type: nauc_mrr_at_100_diff1 value: -26.337875642992593 - type: nauc_mrr_at_100_max value: 29.579159522305375 - type: nauc_mrr_at_100_std value: 28.12622206178732 - type: nauc_mrr_at_10_diff1 value: -26.337875642992593 - type: nauc_mrr_at_10_max value: 29.579159522305375 - type: nauc_mrr_at_10_std value: 28.12622206178732 - type: nauc_mrr_at_1_diff1 value: -25.92407592407588 - type: nauc_mrr_at_1_max value: 28.301698301698252 - type: nauc_mrr_at_1_std value: 26.693306693306656 - type: nauc_mrr_at_20_diff1 value: -26.337875642992593 - type: nauc_mrr_at_20_max value: 29.579159522305375 - type: nauc_mrr_at_20_std value: 28.12622206178732 - type: nauc_mrr_at_3_diff1 value: -26.050591595267147 - type: nauc_mrr_at_3_max value: 29.03916768665867 - type: nauc_mrr_at_3_std value: 27.631578947368535 - type: nauc_mrr_at_5_diff1 value: -26.050591595267147 - type: nauc_mrr_at_5_max value: 29.03916768665867 - type: nauc_mrr_at_5_std value: 27.631578947368535 - type: nauc_ndcg_at_1000_diff1 value: 13.313313351002273 - type: nauc_ndcg_at_1000_max value: 51.15076909707446 - type: nauc_ndcg_at_1000_std value: 64.84776628015508 - type: nauc_ndcg_at_100_diff1 value: 3.7846451010204216 - type: nauc_ndcg_at_100_max value: 49.0721051387502 - type: nauc_ndcg_at_100_std value: 65.97894701747119 - type: 
nauc_ndcg_at_10_diff1 value: -25.309415375177647 - type: nauc_ndcg_at_10_max value: 43.68557432763264 - type: nauc_ndcg_at_10_std value: 47.90146365089116 - type: nauc_ndcg_at_1_diff1 value: -22.744222217790963 - type: nauc_ndcg_at_1_max value: 31.31069413148822 - type: nauc_ndcg_at_1_std value: 21.059243454505594 - type: nauc_ndcg_at_20_diff1 value: -11.686356003814897 - type: nauc_ndcg_at_20_max value: 47.21608544472201 - type: nauc_ndcg_at_20_std value: 56.721660150841934 - type: nauc_ndcg_at_3_diff1 value: -22.60324298042963 - type: nauc_ndcg_at_3_max value: 37.29214797900573 - type: nauc_ndcg_at_3_std value: 31.069444337406544 - type: nauc_ndcg_at_5_diff1 value: -23.092470045715576 - type: nauc_ndcg_at_5_max value: 45.28716833477456 - type: nauc_ndcg_at_5_std value: 41.746096468983836 - type: nauc_precision_at_1000_diff1 value: 11.936194396568526 - type: nauc_precision_at_1000_max value: 35.73984401090955 - type: nauc_precision_at_1000_std value: 47.45009555269865 - type: nauc_precision_at_100_diff1 value: 7.53944614850939 - type: nauc_precision_at_100_max value: 51.11150228319469 - type: nauc_precision_at_100_std value: 69.37024506529535 - type: nauc_precision_at_10_diff1 value: -35.338375024238914 - type: nauc_precision_at_10_max value: 46.01734120058722 - type: nauc_precision_at_10_std value: 54.076844233912325 - type: nauc_precision_at_1_diff1 value: -25.92407592407588 - type: nauc_precision_at_1_max value: 28.301698301698252 - type: nauc_precision_at_1_std value: 26.693306693306656 - type: nauc_precision_at_20_diff1 value: -12.78008420928654 - type: nauc_precision_at_20_max value: 48.11647969849543 - type: nauc_precision_at_20_std value: 61.91708624090925 - type: nauc_precision_at_3_diff1 value: -33.641360921891206 - type: nauc_precision_at_3_max value: 46.65887466442645 - type: nauc_precision_at_3_std value: 45.65443687565056 - type: nauc_precision_at_5_diff1 value: -30.684095323241937 - type: nauc_precision_at_5_max value: 54.23744489317759 - type: nauc_precision_at_5_std value: 53.2087842073353 - type: nauc_recall_at_1000_diff1 value: 17.996439669690247 - type: nauc_recall_at_1000_max value: 46.5940045697732 - type: nauc_recall_at_1000_std value: 57.28734391628304 - type: nauc_recall_at_100_diff1 value: -9.913571369031885 - type: nauc_recall_at_100_max value: 34.727478455899956 - type: nauc_recall_at_100_std value: 39.274245119901806 - type: nauc_recall_at_10_diff1 value: -33.34086532993325 - type: nauc_recall_at_10_max value: 8.520973060014345 - type: nauc_recall_at_10_std value: 6.516939825125482 - type: nauc_recall_at_1_diff1 value: -24.172683196985325 - type: nauc_recall_at_1_max value: 0.812998404669278 - type: nauc_recall_at_1_std value: -5.722318547866086 - type: nauc_recall_at_20_diff1 value: -24.094516713201198 - type: nauc_recall_at_20_max value: 14.107431429537327 - type: nauc_recall_at_20_std value: 13.738982786212887 - type: nauc_recall_at_3_diff1 value: -29.84992741698517 - type: nauc_recall_at_3_max value: 8.045792723460073 - type: nauc_recall_at_3_std value: 2.4923805102265644 - type: nauc_recall_at_5_diff1 value: -29.96523188989537 - type: nauc_recall_at_5_max value: 11.604276438289629 - type: nauc_recall_at_5_std value: 5.131565461597982 - type: ndcg_at_1 value: 73.0 - type: ndcg_at_10 value: 69.733 - type: ndcg_at_100 value: 55.591 - type: ndcg_at_1000 value: 54.852999999999994 - type: ndcg_at_20 value: 66.318 - type: ndcg_at_3 value: 74.195 - type: ndcg_at_5 value: 72.2 - type: precision_at_1 value: 80.0 - type: precision_at_10 value: 74.8 - type: 
precision_at_100 value: 57.42 - type: precision_at_1000 value: 24.772 - type: precision_at_20 value: 70.19999999999999 - type: precision_at_3 value: 79.333 - type: precision_at_5 value: 76.4 - type: recall_at_1 value: 0.213 - type: recall_at_10 value: 1.968 - type: recall_at_100 value: 13.965 - type: recall_at_1000 value: 53.185 - type: recall_at_20 value: 3.6029999999999998 - type: recall_at_3 value: 0.63 - type: recall_at_5 value: 1.012 - task: type: Retrieval dataset: name: MTEB Touche2020 (default) type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: main_score value: 25.855 - type: map_at_1 value: 2.3720000000000003 - type: map_at_10 value: 10.761 - type: map_at_100 value: 16.883 - type: map_at_1000 value: 18.419 - type: map_at_20 value: 13.234000000000002 - type: map_at_3 value: 5.305 - type: map_at_5 value: 7.7909999999999995 - type: mrr_at_1 value: 30.612244897959183 - type: mrr_at_10 value: 44.44930353093618 - type: mrr_at_100 value: 45.4833016949092 - type: mrr_at_1000 value: 45.4833016949092 - type: mrr_at_20 value: 45.107658781128166 - type: mrr_at_3 value: 41.156462585034014 - type: mrr_at_5 value: 42.585034013605444 - type: nauc_map_at_1000_diff1 value: 13.258679267701162 - type: nauc_map_at_1000_max value: -20.533359275963978 - type: nauc_map_at_1000_std value: 5.195756690535686 - type: nauc_map_at_100_diff1 value: 13.131743244043795 - type: nauc_map_at_100_max value: -21.492974221123553 - type: nauc_map_at_100_std value: 2.3596492252552466 - type: nauc_map_at_10_diff1 value: 13.225110960782512 - type: nauc_map_at_10_max value: -15.955538570111544 - type: nauc_map_at_10_std value: -9.960230793465525 - type: nauc_map_at_1_diff1 value: 10.715203611651038 - type: nauc_map_at_1_max value: -22.738676941331217 - type: nauc_map_at_1_std value: -12.157109615038761 - type: nauc_map_at_20_diff1 value: 14.274852478638012 - type: nauc_map_at_20_max value: -17.121737352210666 - type: nauc_map_at_20_std value: -8.245512758810355 - type: nauc_map_at_3_diff1 value: 10.257756467121387 - type: nauc_map_at_3_max value: -24.68709623139807 - type: nauc_map_at_3_std value: -4.467589369944418 - type: nauc_map_at_5_diff1 value: 11.668204447419454 - type: nauc_map_at_5_max value: -20.960140274793357 - type: nauc_map_at_5_std value: -12.40170876103286 - type: nauc_mrr_at_1000_diff1 value: 5.190215790331544 - type: nauc_mrr_at_1000_max value: -39.787191589591906 - type: nauc_mrr_at_1000_std value: 5.674646076011233 - type: nauc_mrr_at_100_diff1 value: 5.190215790331544 - type: nauc_mrr_at_100_max value: -39.787191589591906 - type: nauc_mrr_at_100_std value: 5.674646076011233 - type: nauc_mrr_at_10_diff1 value: 5.265068861137356 - type: nauc_mrr_at_10_max value: -39.452907737584766 - type: nauc_mrr_at_10_std value: 4.21949027692033 - type: nauc_mrr_at_1_diff1 value: 5.764128953008387 - type: nauc_mrr_at_1_max value: -34.396988502985046 - type: nauc_mrr_at_1_std value: -3.20168662726788 - type: nauc_mrr_at_20_diff1 value: 4.987530247680915 - type: nauc_mrr_at_20_max value: -40.229139478533966 - type: nauc_mrr_at_20_std value: 5.977348987000782 - type: nauc_mrr_at_3_diff1 value: 9.610125583884257 - type: nauc_mrr_at_3_max value: -36.029841466645934 - type: nauc_mrr_at_3_std value: 7.59968816692639 - type: nauc_mrr_at_5_diff1 value: 6.12878627948545 - type: nauc_mrr_at_5_max value: -39.53677644419165 - type: nauc_mrr_at_5_std value: 4.7057108704387645 - type: nauc_ndcg_at_1000_diff1 value: 5.592968185470883 - type: nauc_ndcg_at_1000_max 
value: -28.23746134880031 - type: nauc_ndcg_at_1000_std value: 27.884534247724062 - type: nauc_ndcg_at_100_diff1 value: 6.3640586707803575 - type: nauc_ndcg_at_100_max value: -37.75625065480638 - type: nauc_ndcg_at_100_std value: 18.401240235775717 - type: nauc_ndcg_at_10_diff1 value: 8.51329926083278 - type: nauc_ndcg_at_10_max value: -29.840137893584263 - type: nauc_ndcg_at_10_std value: -0.04663104264974505 - type: nauc_ndcg_at_1_diff1 value: 4.649199383837089 - type: nauc_ndcg_at_1_max value: -36.34289129705041 - type: nauc_ndcg_at_1_std value: -3.44317820875297 - type: nauc_ndcg_at_20_diff1 value: 9.392629877698923 - type: nauc_ndcg_at_20_max value: -31.820651294588924 - type: nauc_ndcg_at_20_std value: -0.9972668750497783 - type: nauc_ndcg_at_3_diff1 value: 5.874210450563947 - type: nauc_ndcg_at_3_max value: -34.78563048938306 - type: nauc_ndcg_at_3_std value: 8.851987228864013 - type: nauc_ndcg_at_5_diff1 value: 7.673481918619619 - type: nauc_ndcg_at_5_max value: -34.878421907064144 - type: nauc_ndcg_at_5_std value: -1.0432441077995342 - type: nauc_precision_at_1000_diff1 value: -4.361789669513903 - type: nauc_precision_at_1000_max value: 36.01384363218954 - type: nauc_precision_at_1000_std value: 44.87523889822509 - type: nauc_precision_at_100_diff1 value: 3.509004969666037 - type: nauc_precision_at_100_max value: -26.953843995648196 - type: nauc_precision_at_100_std value: 60.28357323451904 - type: nauc_precision_at_10_diff1 value: 13.319423093878294 - type: nauc_precision_at_10_max value: -24.788053794110258 - type: nauc_precision_at_10_std value: 2.075713700632348 - type: nauc_precision_at_1_diff1 value: 5.764128953008387 - type: nauc_precision_at_1_max value: -34.396988502985046 - type: nauc_precision_at_1_std value: -3.20168662726788 - type: nauc_precision_at_20_diff1 value: 12.157863432105996 - type: nauc_precision_at_20_max value: -28.577513527223473 - type: nauc_precision_at_20_std value: 13.947153923691271 - type: nauc_precision_at_3_diff1 value: 11.019134712127137 - type: nauc_precision_at_3_max value: -35.903911078806004 - type: nauc_precision_at_3_std value: 9.77624599901155 - type: nauc_precision_at_5_diff1 value: 14.312631954702205 - type: nauc_precision_at_5_max value: -34.35871806483499 - type: nauc_precision_at_5_std value: -5.002004889336612 - type: nauc_recall_at_1000_diff1 value: -6.239317795705283 - type: nauc_recall_at_1000_max value: -2.1964262972170188 - type: nauc_recall_at_1000_std value: 72.01699027100997 - type: nauc_recall_at_100_diff1 value: 4.996923455885459 - type: nauc_recall_at_100_max value: -37.796123663830905 - type: nauc_recall_at_100_std value: 28.077209635317868 - type: nauc_recall_at_10_diff1 value: 9.265263065395837 - type: nauc_recall_at_10_max value: -21.186015180676115 - type: nauc_recall_at_10_std value: -13.238588244011387 - type: nauc_recall_at_1_diff1 value: 10.715203611651038 - type: nauc_recall_at_1_max value: -22.738676941331217 - type: nauc_recall_at_1_std value: -12.157109615038761 - type: nauc_recall_at_20_diff1 value: 8.973261788296464 - type: nauc_recall_at_20_max value: -26.822255453044697 - type: nauc_recall_at_20_std value: -6.786380551168297 - type: nauc_recall_at_3_diff1 value: 10.447511653664204 - type: nauc_recall_at_3_max value: -27.65849592208199 - type: nauc_recall_at_3_std value: -3.8950536550559502 - type: nauc_recall_at_5_diff1 value: 7.188322256567744 - type: nauc_recall_at_5_max value: -25.957944490064744 - type: nauc_recall_at_5_std value: -17.745642764320777 - type: ndcg_at_1 value: 28.571 - type: ndcg_at_10 
value: 25.855 - type: ndcg_at_100 value: 37.69 - type: ndcg_at_1000 value: 48.808 - type: ndcg_at_20 value: 26.883000000000003 - type: ndcg_at_3 value: 28.904000000000003 - type: ndcg_at_5 value: 27.901999999999997 - type: precision_at_1 value: 30.612000000000002 - type: precision_at_10 value: 23.265 - type: precision_at_100 value: 7.878 - type: precision_at_1000 value: 1.522 - type: precision_at_20 value: 17.653 - type: precision_at_3 value: 29.932 - type: precision_at_5 value: 28.163 - type: recall_at_1 value: 2.3720000000000003 - type: recall_at_10 value: 17.071 - type: recall_at_100 value: 48.829 - type: recall_at_1000 value: 81.194 - type: recall_at_20 value: 24.882 - type: recall_at_3 value: 6.578 - type: recall_at_5 value: 10.951 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification (default) type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 92.28515625 - type: ap value: 43.967425546221364 - type: ap_weighted value: 43.967425546221364 - type: f1 value: 79.48199263483515 - type: f1_weighted value: 93.11764775204445 - type: main_score value: 92.28515625 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification (default) type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 80.1726089417091 - type: f1 value: 80.44865150205347 - type: f1_weighted value: 80.01110885829492 - type: main_score value: 80.1726089417091 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering (default) type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: main_score value: 69.43612684622691 - type: v_measure value: 69.43612684622691 - type: v_measure_std value: 0.65287690596996 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 (default) type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cosine_accuracy value: 87.65571913929784 - type: cosine_accuracy_threshold value: 75.32307505607605 - type: cosine_ap value: 79.12498779374573 - type: cosine_f1 value: 72.19822109275731 - type: cosine_f1_threshold value: 73.05474281311035 - type: cosine_precision value: 69.63235294117646 - type: cosine_recall value: 74.96042216358839 - type: dot_accuracy value: 87.65571913929784 - type: dot_accuracy_threshold value: 75.32307505607605 - type: dot_ap value: 79.1249879229425 - type: dot_f1 value: 72.19822109275731 - type: dot_f1_threshold value: 73.0547547340393 - type: dot_precision value: 69.63235294117646 - type: dot_recall value: 74.96042216358839 - type: euclidean_accuracy value: 87.65571913929784 - type: euclidean_accuracy_threshold value: 70.2522873878479 - type: euclidean_ap value: 79.12498501352084 - type: euclidean_f1 value: 72.19822109275731 - type: euclidean_f1_threshold value: 73.4101414680481 - type: euclidean_precision value: 69.63235294117646 - type: euclidean_recall value: 74.96042216358839 - type: main_score value: 79.31400852296694 - type: manhattan_accuracy value: 87.78685104607499 - type: manhattan_accuracy_threshold value: 2240.8660888671875 - type: manhattan_ap value: 79.31400852296694 - type: manhattan_f1 value: 72.46414265408968 - type: manhattan_f1_threshold value: 2333.853530883789 - type: manhattan_precision value: 71.00531780197518 - type: 
manhattan_recall value: 73.98416886543535 - type: max_accuracy value: 87.78685104607499 - type: max_ap value: 79.31400852296694 - type: max_f1 value: 72.46414265408968 - type: max_precision value: 71.00531780197518 - type: max_recall value: 74.96042216358839 - type: similarity_accuracy value: 87.65571913929784 - type: similarity_accuracy_threshold value: 75.32307505607605 - type: similarity_ap value: 79.12498779374573 - type: similarity_f1 value: 72.19822109275731 - type: similarity_f1_threshold value: 73.05474281311035 - type: similarity_precision value: 69.63235294117646 - type: similarity_recall value: 74.96042216358839 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus (default) type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cosine_accuracy value: 89.58939729110878 - type: cosine_accuracy_threshold value: 74.62999820709229 - type: cosine_ap value: 87.2110644450708 - type: cosine_f1 value: 79.70716766892018 - type: cosine_f1_threshold value: 72.82971739768982 - type: cosine_precision value: 77.40297424737032 - type: cosine_recall value: 82.15275639051433 - type: dot_accuracy value: 89.58939729110878 - type: dot_accuracy_threshold value: 74.62999224662781 - type: dot_ap value: 87.21106174391684 - type: dot_f1 value: 79.70716766892018 - type: dot_f1_threshold value: 72.8297233581543 - type: dot_precision value: 77.40297424737032 - type: dot_recall value: 82.15275639051433 - type: euclidean_accuracy value: 89.58939729110878 - type: euclidean_accuracy_threshold value: 71.23202085494995 - type: euclidean_ap value: 87.21106341739784 - type: euclidean_f1 value: 79.70716766892018 - type: euclidean_f1_threshold value: 73.71604442596436 - type: euclidean_precision value: 77.40297424737032 - type: euclidean_recall value: 82.15275639051433 - type: main_score value: 87.50024399984746 - type: manhattan_accuracy value: 89.72522994527885 - type: manhattan_accuracy_threshold value: 2315.9988403320312 - type: manhattan_ap value: 87.50024399984746 - type: manhattan_f1 value: 80.0 - type: manhattan_f1_threshold value: 2496.2581634521484 - type: manhattan_precision value: 76.38859704419696 - type: manhattan_recall value: 83.96981829380967 - type: max_accuracy value: 89.72522994527885 - type: max_ap value: 87.50024399984746 - type: max_f1 value: 80.0 - type: max_precision value: 77.40297424737032 - type: max_recall value: 83.96981829380967 - type: similarity_accuracy value: 89.58939729110878 - type: similarity_accuracy_threshold value: 74.62999820709229 - type: similarity_ap value: 87.2110644450708 - type: similarity_f1 value: 79.70716766892018 - type: similarity_f1_threshold value: 72.82971739768982 - type: similarity_precision value: 77.40297424737032 - type: similarity_recall value: 82.15275639051433 --- <h2 align="center"> LENS Embeddings</h2> LENS is a model that produces **L**exicon-based **E**mbeddi**N**g**S** (LENS) leveraging large language models. Each dimension of the embeddings is designed to correspond to a token cluster where semantically similar tokens are grouped together. These embeddings have a similar feature size as dense embeddings, with LENS-d4000 offering 4000-dimensional representations. The technical report of **LENS** is available in [Enhancing Lexicon-Based Text Embeddings with Large Language Models](https://arxiv.org/abs/2501.09749). 
## Usage

```bash
git clone https://huggingface.co/yibinlei/LENS-d4000
cd LENS-d4000
```

```python
import torch
from torch import Tensor
import torch.nn.functional as F
from transformers import AutoTokenizer
from bidirectional_mistral import MistralBiForCausalLM

def get_detailed_instruct(task_instruction: str, query: str) -> str:
    return f'<instruct>{task_instruction}\n<query>{query}'

def pooling_func(vecs: Tensor, pooling_mask: Tensor) -> Tensor:
    # We use max-pooling for LENS.
    return torch.max(torch.log(1 + torch.relu(vecs)) * pooling_mask.unsqueeze(-1), dim=1).values

# Prepare the data
instruction = "Given a web search query, retrieve relevant passages that answer the query."
queries = ["what is rba", "what is oilskin fabric"]
instructed_queries = [get_detailed_instruct(instruction, query) for query in queries]
docs = ["Since 2007, the RBA's outstanding reputation has been affected by the 'Securency' or NPA scandal.",
        "Today's oilskins (or oilies) typically come in two parts, jackets and trousers. Oilskin jackets are generally similar to common rubberized waterproofs."]

# Load the model and tokenizer
model = MistralBiForCausalLM.from_pretrained("yibinlei/LENS-d4000",
                                             ignore_mismatched_sizes=True)
model.lm_head = torch.load('lm_head.pth')
tokenizer = AutoTokenizer.from_pretrained("yibinlei/LENS-d4000")

# Preprocess the data
query_max_len, doc_max_len = 512, 512
instructed_query_inputs = tokenizer(
    instructed_queries,
    padding=True,
    truncation=True,
    return_tensors='pt',
    max_length=query_max_len,
    add_special_tokens=True
)
doc_inputs = tokenizer(
    docs,
    padding=True,
    truncation=True,
    return_tensors='pt',
    max_length=doc_max_len,
    add_special_tokens=True
)

# We perform pooling exclusively on the outputs of the query tokens, excluding outputs from the instruction.
query_only_mask = torch.zeros_like(instructed_query_inputs['input_ids'],
                                   dtype=instructed_query_inputs['attention_mask'].dtype)
special_token_id = tokenizer.convert_tokens_to_ids('<query>')
for idx, seq in enumerate(instructed_query_inputs['input_ids']):
    special_pos = (seq == special_token_id).nonzero()
    if len(special_pos) > 0:
        query_start_pos = special_pos[-1].item()
        query_only_mask[idx, query_start_pos:-2] = 1
    else:
        raise ValueError("No special token found")

# Obtain the embeddings
with torch.no_grad():
    instructed_query_outputs = model(**instructed_query_inputs)
    query_embeddings = pooling_func(instructed_query_outputs, query_only_mask)
    doc_outputs = model(**doc_inputs)
    # As the output of each token is used for predicting the next token, the pooling mask is shifted left by 1.
    # The output of the final EOS token is also excluded.
    doc_inputs['attention_mask'][:, -2:] = 0
    doc_embeddings = pooling_func(doc_outputs, doc_inputs['attention_mask'])

# Normalize the embeddings
query_embeddings = F.normalize(query_embeddings, p=2, dim=1)
doc_embeddings = F.normalize(doc_embeddings, p=2, dim=1)

# Compute the similarity
similarity = torch.matmul(query_embeddings, doc_embeddings.T)
```
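Because each output dimension is tied to a token cluster, it can be useful to inspect which dimensions fire most strongly for a given text. The snippet below is a minimal sketch that assumes the `query_embeddings` tensor computed in the usage example above; it only surfaces cluster indices and activation values, since the index-to-token mapping is not reproduced here.

```python
import torch

# Look at the ten most strongly activated dimensions of the first query embedding.
# Each dimension index corresponds to one of the 4000 token clusters.
top_weights, top_dims = torch.topk(query_embeddings[0], k=10)
for dim, weight in zip(top_dims.tolist(), top_weights.tolist()):
    print(f"cluster {dim}: activation {weight:.4f}")
```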
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
nickmuchi/finbert-tone-finetuned-fintwitter-classification
nickmuchi
text-classification
[ "transformers", "pytorch", "tensorboard", "safetensors", "bert", "text-classification", "generated_from_trainer", "financial-tweets-sentiment-analysis", "sentiment-analysis", "financial", "stocks", "sentiment", "dataset:zeroshot/twitter-financial-news-sentiment", "model-index", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2022-12-30T14:30:37
2023-03-19T20:07:42
138
12
--- datasets: - zeroshot/twitter-financial-news-sentiment metrics: - accuracy - f1 - precision - recall tags: - generated_from_trainer - financial-tweets-sentiment-analysis - sentiment-analysis - financial - stocks - sentiment widget: - text: $LOW - Lowe's racks up another positive rating despite recession risk example_title: Bullish Sentiment - text: $HNHAF $HNHPD $AAPL - Trendforce cuts iPhone estimate after Foxconn delay example_title: Bearish Sentiment - text: 'Coin Toss: Morgan Stanley Raises Tesla Bull Case To $500, Keeps Bear Case At $10' example_title: Neutral Sentiment model-index: - name: finbert-tone-finetuned-fintwitter-classification results: - task: type: text-classification name: Text Classification dataset: name: twitter-financial-news-sentiment type: finance metrics: - type: F1 value: 0.8838 name: F1 - type: accuracy value: 0.884 name: accuracy --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # finbert-tone-finetuned-fintwitter-classification This model is a fine-tuned version of [yiyanghkust/finbert-tone](https://huggingface.co/yiyanghkust/finbert-tone) on [Twitter Financial News](https://huggingface.co/datasets/zeroshot/twitter-financial-news-sentiment) dataset. It achieves the following results on the evaluation set: - Loss: 1.4078 - Accuracy: 0.8840 - F1: 0.8838 - Precision: 0.8838 - Recall: 0.8840 ## Model description Model determines the financial sentiment of given tweets. Given the unbalanced distribution of the class labels, the weights were adjusted to pay attention to the less sampled labels which should increase overall performance.. ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 2e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 20 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 | Precision | Recall | |:-------------:|:-----:|:-----:|:---------------:|:--------:|:------:|:---------:|:------:| | 0.6385 | 1.0 | 597 | 0.3688 | 0.8668 | 0.8693 | 0.8744 | 0.8668 | | 0.3044 | 2.0 | 1194 | 0.3994 | 0.8744 | 0.8726 | 0.8739 | 0.8744 | | 0.1833 | 3.0 | 1791 | 0.6212 | 0.8781 | 0.8764 | 0.8762 | 0.8781 | | 0.1189 | 4.0 | 2388 | 0.8370 | 0.8740 | 0.8743 | 0.8748 | 0.8740 | | 0.0759 | 5.0 | 2985 | 0.9107 | 0.8807 | 0.8798 | 0.8796 | 0.8807 | | 0.0291 | 6.0 | 3582 | 0.9711 | 0.8836 | 0.8825 | 0.8821 | 0.8836 | | 0.0314 | 7.0 | 4179 | 1.1305 | 0.8819 | 0.8811 | 0.8812 | 0.8819 | | 0.0217 | 8.0 | 4776 | 1.0190 | 0.8811 | 0.8813 | 0.8816 | 0.8811 | | 0.0227 | 9.0 | 5373 | 1.1940 | 0.8844 | 0.8832 | 0.8838 | 0.8844 | | 0.0156 | 10.0 | 5970 | 1.2595 | 0.8752 | 0.8768 | 0.8801 | 0.8752 | | 0.0135 | 11.0 | 6567 | 1.1931 | 0.8760 | 0.8768 | 0.8780 | 0.8760 | | 0.009 | 12.0 | 7164 | 1.2154 | 0.8857 | 0.8852 | 0.8848 | 0.8857 | | 0.0058 | 13.0 | 7761 | 1.3874 | 0.8748 | 0.8759 | 0.8776 | 0.8748 | | 0.009 | 14.0 | 8358 | 1.4193 | 0.8740 | 0.8754 | 0.8780 | 0.8740 | | 0.0042 | 15.0 | 8955 | 1.2999 | 0.8807 | 0.8800 | 0.8796 | 0.8807 | | 0.0028 | 16.0 | 9552 | 1.3428 | 0.8802 | 0.8805 | 0.8817 | 0.8802 | | 0.0029 | 17.0 | 10149 | 1.3959 | 0.8807 | 0.8807 | 0.8810 | 0.8807 | | 
0.0022 | 18.0 | 10746 | 1.4149 | 0.8827 | 0.8823 | 0.8824 | 0.8827 | | 0.0037 | 19.0 | 11343 | 1.4078 | 0.8840 | 0.8838 | 0.8838 | 0.8840 | | 0.001 | 20.0 | 11940 | 1.4236 | 0.8823 | 0.8823 | 0.8825 | 0.8823 | ### Framework versions - Transformers 4.25.1 - Pytorch 1.13.0+cu116 - Datasets 2.8.0 - Tokenizers 0.13.2
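The model description above notes that the class weights were adjusted to compensate for the imbalanced label distribution, but the card does not show how this was implemented. Below is a minimal, illustrative sketch of one common way to do it with `transformers`: inverse-frequency weights plus a `Trainer` subclass that applies them in the loss (the names `train_labels` and `WeightedTrainer` and the weight formula are assumptions, not the card's exact code); the `TrainingArguments` simply mirror the hyperparameters listed above.

```python
import torch
from collections import Counter
from transformers import Trainer, TrainingArguments

# Placeholder labels; in practice use the integer labels of the training split
# of zeroshot/twitter-financial-news-sentiment (three sentiment classes).
train_labels = [0, 1, 1, 2, 2, 2]

# Inverse-frequency class weights: rarer classes get larger weights.
counts = Counter(train_labels)
class_weights = torch.tensor(
    [len(train_labels) / counts[i] for i in range(len(counts))],
    dtype=torch.float,
)

class WeightedTrainer(Trainer):
    """Trainer that applies the class weights in a weighted cross-entropy loss."""

    def compute_loss(self, model, inputs, return_outputs=False, **kwargs):
        labels = inputs.pop("labels")
        outputs = model(**inputs)
        logits = outputs.logits
        loss_fct = torch.nn.CrossEntropyLoss(weight=class_weights.to(logits.device))
        loss = loss_fct(logits.view(-1, model.config.num_labels), labels.view(-1))
        return (loss, outputs) if return_outputs else loss

# Training arguments mirroring the hyperparameters reported in the card.
training_args = TrainingArguments(
    output_dir="finbert-tone-finetuned-fintwitter-classification",
    learning_rate=2e-5,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=16,
    seed=42,
    lr_scheduler_type="linear",
    num_train_epochs=20,
    fp16=True,  # Native AMP mixed-precision training
)

# trainer = WeightedTrainer(model=model, args=training_args,
#                           train_dataset=..., eval_dataset=...)
```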
[ "TEXT_CLASSIFICATION" ]
[ "BEAR" ]
KeyurRamoliya/multilingual-e5-large-instruct-GGUF
KeyurRamoliya
null
[ "sentence-transformers", "gguf", "mteb", "transformers", "llama-cpp", "gguf-my-repo", "multilingual", "af", "am", "ar", "as", "az", "be", "bg", "bn", "br", "bs", "ca", "cs", "cy", "da", "de", "el", "en", "eo", "es", "et", "eu", "fa", "fi", "fr", "fy", "ga", "gd", "gl", "gu", "ha", "he", "hi", "hr", "hu", "hy", "id", "is", "it", "ja", "jv", "ka", "kk", "km", "kn", "ko", "ku", "ky", "la", "lo", "lt", "lv", "mg", "mk", "ml", "mn", "mr", "ms", "my", "ne", "nl", "no", "om", "or", "pa", "pl", "ps", "pt", "ro", "ru", "sa", "sd", "si", "sk", "sl", "so", "sq", "sr", "su", "sv", "sw", "ta", "te", "th", "tl", "tr", "ug", "uk", "ur", "uz", "vi", "xh", "yi", "zh", "base_model:intfloat/multilingual-e5-large-instruct", "base_model:quantized:intfloat/multilingual-e5-large-instruct", "license:mit", "model-index", "endpoints_compatible", "region:us", "feature-extraction" ]
2024-08-23T05:29:29
2024-08-23T05:29:36
138
2
--- base_model: intfloat/multilingual-e5-large-instruct language: - multilingual - af - am - ar - as - az - be - bg - bn - br - bs - ca - cs - cy - da - de - el - en - eo - es - et - eu - fa - fi - fr - fy - ga - gd - gl - gu - ha - he - hi - hr - hu - hy - id - is - it - ja - jv - ka - kk - km - kn - ko - ku - ky - la - lo - lt - lv - mg - mk - ml - mn - mr - ms - my - ne - nl - 'no' - om - or - pa - pl - ps - pt - ro - ru - sa - sd - si - sk - sl - so - sq - sr - su - sv - sw - ta - te - th - tl - tr - ug - uk - ur - uz - vi - xh - yi - zh license: mit tags: - mteb - sentence-transformers - transformers - llama-cpp - gguf-my-repo model-index: - name: multilingual-e5-large-instruct results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 76.23880597014924 - type: ap value: 39.07351965022687 - type: f1 value: 70.04836733862683 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (de) type: mteb/amazon_counterfactual config: de split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 66.71306209850107 - type: ap value: 79.01499914759529 - type: f1 value: 64.81951817560703 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en-ext) type: mteb/amazon_counterfactual config: en-ext split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 73.85307346326837 - type: ap value: 22.447519885878737 - type: f1 value: 61.0162730745633 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (ja) type: mteb/amazon_counterfactual config: ja split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 76.04925053533191 - type: ap value: 23.44983217128922 - type: f1 value: 62.5723230907759 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 96.28742500000001 - type: ap value: 94.8449918887462 - type: f1 value: 96.28680923610432 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 56.716 - type: f1 value: 55.76510398266401 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (de) type: mteb/amazon_reviews_multi config: de split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 52.99999999999999 - type: f1 value: 52.00829994765178 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (es) type: mteb/amazon_reviews_multi config: es split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.806000000000004 - type: f1 value: 48.082345914983634 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.507999999999996 - type: f1 value: 47.68752844642045 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (ja) type: mteb/amazon_reviews_multi config: ja split: test revision: 
1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 47.709999999999994 - type: f1 value: 47.05870376637181 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 44.662000000000006 - type: f1 value: 43.42371965372771 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 31.721 - type: map_at_10 value: 49.221 - type: map_at_100 value: 49.884 - type: map_at_1000 value: 49.888 - type: map_at_3 value: 44.31 - type: map_at_5 value: 47.276 - type: mrr_at_1 value: 32.432 - type: mrr_at_10 value: 49.5 - type: mrr_at_100 value: 50.163000000000004 - type: mrr_at_1000 value: 50.166 - type: mrr_at_3 value: 44.618 - type: mrr_at_5 value: 47.541 - type: ndcg_at_1 value: 31.721 - type: ndcg_at_10 value: 58.384 - type: ndcg_at_100 value: 61.111000000000004 - type: ndcg_at_1000 value: 61.187999999999995 - type: ndcg_at_3 value: 48.386 - type: ndcg_at_5 value: 53.708999999999996 - type: precision_at_1 value: 31.721 - type: precision_at_10 value: 8.741 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 20.057 - type: precision_at_5 value: 14.609 - type: recall_at_1 value: 31.721 - type: recall_at_10 value: 87.411 - type: recall_at_100 value: 99.075 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 60.171 - type: recall_at_5 value: 73.044 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 46.40419580759799 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 40.48593255007969 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 63.889179122289995 - type: mrr value: 77.61146286769556 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 88.15075203727929 - type: cos_sim_spearman value: 86.9622224570873 - type: euclidean_pearson value: 86.70473853624121 - type: euclidean_spearman value: 86.9622224570873 - type: manhattan_pearson value: 86.21089380980065 - type: manhattan_spearman value: 86.75318154937008 - task: type: BitextMining dataset: name: MTEB BUCC (de-en) type: mteb/bucc-bitext-mining config: de-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 99.65553235908142 - type: f1 value: 99.60681976339595 - type: precision value: 99.58246346555325 - type: recall value: 99.65553235908142 - task: type: BitextMining dataset: name: MTEB BUCC (fr-en) type: mteb/bucc-bitext-mining config: fr-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 99.26260180497468 - type: f1 value: 99.14520507740848 - type: precision value: 99.08650671362535 - type: recall value: 99.26260180497468 - task: type: BitextMining dataset: name: MTEB BUCC (ru-en) type: mteb/bucc-bitext-mining config: ru-en split: 
test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 98.07412538967787 - type: f1 value: 97.86629719431936 - type: precision value: 97.76238309664012 - type: recall value: 98.07412538967787 - task: type: BitextMining dataset: name: MTEB BUCC (zh-en) type: mteb/bucc-bitext-mining config: zh-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 99.42074776197998 - type: f1 value: 99.38564156573635 - type: precision value: 99.36808846761454 - type: recall value: 99.42074776197998 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 85.73376623376623 - type: f1 value: 85.68480707214599 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 40.935218072113855 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 36.276389017675264 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 27.764166666666668 - type: map_at_10 value: 37.298166666666674 - type: map_at_100 value: 38.530166666666666 - type: map_at_1000 value: 38.64416666666667 - type: map_at_3 value: 34.484833333333334 - type: map_at_5 value: 36.0385 - type: mrr_at_1 value: 32.93558333333333 - type: mrr_at_10 value: 41.589749999999995 - type: mrr_at_100 value: 42.425333333333334 - type: mrr_at_1000 value: 42.476333333333336 - type: mrr_at_3 value: 39.26825 - type: mrr_at_5 value: 40.567083333333336 - type: ndcg_at_1 value: 32.93558333333333 - type: ndcg_at_10 value: 42.706583333333334 - type: ndcg_at_100 value: 47.82483333333333 - type: ndcg_at_1000 value: 49.95733333333334 - type: ndcg_at_3 value: 38.064750000000004 - type: ndcg_at_5 value: 40.18158333333333 - type: precision_at_1 value: 32.93558333333333 - type: precision_at_10 value: 7.459833333333334 - type: precision_at_100 value: 1.1830833333333335 - type: precision_at_1000 value: 0.15608333333333332 - type: precision_at_3 value: 17.5235 - type: precision_at_5 value: 12.349833333333333 - type: recall_at_1 value: 27.764166666666668 - type: recall_at_10 value: 54.31775 - type: recall_at_100 value: 76.74350000000001 - type: recall_at_1000 value: 91.45208333333332 - type: recall_at_3 value: 41.23425 - type: recall_at_5 value: 46.73983333333334 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 12.969 - type: map_at_10 value: 21.584999999999997 - type: map_at_100 value: 23.3 - type: map_at_1000 value: 23.5 - type: map_at_3 value: 18.218999999999998 - type: map_at_5 value: 19.983 - type: mrr_at_1 value: 29.316 - type: mrr_at_10 value: 40.033 - type: mrr_at_100 value: 40.96 - type: mrr_at_1000 value: 41.001 - type: mrr_at_3 value: 37.123 - type: mrr_at_5 value: 38.757999999999996 - type: ndcg_at_1 value: 29.316 - type: ndcg_at_10 value: 29.858 - type: ndcg_at_100 value: 36.756 - type: ndcg_at_1000 value: 40.245999999999995 - type: ndcg_at_3 value: 24.822 - type: ndcg_at_5 value: 26.565 - type: precision_at_1 value: 29.316 - type: 
precision_at_10 value: 9.186 - type: precision_at_100 value: 1.6549999999999998 - type: precision_at_1000 value: 0.22999999999999998 - type: precision_at_3 value: 18.436 - type: precision_at_5 value: 13.876 - type: recall_at_1 value: 12.969 - type: recall_at_10 value: 35.142 - type: recall_at_100 value: 59.143 - type: recall_at_1000 value: 78.594 - type: recall_at_3 value: 22.604 - type: recall_at_5 value: 27.883000000000003 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 8.527999999999999 - type: map_at_10 value: 17.974999999999998 - type: map_at_100 value: 25.665 - type: map_at_1000 value: 27.406000000000002 - type: map_at_3 value: 13.017999999999999 - type: map_at_5 value: 15.137 - type: mrr_at_1 value: 62.5 - type: mrr_at_10 value: 71.891 - type: mrr_at_100 value: 72.294 - type: mrr_at_1000 value: 72.296 - type: mrr_at_3 value: 69.958 - type: mrr_at_5 value: 71.121 - type: ndcg_at_1 value: 50.875 - type: ndcg_at_10 value: 38.36 - type: ndcg_at_100 value: 44.235 - type: ndcg_at_1000 value: 52.154 - type: ndcg_at_3 value: 43.008 - type: ndcg_at_5 value: 40.083999999999996 - type: precision_at_1 value: 62.5 - type: precision_at_10 value: 30.0 - type: precision_at_100 value: 10.038 - type: precision_at_1000 value: 2.0869999999999997 - type: precision_at_3 value: 46.833000000000006 - type: precision_at_5 value: 38.800000000000004 - type: recall_at_1 value: 8.527999999999999 - type: recall_at_10 value: 23.828 - type: recall_at_100 value: 52.322 - type: recall_at_1000 value: 77.143 - type: recall_at_3 value: 14.136000000000001 - type: recall_at_5 value: 17.761 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 51.51 - type: f1 value: 47.632159862049896 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 60.734 - type: map_at_10 value: 72.442 - type: map_at_100 value: 72.735 - type: map_at_1000 value: 72.75 - type: map_at_3 value: 70.41199999999999 - type: map_at_5 value: 71.80499999999999 - type: mrr_at_1 value: 65.212 - type: mrr_at_10 value: 76.613 - type: mrr_at_100 value: 76.79899999999999 - type: mrr_at_1000 value: 76.801 - type: mrr_at_3 value: 74.8 - type: mrr_at_5 value: 76.12400000000001 - type: ndcg_at_1 value: 65.212 - type: ndcg_at_10 value: 77.988 - type: ndcg_at_100 value: 79.167 - type: ndcg_at_1000 value: 79.452 - type: ndcg_at_3 value: 74.362 - type: ndcg_at_5 value: 76.666 - type: precision_at_1 value: 65.212 - type: precision_at_10 value: 10.003 - type: precision_at_100 value: 1.077 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 29.518 - type: precision_at_5 value: 19.016 - type: recall_at_1 value: 60.734 - type: recall_at_10 value: 90.824 - type: recall_at_100 value: 95.71600000000001 - type: recall_at_1000 value: 97.577 - type: recall_at_3 value: 81.243 - type: recall_at_5 value: 86.90299999999999 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 23.845 - type: map_at_10 value: 39.281 - type: map_at_100 value: 41.422 - type: map_at_1000 value: 41.593 - type: map_at_3 value: 34.467 - type: map_at_5 value: 37.017 - type: mrr_at_1 value: 47.531 - type: mrr_at_10 value: 56.204 - type: mrr_at_100 value: 56.928999999999995 
- type: mrr_at_1000 value: 56.962999999999994 - type: mrr_at_3 value: 54.115 - type: mrr_at_5 value: 55.373000000000005 - type: ndcg_at_1 value: 47.531 - type: ndcg_at_10 value: 47.711999999999996 - type: ndcg_at_100 value: 54.510999999999996 - type: ndcg_at_1000 value: 57.103 - type: ndcg_at_3 value: 44.145 - type: ndcg_at_5 value: 45.032 - type: precision_at_1 value: 47.531 - type: precision_at_10 value: 13.194 - type: precision_at_100 value: 2.045 - type: precision_at_1000 value: 0.249 - type: precision_at_3 value: 29.424 - type: precision_at_5 value: 21.451 - type: recall_at_1 value: 23.845 - type: recall_at_10 value: 54.967 - type: recall_at_100 value: 79.11399999999999 - type: recall_at_1000 value: 94.56700000000001 - type: recall_at_3 value: 40.256 - type: recall_at_5 value: 46.215 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 37.819 - type: map_at_10 value: 60.889 - type: map_at_100 value: 61.717999999999996 - type: map_at_1000 value: 61.778 - type: map_at_3 value: 57.254000000000005 - type: map_at_5 value: 59.541 - type: mrr_at_1 value: 75.638 - type: mrr_at_10 value: 82.173 - type: mrr_at_100 value: 82.362 - type: mrr_at_1000 value: 82.37 - type: mrr_at_3 value: 81.089 - type: mrr_at_5 value: 81.827 - type: ndcg_at_1 value: 75.638 - type: ndcg_at_10 value: 69.317 - type: ndcg_at_100 value: 72.221 - type: ndcg_at_1000 value: 73.382 - type: ndcg_at_3 value: 64.14 - type: ndcg_at_5 value: 67.07600000000001 - type: precision_at_1 value: 75.638 - type: precision_at_10 value: 14.704999999999998 - type: precision_at_100 value: 1.698 - type: precision_at_1000 value: 0.185 - type: precision_at_3 value: 41.394999999999996 - type: precision_at_5 value: 27.162999999999997 - type: recall_at_1 value: 37.819 - type: recall_at_10 value: 73.52499999999999 - type: recall_at_100 value: 84.875 - type: recall_at_1000 value: 92.559 - type: recall_at_3 value: 62.092999999999996 - type: recall_at_5 value: 67.907 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 94.60079999999999 - type: ap value: 92.67396345347356 - type: f1 value: 94.5988098167121 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 21.285 - type: map_at_10 value: 33.436 - type: map_at_100 value: 34.63 - type: map_at_1000 value: 34.681 - type: map_at_3 value: 29.412 - type: map_at_5 value: 31.715 - type: mrr_at_1 value: 21.848 - type: mrr_at_10 value: 33.979 - type: mrr_at_100 value: 35.118 - type: mrr_at_1000 value: 35.162 - type: mrr_at_3 value: 30.036 - type: mrr_at_5 value: 32.298 - type: ndcg_at_1 value: 21.862000000000002 - type: ndcg_at_10 value: 40.43 - type: ndcg_at_100 value: 46.17 - type: ndcg_at_1000 value: 47.412 - type: ndcg_at_3 value: 32.221 - type: ndcg_at_5 value: 36.332 - type: precision_at_1 value: 21.862000000000002 - type: precision_at_10 value: 6.491 - type: precision_at_100 value: 0.935 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 13.744 - type: precision_at_5 value: 10.331999999999999 - type: recall_at_1 value: 21.285 - type: recall_at_10 value: 62.083 - type: recall_at_100 value: 88.576 - type: recall_at_1000 value: 98.006 - type: recall_at_3 value: 39.729 - type: recall_at_5 value: 49.608000000000004 - task: type: Classification dataset: name: MTEB 
MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.92612859097127 - type: f1 value: 93.82370333372853 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (de) type: mteb/mtop_domain config: de split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 92.67681036911807 - type: f1 value: 92.14191382411472 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (es) type: mteb/mtop_domain config: es split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 92.26817878585723 - type: f1 value: 91.92824250337878 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 89.96554963983714 - type: f1 value: 90.02859329630792 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (hi) type: mteb/mtop_domain config: hi split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 90.02509860164935 - type: f1 value: 89.30665159182062 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (th) type: mteb/mtop_domain config: th split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 87.55515370705244 - type: f1 value: 87.94449232331907 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 82.4623803009576 - type: f1 value: 66.06738378772725 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (de) type: mteb/mtop_intent config: de split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 79.3716539870386 - type: f1 value: 60.37614033396853 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (es) type: mteb/mtop_intent config: es split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 80.34022681787857 - type: f1 value: 58.302008026952 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 76.72095208268087 - type: f1 value: 59.64524724009049 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (hi) type: mteb/mtop_intent config: hi split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 77.87020437432773 - type: f1 value: 57.80202694670567 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (th) type: mteb/mtop_intent config: th split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 77.73598553345387 - type: f1 value: 58.19628250675031 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (af) type: mteb/amazon_massive_intent config: af split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.6630800268998 - type: f1 value: 65.00996668051691 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (am) type: mteb/amazon_massive_intent config: am split: test revision: 
31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.7128446536651 - type: f1 value: 57.95860594874963 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ar) type: mteb/amazon_massive_intent config: ar split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.61129791526563 - type: f1 value: 59.75328290206483 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (az) type: mteb/amazon_massive_intent config: az split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.00134498991257 - type: f1 value: 67.0230483991802 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (bn) type: mteb/amazon_massive_intent config: bn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.54068594485541 - type: f1 value: 65.54604628946976 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (cy) type: mteb/amazon_massive_intent config: cy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.032952252858095 - type: f1 value: 58.715741857057104 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (da) type: mteb/amazon_massive_intent config: da split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.80901143241427 - type: f1 value: 68.33963989243877 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (de) type: mteb/amazon_massive_intent config: de split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.47141896435777 - type: f1 value: 69.56765020308262 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (el) type: mteb/amazon_massive_intent config: el split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.2373907195696 - type: f1 value: 69.04529836036467 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 77.05783456624076 - type: f1 value: 74.69430584708174 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (es) type: mteb/amazon_massive_intent config: es split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.82111634162744 - type: f1 value: 70.77228952803762 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fa) type: mteb/amazon_massive_intent config: fa split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.25353059852051 - type: f1 value: 71.05310103416411 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fi) type: mteb/amazon_massive_intent config: fi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.28648285137861 - type: f1 value: 69.08020473732226 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.31540013449899 - type: f1 value: 70.9426355465791 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (he) type: mteb/amazon_massive_intent 
config: he split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 70.2151983860121 - type: f1 value: 67.52541755908858 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hi) type: mteb/amazon_massive_intent config: hi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.58372562205784 - type: f1 value: 69.49769064229827 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hu) type: mteb/amazon_massive_intent config: hu split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.9233355749832 - type: f1 value: 69.36311548259593 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hy) type: mteb/amazon_massive_intent config: hy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.07330195023538 - type: f1 value: 64.99882022345572 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (id) type: mteb/amazon_massive_intent config: id split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.62273032952253 - type: f1 value: 70.6394885471001 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (is) type: mteb/amazon_massive_intent config: is split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.77000672494957 - type: f1 value: 62.9368944815065 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (it) type: mteb/amazon_massive_intent config: it split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.453261600538 - type: f1 value: 70.85069934666681 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ja) type: mteb/amazon_massive_intent config: ja split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.6906523201076 - type: f1 value: 72.03249740074217 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (jv) type: mteb/amazon_massive_intent config: jv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.03631472763953 - type: f1 value: 59.3165215571852 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ka) type: mteb/amazon_massive_intent config: ka split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.913920645595155 - type: f1 value: 57.367337711611285 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (km) type: mteb/amazon_massive_intent config: km split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 54.42837928715535 - type: f1 value: 52.60527294970906 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (kn) type: mteb/amazon_massive_intent config: kn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.33490248823135 - type: f1 value: 63.213340969404065 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ko) type: mteb/amazon_massive_intent config: ko split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 70.58507061197041 - type: f1 value: 68.40256628040486 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (lv) 
type: mteb/amazon_massive_intent config: lv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.11230665770006 - type: f1 value: 66.44863577842305 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ml) type: mteb/amazon_massive_intent config: ml split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.70073974445192 - type: f1 value: 67.21291337273702 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (mn) type: mteb/amazon_massive_intent config: mn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.43913920645595 - type: f1 value: 64.09838087422806 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ms) type: mteb/amazon_massive_intent config: ms split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 70.80026899798251 - type: f1 value: 68.76986742962444 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (my) type: mteb/amazon_massive_intent config: my split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.78816408876934 - type: f1 value: 62.18781873428972 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nb) type: mteb/amazon_massive_intent config: nb split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.6577000672495 - type: f1 value: 68.75171511133003 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nl) type: mteb/amazon_massive_intent config: nl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.42501681237391 - type: f1 value: 71.18434963451544 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.64828513786146 - type: f1 value: 70.67741914007422 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pt) type: mteb/amazon_massive_intent config: pt split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.62811028917284 - type: f1 value: 71.36402039740959 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ro) type: mteb/amazon_massive_intent config: ro split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.88634835238736 - type: f1 value: 69.23701923480677 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ru) type: mteb/amazon_massive_intent config: ru split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.15938130464022 - type: f1 value: 71.87792218993388 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sl) type: mteb/amazon_massive_intent config: sl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.96301277740416 - type: f1 value: 67.29584200202983 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sq) type: mteb/amazon_massive_intent config: sq split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.49562878278412 - type: f1 value: 66.91716685679431 - task: type: Classification dataset: name: MTEB 
MassiveIntentClassification (sv) type: mteb/amazon_massive_intent config: sv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.6805648957633 - type: f1 value: 72.02723592594374 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sw) type: mteb/amazon_massive_intent config: sw split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.00605245460659 - type: f1 value: 60.16716669482932 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ta) type: mteb/amazon_massive_intent config: ta split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.90988567585742 - type: f1 value: 63.99405488777784 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (te) type: mteb/amazon_massive_intent config: te split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.62273032952253 - type: f1 value: 65.17213906909481 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (th) type: mteb/amazon_massive_intent config: th split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.50907868190988 - type: f1 value: 69.15165697194853 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tl) type: mteb/amazon_massive_intent config: tl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.30733019502352 - type: f1 value: 66.69024007380474 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tr) type: mteb/amazon_massive_intent config: tr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.24277067921989 - type: f1 value: 68.80515408492947 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ur) type: mteb/amazon_massive_intent config: ur split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.49831876260929 - type: f1 value: 64.83778567111116 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (vi) type: mteb/amazon_massive_intent config: vi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.28782784129119 - type: f1 value: 69.3294186700733 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.315400134499 - type: f1 value: 71.22674385243207 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-TW) type: mteb/amazon_massive_intent config: zh-TW split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.37794216543377 - type: f1 value: 68.96962492838232 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (af) type: mteb/amazon_massive_scenario config: af split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.33557498318764 - type: f1 value: 72.28949738478356 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (am) type: mteb/amazon_massive_scenario config: am split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 65.84398117014123 - type: f1 value: 64.71026362091463 
- task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ar) type: mteb/amazon_massive_scenario config: ar split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.76462676529925 - type: f1 value: 69.8229667407667 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (az) type: mteb/amazon_massive_scenario config: az split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.02420981842636 - type: f1 value: 71.76576384895898 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (bn) type: mteb/amazon_massive_scenario config: bn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.7572293207801 - type: f1 value: 72.76840765295256 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (cy) type: mteb/amazon_massive_scenario config: cy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.02286482851379 - type: f1 value: 66.17237947327872 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (da) type: mteb/amazon_massive_scenario config: da split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.60928043039678 - type: f1 value: 77.27094731234773 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (de) type: mteb/amazon_massive_scenario config: de split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.68325487558843 - type: f1 value: 77.97530399082261 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (el) type: mteb/amazon_massive_scenario config: el split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.13315400134498 - type: f1 value: 75.97558584796424 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 80.47410894418292 - type: f1 value: 80.52244841473792 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (es) type: mteb/amazon_massive_scenario config: es split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.9670477471419 - type: f1 value: 77.37318805793146 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fa) type: mteb/amazon_massive_scenario config: fa split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 78.09683927370544 - type: f1 value: 77.69773737430847 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fi) type: mteb/amazon_massive_scenario config: fi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.20847343644922 - type: f1 value: 75.17071738727348 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.07464694014796 - type: f1 value: 77.16136207698571 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (he) type: mteb/amazon_massive_scenario config: he split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 
metrics: - type: accuracy value: 73.53396099529255 - type: f1 value: 73.58296404484122 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hi) type: mteb/amazon_massive_scenario config: hi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.75319435104237 - type: f1 value: 75.24674707850833 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hu) type: mteb/amazon_massive_scenario config: hu split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.0948217888366 - type: f1 value: 76.47559490205028 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hy) type: mteb/amazon_massive_scenario config: hy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.07599193006052 - type: f1 value: 70.76028043093511 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (id) type: mteb/amazon_massive_scenario config: id split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.10490921318089 - type: f1 value: 77.01215275283272 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (is) type: mteb/amazon_massive_scenario config: is split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.25756556825824 - type: f1 value: 70.20605314648762 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (it) type: mteb/amazon_massive_scenario config: it split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.08137188971082 - type: f1 value: 77.3899269057439 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ja) type: mteb/amazon_massive_scenario config: ja split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 79.35440484196369 - type: f1 value: 79.58964690002772 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (jv) type: mteb/amazon_massive_scenario config: jv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.42299932750504 - type: f1 value: 68.07844356925413 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ka) type: mteb/amazon_massive_scenario config: ka split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.15669132481507 - type: f1 value: 65.89383352608513 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (km) type: mteb/amazon_massive_scenario config: km split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 60.11432414256894 - type: f1 value: 57.69910594559806 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (kn) type: mteb/amazon_massive_scenario config: kn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.24747814391392 - type: f1 value: 70.42455553830918 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ko) type: mteb/amazon_massive_scenario config: ko split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.46267652992603 - type: f1 value: 76.8854559308316 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (lv) type: 
mteb/amazon_massive_scenario config: lv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.24815063887021 - type: f1 value: 72.77805034658074 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ml) type: mteb/amazon_massive_scenario config: ml split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.11566913248151 - type: f1 value: 73.86147988001356 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (mn) type: mteb/amazon_massive_scenario config: mn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.0168123739072 - type: f1 value: 69.38515920054571 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ms) type: mteb/amazon_massive_scenario config: ms split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.41156691324814 - type: f1 value: 73.43474953408237 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (my) type: mteb/amazon_massive_scenario config: my split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.39609952925353 - type: f1 value: 67.29731681109291 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nb) type: mteb/amazon_massive_scenario config: nb split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.20914593140552 - type: f1 value: 77.07066497935367 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nl) type: mteb/amazon_massive_scenario config: nl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 78.52387357094821 - type: f1 value: 78.5259569473291 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.6913248150639 - type: f1 value: 76.91201656350455 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pt) type: mteb/amazon_massive_scenario config: pt split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.1217215870881 - type: f1 value: 77.41179937912504 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ro) type: mteb/amazon_massive_scenario config: ro split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.25891055817083 - type: f1 value: 75.8089244542887 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ru) type: mteb/amazon_massive_scenario config: ru split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.70679219905851 - type: f1 value: 78.21459594517711 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sl) type: mteb/amazon_massive_scenario config: sl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.83523873570948 - type: f1 value: 74.86847028401978 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sq) type: mteb/amazon_massive_scenario config: sq split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.71755211835911 - type: f1 value: 74.0214326485662 - task: 
type: Classification dataset: name: MTEB MassiveScenarioClassification (sv) type: mteb/amazon_massive_scenario config: sv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 79.06523201075991 - type: f1 value: 79.10545620325138 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sw) type: mteb/amazon_massive_scenario config: sw split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.91862811028918 - type: f1 value: 66.50386121217983 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ta) type: mteb/amazon_massive_scenario config: ta split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.93140551445865 - type: f1 value: 70.755435928495 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (te) type: mteb/amazon_massive_scenario config: te split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.40753194351042 - type: f1 value: 71.61816115782923 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (th) type: mteb/amazon_massive_scenario config: th split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.1815736381977 - type: f1 value: 75.08016717887205 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tl) type: mteb/amazon_massive_scenario config: tl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.86482851378614 - type: f1 value: 72.39521180006291 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tr) type: mteb/amazon_massive_scenario config: tr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.46940147948891 - type: f1 value: 76.70044085362349 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ur) type: mteb/amazon_massive_scenario config: ur split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.89307330195024 - type: f1 value: 71.5721825332298 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (vi) type: mteb/amazon_massive_scenario config: vi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.7511768661735 - type: f1 value: 75.17918654541515 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 78.69535978480162 - type: f1 value: 78.90019070153316 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-TW) type: mteb/amazon_massive_scenario config: zh-TW split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.45729657027572 - type: f1 value: 76.19578371794672 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 36.92715354123554 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 35.53536244162518 - task: type: 
Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 33.08507884504006 - type: mrr value: 34.32436977159129 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.935 - type: map_at_10 value: 13.297 - type: map_at_100 value: 16.907 - type: map_at_1000 value: 18.391 - type: map_at_3 value: 9.626999999999999 - type: map_at_5 value: 11.190999999999999 - type: mrr_at_1 value: 46.129999999999995 - type: mrr_at_10 value: 54.346000000000004 - type: mrr_at_100 value: 55.067 - type: mrr_at_1000 value: 55.1 - type: mrr_at_3 value: 51.961 - type: mrr_at_5 value: 53.246 - type: ndcg_at_1 value: 44.118 - type: ndcg_at_10 value: 35.534 - type: ndcg_at_100 value: 32.946999999999996 - type: ndcg_at_1000 value: 41.599000000000004 - type: ndcg_at_3 value: 40.25 - type: ndcg_at_5 value: 37.978 - type: precision_at_1 value: 46.129999999999995 - type: precision_at_10 value: 26.842 - type: precision_at_100 value: 8.427 - type: precision_at_1000 value: 2.128 - type: precision_at_3 value: 37.977 - type: precision_at_5 value: 32.879000000000005 - type: recall_at_1 value: 5.935 - type: recall_at_10 value: 17.211000000000002 - type: recall_at_100 value: 34.33 - type: recall_at_1000 value: 65.551 - type: recall_at_3 value: 10.483 - type: recall_at_5 value: 13.078999999999999 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 35.231 - type: map_at_10 value: 50.202000000000005 - type: map_at_100 value: 51.154999999999994 - type: map_at_1000 value: 51.181 - type: map_at_3 value: 45.774 - type: map_at_5 value: 48.522 - type: mrr_at_1 value: 39.687 - type: mrr_at_10 value: 52.88 - type: mrr_at_100 value: 53.569 - type: mrr_at_1000 value: 53.58500000000001 - type: mrr_at_3 value: 49.228 - type: mrr_at_5 value: 51.525 - type: ndcg_at_1 value: 39.687 - type: ndcg_at_10 value: 57.754000000000005 - type: ndcg_at_100 value: 61.597 - type: ndcg_at_1000 value: 62.18900000000001 - type: ndcg_at_3 value: 49.55 - type: ndcg_at_5 value: 54.11899999999999 - type: precision_at_1 value: 39.687 - type: precision_at_10 value: 9.313 - type: precision_at_100 value: 1.146 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 22.229 - type: precision_at_5 value: 15.939 - type: recall_at_1 value: 35.231 - type: recall_at_10 value: 78.083 - type: recall_at_100 value: 94.42099999999999 - type: recall_at_1000 value: 98.81 - type: recall_at_3 value: 57.047000000000004 - type: recall_at_5 value: 67.637 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 71.241 - type: map_at_10 value: 85.462 - type: map_at_100 value: 86.083 - type: map_at_1000 value: 86.09700000000001 - type: map_at_3 value: 82.49499999999999 - type: map_at_5 value: 84.392 - type: mrr_at_1 value: 82.09 - type: mrr_at_10 value: 88.301 - type: mrr_at_100 value: 88.383 - type: mrr_at_1000 value: 88.384 - type: mrr_at_3 value: 87.37 - type: mrr_at_5 value: 88.035 - type: ndcg_at_1 value: 82.12 - type: ndcg_at_10 value: 89.149 - type: ndcg_at_100 value: 90.235 - type: ndcg_at_1000 value: 90.307 - type: ndcg_at_3 value: 86.37599999999999 - type: ndcg_at_5 value: 87.964 - type: precision_at_1 value: 82.12 - type: precision_at_10 value: 13.56 - type: precision_at_100 value: 1.539 - type: 
precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.88 - type: precision_at_5 value: 24.92 - type: recall_at_1 value: 71.241 - type: recall_at_10 value: 96.128 - type: recall_at_100 value: 99.696 - type: recall_at_1000 value: 99.994 - type: recall_at_3 value: 88.181 - type: recall_at_5 value: 92.694 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 56.59757799655151 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 64.27391998854624 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.243 - type: map_at_10 value: 10.965 - type: map_at_100 value: 12.934999999999999 - type: map_at_1000 value: 13.256 - type: map_at_3 value: 7.907 - type: map_at_5 value: 9.435 - type: mrr_at_1 value: 20.9 - type: mrr_at_10 value: 31.849 - type: mrr_at_100 value: 32.964 - type: mrr_at_1000 value: 33.024 - type: mrr_at_3 value: 28.517 - type: mrr_at_5 value: 30.381999999999998 - type: ndcg_at_1 value: 20.9 - type: ndcg_at_10 value: 18.723 - type: ndcg_at_100 value: 26.384999999999998 - type: ndcg_at_1000 value: 32.114 - type: ndcg_at_3 value: 17.753 - type: ndcg_at_5 value: 15.558 - type: precision_at_1 value: 20.9 - type: precision_at_10 value: 9.8 - type: precision_at_100 value: 2.078 - type: precision_at_1000 value: 0.345 - type: precision_at_3 value: 16.900000000000002 - type: precision_at_5 value: 13.88 - type: recall_at_1 value: 4.243 - type: recall_at_10 value: 19.885 - type: recall_at_100 value: 42.17 - type: recall_at_1000 value: 70.12 - type: recall_at_3 value: 10.288 - type: recall_at_5 value: 14.072000000000001 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 85.84209174935282 - type: cos_sim_spearman value: 81.73248048438833 - type: euclidean_pearson value: 83.02810070308149 - type: euclidean_spearman value: 81.73248295679514 - type: manhattan_pearson value: 82.95368060376002 - type: manhattan_spearman value: 81.60277910998718 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 88.52628804556943 - type: cos_sim_spearman value: 82.5713913555672 - type: euclidean_pearson value: 85.8796774746988 - type: euclidean_spearman value: 82.57137506803424 - type: manhattan_pearson value: 85.79671002960058 - type: manhattan_spearman value: 82.49445981618027 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 86.23682503505542 - type: cos_sim_spearman value: 87.15008956711806 - type: euclidean_pearson value: 86.79805401524959 - type: euclidean_spearman value: 87.15008956711806 - type: manhattan_pearson value: 86.65298502699244 - type: manhattan_spearman value: 86.97677821948562 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 85.63370304677802 - type: cos_sim_spearman value: 
84.97105553540318 - type: euclidean_pearson value: 85.28896108687721 - type: euclidean_spearman value: 84.97105553540318 - type: manhattan_pearson value: 85.09663190337331 - type: manhattan_spearman value: 84.79126831644619 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 90.2614838800733 - type: cos_sim_spearman value: 91.0509162991835 - type: euclidean_pearson value: 90.33098317533373 - type: euclidean_spearman value: 91.05091625871644 - type: manhattan_pearson value: 90.26250435151107 - type: manhattan_spearman value: 90.97999594417519 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 85.80480973335091 - type: cos_sim_spearman value: 87.313695492969 - type: euclidean_pearson value: 86.49267251576939 - type: euclidean_spearman value: 87.313695492969 - type: manhattan_pearson value: 86.44019901831935 - type: manhattan_spearman value: 87.24205395460392 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 90.05662789380672 - type: cos_sim_spearman value: 90.02759424426651 - type: euclidean_pearson value: 90.4042483422981 - type: euclidean_spearman value: 90.02759424426651 - type: manhattan_pearson value: 90.51446975000226 - type: manhattan_spearman value: 90.08832889933616 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 67.5975528273532 - type: cos_sim_spearman value: 67.62969861411354 - type: euclidean_pearson value: 69.224275734323 - type: euclidean_spearman value: 67.62969861411354 - type: manhattan_pearson value: 69.3761447059927 - type: manhattan_spearman value: 67.90921005611467 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 87.11244327231684 - type: cos_sim_spearman value: 88.37902438979035 - type: euclidean_pearson value: 87.86054279847336 - type: euclidean_spearman value: 88.37902438979035 - type: manhattan_pearson value: 87.77257757320378 - type: manhattan_spearman value: 88.25208966098123 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 85.87174608143563 - type: mrr value: 96.12836872640794 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 57.760999999999996 - type: map_at_10 value: 67.258 - type: map_at_100 value: 67.757 - type: map_at_1000 value: 67.78800000000001 - type: map_at_3 value: 64.602 - type: map_at_5 value: 65.64 - type: mrr_at_1 value: 60.667 - type: mrr_at_10 value: 68.441 - type: mrr_at_100 value: 68.825 - type: mrr_at_1000 value: 68.853 - type: mrr_at_3 value: 66.444 - type: mrr_at_5 value: 67.26100000000001 - type: ndcg_at_1 value: 60.667 - type: ndcg_at_10 value: 71.852 - type: ndcg_at_100 value: 73.9 - type: ndcg_at_1000 value: 74.628 - type: ndcg_at_3 value: 67.093 - type: ndcg_at_5 value: 68.58 - type: precision_at_1 value: 
60.667 - type: precision_at_10 value: 9.6 - type: precision_at_100 value: 1.0670000000000002 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 26.111 - type: precision_at_5 value: 16.733 - type: recall_at_1 value: 57.760999999999996 - type: recall_at_10 value: 84.967 - type: recall_at_100 value: 93.833 - type: recall_at_1000 value: 99.333 - type: recall_at_3 value: 71.589 - type: recall_at_5 value: 75.483 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.66633663366336 - type: cos_sim_ap value: 91.17685358899108 - type: cos_sim_f1 value: 82.16818642350559 - type: cos_sim_precision value: 83.26488706365504 - type: cos_sim_recall value: 81.10000000000001 - type: dot_accuracy value: 99.66633663366336 - type: dot_ap value: 91.17663411119032 - type: dot_f1 value: 82.16818642350559 - type: dot_precision value: 83.26488706365504 - type: dot_recall value: 81.10000000000001 - type: euclidean_accuracy value: 99.66633663366336 - type: euclidean_ap value: 91.17685189882275 - type: euclidean_f1 value: 82.16818642350559 - type: euclidean_precision value: 83.26488706365504 - type: euclidean_recall value: 81.10000000000001 - type: manhattan_accuracy value: 99.66633663366336 - type: manhattan_ap value: 91.2241619496737 - type: manhattan_f1 value: 82.20472440944883 - type: manhattan_precision value: 86.51933701657458 - type: manhattan_recall value: 78.3 - type: max_accuracy value: 99.66633663366336 - type: max_ap value: 91.2241619496737 - type: max_f1 value: 82.20472440944883 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 66.85101268897951 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 42.461184054706905 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 51.44542568873886 - type: mrr value: 52.33656151854681 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.75982974997539 - type: cos_sim_spearman value: 30.385405026539914 - type: dot_pearson value: 30.75982433546523 - type: dot_spearman value: 30.385405026539914 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.22799999999999998 - type: map_at_10 value: 2.064 - type: map_at_100 value: 13.056000000000001 - type: map_at_1000 value: 31.747999999999998 - type: map_at_3 value: 0.67 - type: map_at_5 value: 1.097 - type: mrr_at_1 value: 90.0 - type: mrr_at_10 value: 94.667 - type: mrr_at_100 value: 94.667 - type: mrr_at_1000 value: 94.667 - type: mrr_at_3 value: 94.667 - type: mrr_at_5 value: 94.667 - type: ndcg_at_1 value: 86.0 - type: ndcg_at_10 value: 82.0 - type: ndcg_at_100 value: 64.307 - type: ndcg_at_1000 value: 57.023999999999994 - type: ndcg_at_3 value: 85.816 - 
type: ndcg_at_5 value: 84.904 - type: precision_at_1 value: 90.0 - type: precision_at_10 value: 85.8 - type: precision_at_100 value: 66.46 - type: precision_at_1000 value: 25.202 - type: precision_at_3 value: 90.0 - type: precision_at_5 value: 89.2 - type: recall_at_1 value: 0.22799999999999998 - type: recall_at_10 value: 2.235 - type: recall_at_100 value: 16.185 - type: recall_at_1000 value: 53.620999999999995 - type: recall_at_3 value: 0.7040000000000001 - type: recall_at_5 value: 1.172 - task: type: BitextMining dataset: name: MTEB Tatoeba (sqi-eng) type: mteb/tatoeba-bitext-mining config: sqi-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.39999999999999 - type: f1 value: 96.75 - type: precision value: 96.45 - type: recall value: 97.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (fry-eng) type: mteb/tatoeba-bitext-mining config: fry-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.54913294797689 - type: f1 value: 82.46628131021194 - type: precision value: 81.1175337186898 - type: recall value: 85.54913294797689 - task: type: BitextMining dataset: name: MTEB Tatoeba (kur-eng) type: mteb/tatoeba-bitext-mining config: kur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 81.21951219512195 - type: f1 value: 77.33333333333334 - type: precision value: 75.54878048780488 - type: recall value: 81.21951219512195 - task: type: BitextMining dataset: name: MTEB Tatoeba (tur-eng) type: mteb/tatoeba-bitext-mining config: tur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 98.6 - type: f1 value: 98.26666666666665 - type: precision value: 98.1 - type: recall value: 98.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (deu-eng) type: mteb/tatoeba-bitext-mining config: deu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 99.5 - type: f1 value: 99.33333333333333 - type: precision value: 99.25 - type: recall value: 99.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (nld-eng) type: mteb/tatoeba-bitext-mining config: nld-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.8 - type: f1 value: 97.2 - type: precision value: 96.89999999999999 - type: recall value: 97.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (ron-eng) type: mteb/tatoeba-bitext-mining config: ron-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.8 - type: f1 value: 97.18333333333334 - type: precision value: 96.88333333333333 - type: recall value: 97.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (ang-eng) type: mteb/tatoeba-bitext-mining config: ang-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.61194029850746 - type: f1 value: 72.81094527363183 - type: precision value: 70.83333333333333 - type: recall value: 77.61194029850746 - task: type: BitextMining dataset: name: MTEB Tatoeba (ido-eng) type: mteb/tatoeba-bitext-mining config: ido-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.7 - type: f1 value: 91.91666666666667 - type: precision value: 91.08333333333334 - type: recall value: 93.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (jav-eng) type: mteb/tatoeba-bitext-mining config: jav-eng split: test 
revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.29268292682927 - type: f1 value: 85.27642276422765 - type: precision value: 84.01277584204414 - type: recall value: 88.29268292682927 - task: type: BitextMining dataset: name: MTEB Tatoeba (isl-eng) type: mteb/tatoeba-bitext-mining config: isl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.1 - type: f1 value: 95.0 - type: precision value: 94.46666666666668 - type: recall value: 96.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (slv-eng) type: mteb/tatoeba-bitext-mining config: slv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.681652490887 - type: f1 value: 91.90765492102065 - type: precision value: 91.05913325232888 - type: recall value: 93.681652490887 - task: type: BitextMining dataset: name: MTEB Tatoeba (cym-eng) type: mteb/tatoeba-bitext-mining config: cym-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.17391304347827 - type: f1 value: 89.97101449275361 - type: precision value: 88.96811594202899 - type: recall value: 92.17391304347827 - task: type: BitextMining dataset: name: MTEB Tatoeba (kaz-eng) type: mteb/tatoeba-bitext-mining config: kaz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.43478260869566 - type: f1 value: 87.72173913043478 - type: precision value: 86.42028985507245 - type: recall value: 90.43478260869566 - task: type: BitextMining dataset: name: MTEB Tatoeba (est-eng) type: mteb/tatoeba-bitext-mining config: est-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.4 - type: f1 value: 88.03 - type: precision value: 86.95 - type: recall value: 90.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (heb-eng) type: mteb/tatoeba-bitext-mining config: heb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.4 - type: f1 value: 91.45666666666666 - type: precision value: 90.525 - type: recall value: 93.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (gla-eng) type: mteb/tatoeba-bitext-mining config: gla-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 81.9059107358263 - type: f1 value: 78.32557872364869 - type: precision value: 76.78260286824823 - type: recall value: 81.9059107358263 - task: type: BitextMining dataset: name: MTEB Tatoeba (mar-eng) type: mteb/tatoeba-bitext-mining config: mar-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.3 - type: f1 value: 92.58333333333333 - type: precision value: 91.73333333333332 - type: recall value: 94.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (lat-eng) type: mteb/tatoeba-bitext-mining config: lat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 79.10000000000001 - type: f1 value: 74.50500000000001 - type: precision value: 72.58928571428571 - type: recall value: 79.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (bel-eng) type: mteb/tatoeba-bitext-mining config: bel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.6 - type: f1 value: 95.55 - type: precision value: 95.05 - type: recall value: 96.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (pms-eng) type: 
mteb/tatoeba-bitext-mining config: pms-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 82.0952380952381 - type: f1 value: 77.98458049886621 - type: precision value: 76.1968253968254 - type: recall value: 82.0952380952381 - task: type: BitextMining dataset: name: MTEB Tatoeba (gle-eng) type: mteb/tatoeba-bitext-mining config: gle-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.9 - type: f1 value: 84.99190476190476 - type: precision value: 83.65 - type: recall value: 87.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (pes-eng) type: mteb/tatoeba-bitext-mining config: pes-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.7 - type: f1 value: 94.56666666666666 - type: precision value: 94.01666666666667 - type: recall value: 95.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (nob-eng) type: mteb/tatoeba-bitext-mining config: nob-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 98.6 - type: f1 value: 98.2 - type: precision value: 98.0 - type: recall value: 98.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (bul-eng) type: mteb/tatoeba-bitext-mining config: bul-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.6 - type: f1 value: 94.38333333333334 - type: precision value: 93.78333333333335 - type: recall value: 95.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (cbk-eng) type: mteb/tatoeba-bitext-mining config: cbk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.4 - type: f1 value: 84.10380952380952 - type: precision value: 82.67 - type: recall value: 87.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (hun-eng) type: mteb/tatoeba-bitext-mining config: hun-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.5 - type: f1 value: 94.33333333333334 - type: precision value: 93.78333333333333 - type: recall value: 95.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (uig-eng) type: mteb/tatoeba-bitext-mining config: uig-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.4 - type: f1 value: 86.82000000000001 - type: precision value: 85.64500000000001 - type: recall value: 89.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (rus-eng) type: mteb/tatoeba-bitext-mining config: rus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.1 - type: f1 value: 93.56666666666668 - type: precision value: 92.81666666666666 - type: recall value: 95.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (spa-eng) type: mteb/tatoeba-bitext-mining config: spa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 98.9 - type: f1 value: 98.6 - type: precision value: 98.45 - type: recall value: 98.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (hye-eng) type: mteb/tatoeba-bitext-mining config: hye-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.01347708894879 - type: f1 value: 93.51752021563343 - type: precision value: 92.82794249775381 - type: recall value: 95.01347708894879 - task: type: BitextMining dataset: name: MTEB Tatoeba (tel-eng) type: mteb/tatoeba-bitext-mining config: tel-eng split: 
test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.00854700854701 - type: f1 value: 96.08262108262107 - type: precision value: 95.65527065527067 - type: recall value: 97.00854700854701 - task: type: BitextMining dataset: name: MTEB Tatoeba (afr-eng) type: mteb/tatoeba-bitext-mining config: afr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.5 - type: f1 value: 95.39999999999999 - type: precision value: 94.88333333333333 - type: recall value: 96.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (mon-eng) type: mteb/tatoeba-bitext-mining config: mon-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.5909090909091 - type: f1 value: 95.49242424242425 - type: precision value: 94.9621212121212 - type: recall value: 96.5909090909091 - task: type: BitextMining dataset: name: MTEB Tatoeba (arz-eng) type: mteb/tatoeba-bitext-mining config: arz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 84.90566037735849 - type: f1 value: 81.85883997204752 - type: precision value: 80.54507337526205 - type: recall value: 84.90566037735849 - task: type: BitextMining dataset: name: MTEB Tatoeba (hrv-eng) type: mteb/tatoeba-bitext-mining config: hrv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.5 - type: f1 value: 96.75 - type: precision value: 96.38333333333333 - type: recall value: 97.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (nov-eng) type: mteb/tatoeba-bitext-mining config: nov-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 86.7704280155642 - type: f1 value: 82.99610894941635 - type: precision value: 81.32295719844358 - type: recall value: 86.7704280155642 - task: type: BitextMining dataset: name: MTEB Tatoeba (gsw-eng) type: mteb/tatoeba-bitext-mining config: gsw-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 67.52136752136752 - type: f1 value: 61.89662189662191 - type: precision value: 59.68660968660969 - type: recall value: 67.52136752136752 - task: type: BitextMining dataset: name: MTEB Tatoeba (nds-eng) type: mteb/tatoeba-bitext-mining config: nds-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.2 - type: f1 value: 86.32 - type: precision value: 85.015 - type: recall value: 89.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (ukr-eng) type: mteb/tatoeba-bitext-mining config: ukr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.0 - type: f1 value: 94.78333333333333 - type: precision value: 94.18333333333334 - type: recall value: 96.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (uzb-eng) type: mteb/tatoeba-bitext-mining config: uzb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 83.8785046728972 - type: f1 value: 80.54517133956385 - type: precision value: 79.154984423676 - type: recall value: 83.8785046728972 - task: type: BitextMining dataset: name: MTEB Tatoeba (lit-eng) type: mteb/tatoeba-bitext-mining config: lit-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.60000000000001 - type: f1 value: 92.01333333333334 - type: precision value: 91.28333333333333 - type: recall value: 93.60000000000001 - 
task: type: BitextMining dataset: name: MTEB Tatoeba (ina-eng) type: mteb/tatoeba-bitext-mining config: ina-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.1 - type: f1 value: 96.26666666666667 - type: precision value: 95.85000000000001 - type: recall value: 97.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (lfn-eng) type: mteb/tatoeba-bitext-mining config: lfn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 84.3 - type: f1 value: 80.67833333333333 - type: precision value: 79.03928571428571 - type: recall value: 84.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (zsm-eng) type: mteb/tatoeba-bitext-mining config: zsm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.3 - type: f1 value: 96.48333333333332 - type: precision value: 96.08333333333331 - type: recall value: 97.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (ita-eng) type: mteb/tatoeba-bitext-mining config: ita-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.7 - type: f1 value: 94.66666666666667 - type: precision value: 94.16666666666667 - type: recall value: 95.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (cmn-eng) type: mteb/tatoeba-bitext-mining config: cmn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.2 - type: f1 value: 96.36666666666667 - type: precision value: 95.96666666666668 - type: recall value: 97.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (lvs-eng) type: mteb/tatoeba-bitext-mining config: lvs-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.3 - type: f1 value: 92.80666666666667 - type: precision value: 92.12833333333333 - type: recall value: 94.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (glg-eng) type: mteb/tatoeba-bitext-mining config: glg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.0 - type: f1 value: 96.22333333333334 - type: precision value: 95.875 - type: recall value: 97.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (ceb-eng) type: mteb/tatoeba-bitext-mining config: ceb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 74.33333333333333 - type: f1 value: 70.78174603174602 - type: precision value: 69.28333333333332 - type: recall value: 74.33333333333333 - task: type: BitextMining dataset: name: MTEB Tatoeba (bre-eng) type: mteb/tatoeba-bitext-mining config: bre-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 37.6 - type: f1 value: 32.938348952090365 - type: precision value: 31.2811038961039 - type: recall value: 37.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (ben-eng) type: mteb/tatoeba-bitext-mining config: ben-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.5 - type: f1 value: 89.13333333333333 - type: precision value: 88.03333333333333 - type: recall value: 91.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (swg-eng) type: mteb/tatoeba-bitext-mining config: swg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 82.14285714285714 - type: f1 value: 77.67857142857143 - type: precision value: 75.59523809523809 - type: recall value: 
82.14285714285714 - task: type: BitextMining dataset: name: MTEB Tatoeba (arq-eng) type: mteb/tatoeba-bitext-mining config: arq-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 69.0450054884742 - type: f1 value: 63.070409283362075 - type: precision value: 60.58992781824835 - type: recall value: 69.0450054884742 - task: type: BitextMining dataset: name: MTEB Tatoeba (kab-eng) type: mteb/tatoeba-bitext-mining config: kab-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 63.1 - type: f1 value: 57.848333333333336 - type: precision value: 55.69500000000001 - type: recall value: 63.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (fra-eng) type: mteb/tatoeba-bitext-mining config: fra-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.1 - type: f1 value: 95.01666666666667 - type: precision value: 94.5 - type: recall value: 96.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (por-eng) type: mteb/tatoeba-bitext-mining config: por-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.89999999999999 - type: f1 value: 94.90666666666667 - type: precision value: 94.425 - type: recall value: 95.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (tat-eng) type: mteb/tatoeba-bitext-mining config: tat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.6 - type: f1 value: 84.61333333333333 - type: precision value: 83.27 - type: recall value: 87.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (oci-eng) type: mteb/tatoeba-bitext-mining config: oci-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 76.4 - type: f1 value: 71.90746031746032 - type: precision value: 70.07027777777778 - type: recall value: 76.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (pol-eng) type: mteb/tatoeba-bitext-mining config: pol-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.89999999999999 - type: f1 value: 97.26666666666667 - type: precision value: 96.95 - type: recall value: 97.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (war-eng) type: mteb/tatoeba-bitext-mining config: war-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 78.8 - type: f1 value: 74.39555555555555 - type: precision value: 72.59416666666667 - type: recall value: 78.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (aze-eng) type: mteb/tatoeba-bitext-mining config: aze-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.19999999999999 - type: f1 value: 93.78999999999999 - type: precision value: 93.125 - type: recall value: 95.19999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (vie-eng) type: mteb/tatoeba-bitext-mining config: vie-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.8 - type: f1 value: 97.1 - type: precision value: 96.75 - type: recall value: 97.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (nno-eng) type: mteb/tatoeba-bitext-mining config: nno-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.6 - type: f1 value: 94.25666666666666 - type: precision value: 93.64166666666668 - type: 
recall value: 95.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (cha-eng) type: mteb/tatoeba-bitext-mining config: cha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 56.934306569343065 - type: f1 value: 51.461591936044485 - type: precision value: 49.37434827945776 - type: recall value: 56.934306569343065 - task: type: BitextMining dataset: name: MTEB Tatoeba (mhr-eng) type: mteb/tatoeba-bitext-mining config: mhr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 20.200000000000003 - type: f1 value: 16.91799284049284 - type: precision value: 15.791855158730158 - type: recall value: 20.200000000000003 - task: type: BitextMining dataset: name: MTEB Tatoeba (dan-eng) type: mteb/tatoeba-bitext-mining config: dan-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.2 - type: f1 value: 95.3 - type: precision value: 94.85 - type: recall value: 96.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (ell-eng) type: mteb/tatoeba-bitext-mining config: ell-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.3 - type: f1 value: 95.11666666666667 - type: precision value: 94.53333333333333 - type: recall value: 96.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (amh-eng) type: mteb/tatoeba-bitext-mining config: amh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.88095238095238 - type: f1 value: 87.14285714285714 - type: precision value: 85.96230158730161 - type: recall value: 89.88095238095238 - task: type: BitextMining dataset: name: MTEB Tatoeba (pam-eng) type: mteb/tatoeba-bitext-mining config: pam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 24.099999999999998 - type: f1 value: 19.630969083349783 - type: precision value: 18.275094905094907 - type: recall value: 24.099999999999998 - task: type: BitextMining dataset: name: MTEB Tatoeba (hsb-eng) type: mteb/tatoeba-bitext-mining config: hsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 83.4368530020704 - type: f1 value: 79.45183870649709 - type: precision value: 77.7432712215321 - type: recall value: 83.4368530020704 - task: type: BitextMining dataset: name: MTEB Tatoeba (srp-eng) type: mteb/tatoeba-bitext-mining config: srp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.8 - type: f1 value: 94.53333333333333 - type: precision value: 93.91666666666666 - type: recall value: 95.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (epo-eng) type: mteb/tatoeba-bitext-mining config: epo-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 98.8 - type: f1 value: 98.48333333333332 - type: precision value: 98.33333333333334 - type: recall value: 98.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (kzj-eng) type: mteb/tatoeba-bitext-mining config: kzj-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 17.5 - type: f1 value: 14.979285714285714 - type: precision value: 14.23235060690943 - type: recall value: 17.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (awa-eng) type: mteb/tatoeba-bitext-mining config: awa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 
93.93939393939394 - type: f1 value: 91.991341991342 - type: precision value: 91.05339105339105 - type: recall value: 93.93939393939394 - task: type: BitextMining dataset: name: MTEB Tatoeba (fao-eng) type: mteb/tatoeba-bitext-mining config: fao-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.31297709923665 - type: f1 value: 86.76844783715012 - type: precision value: 85.63613231552164 - type: recall value: 89.31297709923665 - task: type: BitextMining dataset: name: MTEB Tatoeba (mal-eng) type: mteb/tatoeba-bitext-mining config: mal-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 99.12663755458514 - type: f1 value: 98.93255701115964 - type: precision value: 98.83551673944687 - type: recall value: 99.12663755458514 - task: type: BitextMining dataset: name: MTEB Tatoeba (ile-eng) type: mteb/tatoeba-bitext-mining config: ile-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.0 - type: f1 value: 89.77999999999999 - type: precision value: 88.78333333333333 - type: recall value: 92.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (bos-eng) type: mteb/tatoeba-bitext-mining config: bos-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.89265536723164 - type: f1 value: 95.85687382297553 - type: precision value: 95.33898305084746 - type: recall value: 96.89265536723164 - task: type: BitextMining dataset: name: MTEB Tatoeba (cor-eng) type: mteb/tatoeba-bitext-mining config: cor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 14.6 - type: f1 value: 11.820611790170615 - type: precision value: 11.022616224355355 - type: recall value: 14.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (cat-eng) type: mteb/tatoeba-bitext-mining config: cat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.89999999999999 - type: f1 value: 94.93333333333334 - type: precision value: 94.48666666666666 - type: recall value: 95.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (eus-eng) type: mteb/tatoeba-bitext-mining config: eus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.6 - type: f1 value: 84.72333333333334 - type: precision value: 83.44166666666666 - type: recall value: 87.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (yue-eng) type: mteb/tatoeba-bitext-mining config: yue-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.8 - type: f1 value: 93.47333333333333 - type: precision value: 92.875 - type: recall value: 94.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (swe-eng) type: mteb/tatoeba-bitext-mining config: swe-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.6 - type: f1 value: 95.71666666666665 - type: precision value: 95.28333333333335 - type: recall value: 96.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (dtp-eng) type: mteb/tatoeba-bitext-mining config: dtp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 17.8 - type: f1 value: 14.511074040901628 - type: precision value: 13.503791000666002 - type: recall value: 17.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (kat-eng) type: mteb/tatoeba-bitext-mining config: 
kat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.10187667560321 - type: f1 value: 92.46648793565683 - type: precision value: 91.71134941912423 - type: recall value: 94.10187667560321 - task: type: BitextMining dataset: name: MTEB Tatoeba (jpn-eng) type: mteb/tatoeba-bitext-mining config: jpn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.0 - type: f1 value: 96.11666666666666 - type: precision value: 95.68333333333334 - type: recall value: 97.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (csb-eng) type: mteb/tatoeba-bitext-mining config: csb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 72.72727272727273 - type: f1 value: 66.58949745906267 - type: precision value: 63.86693017127799 - type: recall value: 72.72727272727273 - task: type: BitextMining dataset: name: MTEB Tatoeba (xho-eng) type: mteb/tatoeba-bitext-mining config: xho-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.14084507042254 - type: f1 value: 88.26291079812206 - type: precision value: 87.32394366197182 - type: recall value: 90.14084507042254 - task: type: BitextMining dataset: name: MTEB Tatoeba (orv-eng) type: mteb/tatoeba-bitext-mining config: orv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 64.67065868263472 - type: f1 value: 58.2876627696987 - type: precision value: 55.79255774165953 - type: recall value: 64.67065868263472 - task: type: BitextMining dataset: name: MTEB Tatoeba (ind-eng) type: mteb/tatoeba-bitext-mining config: ind-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.6 - type: f1 value: 94.41666666666667 - type: precision value: 93.85 - type: recall value: 95.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (tuk-eng) type: mteb/tatoeba-bitext-mining config: tuk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 55.172413793103445 - type: f1 value: 49.63992493549144 - type: precision value: 47.71405113769646 - type: recall value: 55.172413793103445 - task: type: BitextMining dataset: name: MTEB Tatoeba (max-eng) type: mteb/tatoeba-bitext-mining config: max-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.46478873239437 - type: f1 value: 73.4417616811983 - type: precision value: 71.91607981220658 - type: recall value: 77.46478873239437 - task: type: BitextMining dataset: name: MTEB Tatoeba (swh-eng) type: mteb/tatoeba-bitext-mining config: swh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 84.61538461538461 - type: f1 value: 80.91452991452994 - type: precision value: 79.33760683760683 - type: recall value: 84.61538461538461 - task: type: BitextMining dataset: name: MTEB Tatoeba (hin-eng) type: mteb/tatoeba-bitext-mining config: hin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 98.2 - type: f1 value: 97.6 - type: precision value: 97.3 - type: recall value: 98.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (dsb-eng) type: mteb/tatoeba-bitext-mining config: dsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 75.5741127348643 - type: f1 value: 72.00417536534445 - type: precision value: 70.53467872883321 
- type: recall value: 75.5741127348643 - task: type: BitextMining dataset: name: MTEB Tatoeba (ber-eng) type: mteb/tatoeba-bitext-mining config: ber-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 62.2 - type: f1 value: 55.577460317460314 - type: precision value: 52.98583333333333 - type: recall value: 62.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (tam-eng) type: mteb/tatoeba-bitext-mining config: tam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.18241042345277 - type: f1 value: 90.6468124709167 - type: precision value: 89.95656894679696 - type: recall value: 92.18241042345277 - task: type: BitextMining dataset: name: MTEB Tatoeba (slk-eng) type: mteb/tatoeba-bitext-mining config: slk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.1 - type: f1 value: 95.13333333333333 - type: precision value: 94.66666666666667 - type: recall value: 96.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (tgl-eng) type: mteb/tatoeba-bitext-mining config: tgl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.8 - type: f1 value: 95.85000000000001 - type: precision value: 95.39999999999999 - type: recall value: 96.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (ast-eng) type: mteb/tatoeba-bitext-mining config: ast-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.1259842519685 - type: f1 value: 89.76377952755905 - type: precision value: 88.71391076115485 - type: recall value: 92.1259842519685 - task: type: BitextMining dataset: name: MTEB Tatoeba (mkd-eng) type: mteb/tatoeba-bitext-mining config: mkd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.1 - type: f1 value: 92.49 - type: precision value: 91.725 - type: recall value: 94.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (khm-eng) type: mteb/tatoeba-bitext-mining config: khm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.5623268698061 - type: f1 value: 73.27364463791058 - type: precision value: 71.51947852086357 - type: recall value: 77.5623268698061 - task: type: BitextMining dataset: name: MTEB Tatoeba (ces-eng) type: mteb/tatoeba-bitext-mining config: ces-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.39999999999999 - type: f1 value: 96.56666666666666 - type: precision value: 96.16666666666667 - type: recall value: 97.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (tzl-eng) type: mteb/tatoeba-bitext-mining config: tzl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 66.34615384615384 - type: f1 value: 61.092032967032964 - type: precision value: 59.27197802197802 - type: recall value: 66.34615384615384 - task: type: BitextMining dataset: name: MTEB Tatoeba (urd-eng) type: mteb/tatoeba-bitext-mining config: urd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.89999999999999 - type: f1 value: 93.41190476190476 - type: precision value: 92.7 - type: recall value: 94.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (ara-eng) type: mteb/tatoeba-bitext-mining config: ara-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - 
type: accuracy value: 93.10000000000001 - type: f1 value: 91.10000000000001 - type: precision value: 90.13333333333333 - type: recall value: 93.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (kor-eng) type: mteb/tatoeba-bitext-mining config: kor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.7 - type: f1 value: 91.97333333333334 - type: precision value: 91.14166666666667 - type: recall value: 93.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (yid-eng) type: mteb/tatoeba-bitext-mining config: yid-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.21698113207547 - type: f1 value: 90.3796046720575 - type: precision value: 89.56367924528303 - type: recall value: 92.21698113207547 - task: type: BitextMining dataset: name: MTEB Tatoeba (fin-eng) type: mteb/tatoeba-bitext-mining config: fin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.6 - type: f1 value: 96.91666666666667 - type: precision value: 96.6 - type: recall value: 97.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (tha-eng) type: mteb/tatoeba-bitext-mining config: tha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.44525547445255 - type: f1 value: 96.71532846715328 - type: precision value: 96.35036496350365 - type: recall value: 97.44525547445255 - task: type: BitextMining dataset: name: MTEB Tatoeba (wuu-eng) type: mteb/tatoeba-bitext-mining config: wuu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.1 - type: f1 value: 92.34000000000002 - type: precision value: 91.49166666666667 - type: recall value: 94.1 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 3.2910000000000004 - type: map_at_10 value: 10.373000000000001 - type: map_at_100 value: 15.612 - type: map_at_1000 value: 17.06 - type: map_at_3 value: 6.119 - type: map_at_5 value: 7.917000000000001 - type: mrr_at_1 value: 44.897999999999996 - type: mrr_at_10 value: 56.054 - type: mrr_at_100 value: 56.82000000000001 - type: mrr_at_1000 value: 56.82000000000001 - type: mrr_at_3 value: 52.381 - type: mrr_at_5 value: 53.81 - type: ndcg_at_1 value: 42.857 - type: ndcg_at_10 value: 27.249000000000002 - type: ndcg_at_100 value: 36.529 - type: ndcg_at_1000 value: 48.136 - type: ndcg_at_3 value: 33.938 - type: ndcg_at_5 value: 29.951 - type: precision_at_1 value: 44.897999999999996 - type: precision_at_10 value: 22.653000000000002 - type: precision_at_100 value: 7.000000000000001 - type: precision_at_1000 value: 1.48 - type: precision_at_3 value: 32.653 - type: precision_at_5 value: 27.755000000000003 - type: recall_at_1 value: 3.2910000000000004 - type: recall_at_10 value: 16.16 - type: recall_at_100 value: 43.908 - type: recall_at_1000 value: 79.823 - type: recall_at_3 value: 7.156 - type: recall_at_5 value: 10.204 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.05879999999999 - type: ap value: 14.609748142799111 - type: f1 value: 54.878956295843096 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: 
test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 64.61799660441426 - type: f1 value: 64.8698191961434 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 51.32860036611885 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 88.34714192048638 - type: cos_sim_ap value: 80.26732975975634 - type: cos_sim_f1 value: 73.53415148134374 - type: cos_sim_precision value: 69.34767360299276 - type: cos_sim_recall value: 78.25857519788919 - type: dot_accuracy value: 88.34714192048638 - type: dot_ap value: 80.26733698491206 - type: dot_f1 value: 73.53415148134374 - type: dot_precision value: 69.34767360299276 - type: dot_recall value: 78.25857519788919 - type: euclidean_accuracy value: 88.34714192048638 - type: euclidean_ap value: 80.26734337771738 - type: euclidean_f1 value: 73.53415148134374 - type: euclidean_precision value: 69.34767360299276 - type: euclidean_recall value: 78.25857519788919 - type: manhattan_accuracy value: 88.30541813196639 - type: manhattan_ap value: 80.19415808104145 - type: manhattan_f1 value: 73.55143870713441 - type: manhattan_precision value: 73.25307511122743 - type: manhattan_recall value: 73.85224274406332 - type: max_accuracy value: 88.34714192048638 - type: max_ap value: 80.26734337771738 - type: max_f1 value: 73.55143870713441 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.81061047075717 - type: cos_sim_ap value: 87.11747055081017 - type: cos_sim_f1 value: 80.04355498817256 - type: cos_sim_precision value: 78.1165262000733 - type: cos_sim_recall value: 82.06806282722513 - type: dot_accuracy value: 89.81061047075717 - type: dot_ap value: 87.11746902745236 - type: dot_f1 value: 80.04355498817256 - type: dot_precision value: 78.1165262000733 - type: dot_recall value: 82.06806282722513 - type: euclidean_accuracy value: 89.81061047075717 - type: euclidean_ap value: 87.11746919324248 - type: euclidean_f1 value: 80.04355498817256 - type: euclidean_precision value: 78.1165262000733 - type: euclidean_recall value: 82.06806282722513 - type: manhattan_accuracy value: 89.79508673885202 - type: manhattan_ap value: 87.11074390832218 - type: manhattan_f1 value: 80.13002540726349 - type: manhattan_precision value: 77.83826945412311 - type: manhattan_recall value: 82.56082537727133 - type: max_accuracy value: 89.81061047075717 - type: max_ap value: 87.11747055081017 - type: max_f1 value: 80.13002540726349 --- # KeyurRamoliya/multilingual-e5-large-instruct-Q8_0-GGUF This model was converted to GGUF format from [`intfloat/multilingual-e5-large-instruct`](https://huggingface.co/intfloat/multilingual-e5-large-instruct) using llama.cpp via the ggml.ai's [GGUF-my-repo](https://huggingface.co/spaces/ggml-org/gguf-my-repo) space. Refer to the [original model card](https://huggingface.co/intfloat/multilingual-e5-large-instruct) for more details on the model. ## Use with llama.cpp Install llama.cpp through brew (works on Mac and Linux) ```bash brew install llama.cpp ``` Invoke the llama.cpp server or the CLI. 
### CLI:
```bash
llama-cli --hf-repo KeyurRamoliya/multilingual-e5-large-instruct-Q8_0-GGUF --hf-file multilingual-e5-large-instruct-q8_0.gguf -p "The meaning to life and the universe is"
```

### Server:
```bash
llama-server --hf-repo KeyurRamoliya/multilingual-e5-large-instruct-Q8_0-GGUF --hf-file multilingual-e5-large-instruct-q8_0.gguf -c 2048
```

Note: You can also use this checkpoint directly through the [usage steps](https://github.com/ggerganov/llama.cpp?tab=readme-ov-file#usage) listed in the Llama.cpp repo.

Step 1: Clone llama.cpp from GitHub.
```
git clone https://github.com/ggerganov/llama.cpp
```

Step 2: Move into the llama.cpp folder and build it with the `LLAMA_CURL=1` flag along with any other hardware-specific flags (e.g. `LLAMA_CUDA=1` for Nvidia GPUs on Linux).
```
cd llama.cpp && LLAMA_CURL=1 make
```

Step 3: Run inference through the main binary.
```
./llama-cli --hf-repo KeyurRamoliya/multilingual-e5-large-instruct-Q8_0-GGUF --hf-file multilingual-e5-large-instruct-q8_0.gguf -p "The meaning to life and the universe is"
```
or
```
./llama-server --hf-repo KeyurRamoliya/multilingual-e5-large-instruct-Q8_0-GGUF --hf-file multilingual-e5-large-instruct-q8_0.gguf -c 2048
```
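Since the underlying model is an embedding model rather than a text generator, a more representative use of the converted file is extracting sentence embeddings. The sketch below is not part of the original card: it assumes the `llama-cpp-python` bindings (the `Llama` class with `embedding=True` and its `embed()` helper) and the instruction-prefixed query format documented in the original model card; verify both against the upstream docs for the versions you install.

```python
# Minimal embedding sketch, assuming llama-cpp-python is installed and the
# GGUF file from the commands above has been downloaded locally.
from llama_cpp import Llama

llm = Llama(
    model_path="multilingual-e5-large-instruct-q8_0.gguf",  # file from the steps above
    embedding=True,  # run in embedding mode instead of text generation
    n_ctx=512,
)

def to_vector(emb):
    # Depending on the pooling metadata in the GGUF, llama.cpp may return a
    # single pooled vector or per-token vectors; mean-pool if needed.
    if emb and isinstance(emb[0], list):
        dim = len(emb[0])
        return [sum(tok[i] for tok in emb) / len(emb) for i in range(dim)]
    return emb

# e5-instruct convention: the query side carries a task instruction, the passage side does not.
query = "Instruct: Given a web search query, retrieve relevant passages that answer the query\nQuery: how to build llama.cpp with CUDA"
passage = "Build llama.cpp with LLAMA_CUDA=1 to enable Nvidia GPU acceleration."

q_emb = to_vector(llm.embed(query))
p_emb = to_vector(llm.embed(passage))

# cosine similarity, no extra dependencies
dot = sum(a * b for a, b in zip(q_emb, p_emb))
norm = (sum(a * a for a in q_emb) ** 0.5) * (sum(b * b for b in p_emb) ** 0.5)
print(f"cosine similarity: {dot / norm:.4f}")
```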
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf
RichardErkhov
null
[ "gguf", "arxiv:2309.06085", "arxiv:2311.07911", "arxiv:2306.05685", "endpoints_compatible", "region:us", "conversational" ]
2024-11-12T14:51:37
2024-11-12T20:58:39
138
0
---
{}
---
Quantization made by Richard Erkhov.

[Github](https://github.com/RichardErkhov)

[Discord](https://discord.gg/pvy7H8DZMG)

[Request more models](https://github.com/RichardErkhov/quant_request)

gemma2-9b-cpt-sea-lionv3-instruct - GGUF
- Model creator: https://huggingface.co/aisingapore/
- Original model: https://huggingface.co/aisingapore/gemma2-9b-cpt-sea-lionv3-instruct/

| Name | Quant method | Size |
| ---- | ---- | ---- |
| [gemma2-9b-cpt-sea-lionv3-instruct.Q2_K.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf/blob/main/gemma2-9b-cpt-sea-lionv3-instruct.Q2_K.gguf) | Q2_K | 3.54GB |
| [gemma2-9b-cpt-sea-lionv3-instruct.Q3_K_S.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf/blob/main/gemma2-9b-cpt-sea-lionv3-instruct.Q3_K_S.gguf) | Q3_K_S | 4.04GB |
| [gemma2-9b-cpt-sea-lionv3-instruct.Q3_K.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf/blob/main/gemma2-9b-cpt-sea-lionv3-instruct.Q3_K.gguf) | Q3_K | 4.43GB |
| [gemma2-9b-cpt-sea-lionv3-instruct.Q3_K_M.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf/blob/main/gemma2-9b-cpt-sea-lionv3-instruct.Q3_K_M.gguf) | Q3_K_M | 4.43GB |
| [gemma2-9b-cpt-sea-lionv3-instruct.Q3_K_L.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf/blob/main/gemma2-9b-cpt-sea-lionv3-instruct.Q3_K_L.gguf) | Q3_K_L | 4.78GB |
| [gemma2-9b-cpt-sea-lionv3-instruct.IQ4_XS.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf/blob/main/gemma2-9b-cpt-sea-lionv3-instruct.IQ4_XS.gguf) | IQ4_XS | 4.86GB |
| [gemma2-9b-cpt-sea-lionv3-instruct.Q4_0.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf/blob/main/gemma2-9b-cpt-sea-lionv3-instruct.Q4_0.gguf) | Q4_0 | 5.07GB |
| [gemma2-9b-cpt-sea-lionv3-instruct.IQ4_NL.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf/blob/main/gemma2-9b-cpt-sea-lionv3-instruct.IQ4_NL.gguf) | IQ4_NL | 5.1GB |
| [gemma2-9b-cpt-sea-lionv3-instruct.Q4_K_S.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf/blob/main/gemma2-9b-cpt-sea-lionv3-instruct.Q4_K_S.gguf) | Q4_K_S | 5.1GB |
| [gemma2-9b-cpt-sea-lionv3-instruct.Q4_K.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf/blob/main/gemma2-9b-cpt-sea-lionv3-instruct.Q4_K.gguf) | Q4_K | 5.37GB |
| [gemma2-9b-cpt-sea-lionv3-instruct.Q4_K_M.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf/blob/main/gemma2-9b-cpt-sea-lionv3-instruct.Q4_K_M.gguf) | Q4_K_M | 5.37GB |
| [gemma2-9b-cpt-sea-lionv3-instruct.Q4_1.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf/blob/main/gemma2-9b-cpt-sea-lionv3-instruct.Q4_1.gguf) | Q4_1 | 5.55GB |
| [gemma2-9b-cpt-sea-lionv3-instruct.Q5_0.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf/blob/main/gemma2-9b-cpt-sea-lionv3-instruct.Q5_0.gguf) | Q5_0 | 6.04GB |
| [gemma2-9b-cpt-sea-lionv3-instruct.Q5_K_S.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf/blob/main/gemma2-9b-cpt-sea-lionv3-instruct.Q5_K_S.gguf) | Q5_K_S | 6.04GB |
| [gemma2-9b-cpt-sea-lionv3-instruct.Q5_K.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf/blob/main/gemma2-9b-cpt-sea-lionv3-instruct.Q5_K.gguf) | Q5_K | 6.19GB |
| [gemma2-9b-cpt-sea-lionv3-instruct.Q5_K_M.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf/blob/main/gemma2-9b-cpt-sea-lionv3-instruct.Q5_K_M.gguf) | Q5_K_M | 6.19GB |
| [gemma2-9b-cpt-sea-lionv3-instruct.Q5_1.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf/blob/main/gemma2-9b-cpt-sea-lionv3-instruct.Q5_1.gguf) | Q5_1 | 6.52GB |
| [gemma2-9b-cpt-sea-lionv3-instruct.Q6_K.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf/blob/main/gemma2-9b-cpt-sea-lionv3-instruct.Q6_K.gguf) | Q6_K | 7.07GB |
| [gemma2-9b-cpt-sea-lionv3-instruct.Q8_0.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf/blob/main/gemma2-9b-cpt-sea-lionv3-instruct.Q8_0.gguf) | Q8_0 | 9.15GB |

Original model description:
---
library_name: transformers
pipeline_tag: text-generation
base_model:
- aisingapore/gemma2-9b-cpt-sea-lionv3-base
language:
- en
- zh
- vi
- id
- th
- fil
- ta
- ms
- km
- lo
- my
- jv
- su
license: gemma
---
# Gemma2 9B CPT SEA-LIONv3 Instruct

SEA-LION is a collection of Large Language Models (LLMs) which have been pretrained and instruct-tuned for the Southeast Asia (SEA) region. Gemma2 9B CPT SEA-LIONv3 Instruct is a multilingual model which has been fine-tuned with around **500,000 English instruction-completion pairs** alongside a larger pool of around **1,000,000 instruction-completion pairs** from other ASEAN languages, such as Indonesian, Thai and Vietnamese.

SEA-LION stands for _Southeast Asian Languages In One Network_.

- **Developed by:** Products Pillar, AI Singapore
- **Funded by:** Singapore NRF
- **Model type:** Decoder
- **Languages:** English, Chinese, Vietnamese, Indonesian, Thai, Filipino, Tamil, Malay, Khmer, Lao, Burmese, Javanese, Sundanese
- **License:** [Gemma Community License](https://ai.google.dev/gemma/terms)

## Model Details
### Model Description
We performed instruction tuning in English and also in ASEAN languages such as Indonesian, Thai and Vietnamese on our [continued pre-trained Gemma2 9B CPT SEA-LIONv3](https://huggingface.co/aisingapore/gemma2-9b-cpt-sea-lionv3-base), a decoder model using the Gemma2 architecture, to create Gemma2 9B CPT SEA-LIONv3 Instruct.

For tokenisation, the model employs the default tokenizer used in Gemma-2-9B. The model has a context length of 8192.

### Benchmark Performance
We evaluated Gemma2 9B CPT SEA-LIONv3 Instruct on both general language capabilities and instruction-following capabilities.

#### General Language Capabilities
For the evaluation of general language capabilities, we employed the [SEA HELM (also known as BHASA) evaluation benchmark](https://arxiv.org/abs/2309.06085v2) across a variety of tasks. These tasks include Question Answering (QA), Sentiment Analysis (Sentiment), Toxicity Detection (Toxicity), Translation in both directions (Eng>Lang & Lang>Eng), Abstractive Summarization (Summ), Causal Reasoning (Causal) and Natural Language Inference (NLI).

Note: SEA HELM is implemented using prompts to elicit answers in a strict format. For all tasks, the model is expected to provide an answer tag from which the answer is automatically extracted. For tasks where options are provided, the answer should comprise one of the pre-defined options.
The scores for each task are normalised to account for baseline performance due to random chance. The evaluation was done **zero-shot** with native prompts on a sample of 100-1000 instances for each dataset.

#### Instruction-following Capabilities
Since Gemma2 9B CPT SEA-LIONv3 Instruct is an instruction-following model, we also evaluated it on instruction-following capabilities with two datasets, [IFEval](https://arxiv.org/abs/2311.07911) and [MT-Bench](https://arxiv.org/abs/2306.05685).

As these two datasets were originally in English, the linguists and native speakers in the team worked together to filter, localize and translate the datasets into the respective target languages to ensure that the examples remained reasonable, meaningful and natural.

**IFEval**

IFEval evaluates a model's ability to adhere to constraints provided in the prompt, for example beginning a response with a specific word/phrase or answering with a certain number of sections. Additionally, accuracy is normalized by the proportion of responses in the correct language (if the model performs the task correctly but responds in the wrong language, it is judged to have failed the task).

**MT-Bench**

MT-Bench evaluates a model's ability to engage in multi-turn (2 turns) conversations and respond in ways that align with human needs. We use `gpt-4-1106-preview` as the judge model and compare against `gpt-3.5-turbo-0125` as the baseline model. The metric used is the weighted win rate against the baseline model (i.e. average win rate across each category: Math, Reasoning, STEM, Humanities, Roleplay, Writing, Extraction). A tie is given a score of 0.5; a short worked sketch of this scoring rule appears after the Technical Specifications section below.

For more details on Gemma2 9B CPT SEA-LIONv3 Instruct benchmark performance, please refer to the SEA HELM leaderboard, https://leaderboard.sea-lion.ai/

### Usage
Gemma2 9B CPT SEA-LIONv3 Instruct can be run using the 🤗 Transformers library:
```python
# Please use transformers==4.45.2
import transformers
import torch

model_id = "aisingapore/gemma2-9b-cpt-sea-lionv3-instruct"

pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    model_kwargs={"torch_dtype": torch.bfloat16},
    device_map="auto",
)
messages = [
    {"role": "user", "content": "Apa sentimen dari kalimat berikut ini?\nKalimat: Buku ini sangat membosankan.\nJawaban: "},
]

outputs = pipeline(
    messages,
    max_new_tokens=256,
)
print(outputs[0]["generated_text"][-1])
```

### Caveats
It is important for users to be aware that our model exhibits certain limitations that warrant consideration. Like many LLMs, the model can hallucinate and occasionally generates irrelevant content, introducing fictional elements that are not grounded in the provided context. Users should also exercise caution in interpreting and validating the model's responses due to the potential inconsistencies in its reasoning.

## Limitations
### Safety
Current SEA-LION models, including this commercially permissive release, have not been aligned for safety. Developers and users should perform their own safety fine-tuning and related security measures. In no event shall the authors be held liable for any claim, damages, or other liability arising from the use of the released weights and codes.

## Technical Specifications
### Fine-Tuning Details
Gemma2 9B CPT SEA-LIONv3 Instruct was built using a combination of a full parameter fine-tune, on-policy alignment, and model merges of the best-performing checkpoints. The training process for fine-tuning took approximately 15 hours, with alignment taking 2 hours, both on 8x H100-80GB GPUs.
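The weighted win-rate computation described under MT-Bench above reduces to simple arithmetic. The snippet below is illustrative only (it is not the SEA HELM or MT-Bench evaluation code), and the category names and outcomes in it are made-up placeholders:

```python
# Illustrative sketch of the MT-Bench weighted win rate: per-category win rate
# with ties counted as 0.5, averaged across categories with equal weight.
def weighted_win_rate(outcomes_by_category):
    """outcomes_by_category: dict mapping category -> list of 'win'/'tie'/'loss'."""
    per_category = []
    for outcomes in outcomes_by_category.values():
        score = sum(1.0 if o == "win" else 0.5 if o == "tie" else 0.0 for o in outcomes)
        per_category.append(score / len(outcomes))
    # every category contributes equally, regardless of how many prompts it has
    return sum(per_category) / len(per_category)

example = {
    "Math": ["win", "loss", "tie"],        # 1.5 / 3 = 0.5
    "Roleplay": ["win", "win", "loss"],    # 2 / 3 ≈ 0.667
}
print(weighted_win_rate(example))  # (0.5 + 0.667) / 2 ≈ 0.583
```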
## Data
Gemma2 9B CPT SEA-LIONv3 Instruct was trained on a wide range of synthetic instructions, alongside publicly available instructions hand-curated by the team with the assistance of native speakers. In addition, special care was taken to ensure that the datasets used had commercially permissive licenses through verification with the original data source.

## Call for Contributions
We encourage researchers, developers, and language enthusiasts to actively contribute to the enhancement and expansion of SEA-LION. Contributions can involve identifying and reporting bugs, sharing pre-training, instruction, and preference data, improving documentation usability, proposing and implementing new model evaluation tasks and metrics, or training versions of the model in additional Southeast Asian languages. Join us in shaping the future of SEA-LION by sharing your expertise and insights to make these models more accessible, accurate, and versatile. Please check out our GitHub for further information on the call for contributions.

## The Team
Chan Adwin, Choa Esther, Cheng Nicholas, Huang Yuli, Lau Wayne, Lee Chwan Ren, Leong Wai Yi, Leong Wei Qi, Limkonchotiwat Peerat, Liu Bing Jie Darius, Montalan Jann Railey, Ng Boon Cheong Raymond, Ngui Jian Gang, Nguyen Thanh Ngan, Ong Brandon, Ong Tat-Wee David, Ong Zhi Hao, Rengarajan Hamsawardhini, Siow Bryan, Susanto Yosephine, Tai Ngee Chia, Tan Choon Meng, Teo Eng Sipp Leslie, Teo Wei Yi, Tjhi William, Teng Walter, Yeo Yeow Tong, Yong Xianbin

## Acknowledgements
[AI Singapore](https://aisingapore.org/) is a national programme supported by the National Research Foundation, Singapore and hosted by the National University of Singapore. Any opinions, findings and conclusions or recommendations expressed in this material are those of the author(s) and do not reflect the views of the National Research Foundation or the National University of Singapore.

## Contact
For more info, please contact us using this [SEA-LION Inquiry Form](https://forms.gle/sLCUVb95wmGf43hi6)

[Link to SEA-LION's GitHub repository](https://github.com/aisingapore/sealion)

## Disclaimer
This is the repository for the commercial instruction-tuned model. The model has _not_ been aligned for safety. Developers and users should perform their own safety fine-tuning and related security measures. In no event shall the authors be held liable for any claims, damages, or other liabilities arising from the use of the released weights and codes.
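As a closing note for this quantised repository: the table at the top lists the available GGUF files, but the card itself does not show how to load them. The sketch below is an assumption-laden illustration using `huggingface_hub` and `llama-cpp-python` (the `hf_hub_download`, `Llama`, and `create_chat_completion` calls come from those libraries' public APIs; verify them against the versions you install). It reuses the Indonesian sentiment prompt from the original card's Transformers example.

```python
# Illustrative sketch: download one quant from the table above and run a single chat turn.
from huggingface_hub import hf_hub_download
from llama_cpp import Llama

gguf_path = hf_hub_download(
    repo_id="RichardErkhov/aisingapore_-_gemma2-9b-cpt-sea-lionv3-instruct-gguf",
    filename="gemma2-9b-cpt-sea-lionv3-instruct.Q4_K_M.gguf",  # 5.37GB entry in the table
)

llm = Llama(model_path=gguf_path, n_ctx=8192)  # context length stated in the card

out = llm.create_chat_completion(
    messages=[
        {
            "role": "user",
            "content": "Apa sentimen dari kalimat berikut ini?\nKalimat: Buku ini sangat membosankan.\nJawaban: ",
        }
    ],
    max_tokens=256,
)
print(out["choices"][0]["message"]["content"])
```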
[ "QUESTION_ANSWERING", "TRANSLATION", "SUMMARIZATION" ]
[ "CHIA" ]
hongkeon/granite-embedding-278m-multilingual-Q8_0-GGUF
hongkeon
sentence-similarity
[ "transformers", "gguf", "language", "granite", "embeddings", "multilingual", "mteb", "llama-cpp", "gguf-my-repo", "sentence-similarity", "en", "ar", "cs", "de", "es", "fr", "it", "ja", "ko", "nl", "pt", "zh", "base_model:ibm-granite/granite-embedding-278m-multilingual", "base_model:quantized:ibm-granite/granite-embedding-278m-multilingual", "license:apache-2.0", "model-index", "endpoints_compatible", "region:us", "feature-extraction" ]
2025-01-27T00:28:24
2025-01-27T00:28:34
138
0
--- base_model: ibm-granite/granite-embedding-278m-multilingual language: - en - ar - cs - de - es - fr - it - ja - ko - nl - pt - zh library_name: transformers license: apache-2.0 pipeline_tag: sentence-similarity tags: - language - granite - embeddings - multilingual - mteb - llama-cpp - gguf-my-repo model-index: - name: ibm-granite/granite-embedding-278m-multilingual results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en-ext) type: mteb/amazon_counterfactual config: en-ext split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 73.4333 - type: f1 value: 61.2301 - type: f1_weighted value: 78.40899999999999 - type: ap value: 23.347 - type: ap_weighted value: 23.347 - type: main_score value: 73.4333 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 71.806 - type: f1 value: 65.6467 - type: f1_weighted value: 74.4815 - type: ap value: 34.045700000000004 - type: ap_weighted value: 34.045700000000004 - type: main_score value: 71.806 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification (default) type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 67.5907 - type: f1 value: 67.36370000000001 - type: f1_weighted value: 67.36370000000001 - type: ap value: 62.0368 - type: ap_weighted value: 62.0368 - type: main_score value: 67.5907 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 37.278 - type: f1 value: 36.4099 - type: f1_weighted value: 36.4099 - type: main_score value: 37.278 - task: type: Retrieval dataset: name: MTEB AppsRetrieval (default) type: CoIR-Retrieval/apps config: default split: test revision: f22508f96b7a36c2415181ed8bb76f76e04ae2d5 metrics: - type: ndcg_at_1 value: 3.453 - type: ndcg_at_3 value: 4.882000000000001 - type: ndcg_at_5 value: 5.564 - type: ndcg_at_10 value: 6.214 - type: ndcg_at_20 value: 6.814000000000001 - type: ndcg_at_100 value: 8.581 - type: ndcg_at_1000 value: 12.215 - type: map_at_1 value: 3.453 - type: map_at_3 value: 4.515000000000001 - type: map_at_5 value: 4.89 - type: map_at_10 value: 5.151 - type: map_at_20 value: 5.313 - type: map_at_100 value: 5.539000000000001 - type: map_at_1000 value: 5.638 - type: recall_at_1 value: 3.453 - type: recall_at_3 value: 5.949999999999999 - type: recall_at_5 value: 7.623 - type: recall_at_10 value: 9.668000000000001 - type: recall_at_20 value: 12.058 - type: recall_at_100 value: 21.859 - type: recall_at_1000 value: 52.722 - type: precision_at_1 value: 3.453 - type: precision_at_3 value: 1.983 - type: precision_at_5 value: 1.525 - type: precision_at_10 value: 0.967 - type: precision_at_20 value: 0.603 - type: precision_at_100 value: 0.219 - type: precision_at_1000 value: 0.053 - type: mrr_at_1 value: 3.4528999999999996 - type: mrr_at_3 value: 4.5153 - type: mrr_at_5 value: 4.889799999999999 - type: mrr_at_10 value: 5.1507 - type: mrr_at_20 value: 5.3135 - type: mrr_at_100 value: 5.5391 - type: mrr_at_1000 value: 5.6382 - type: nauc_ndcg_at_1_max value: 37.1714 - type: nauc_ndcg_at_1_std value: 15.306700000000001 - type: nauc_ndcg_at_1_diff1 value: 46.2252 - type: nauc_ndcg_at_3_max value: 
32.0309 - type: nauc_ndcg_at_3_std value: 14.2983 - type: nauc_ndcg_at_3_diff1 value: 34.7174 - type: nauc_ndcg_at_5_max value: 29.3613 - type: nauc_ndcg_at_5_std value: 13.0358 - type: nauc_ndcg_at_5_diff1 value: 30.8369 - type: nauc_ndcg_at_10_max value: 26.820100000000004 - type: nauc_ndcg_at_10_std value: 12.3422 - type: nauc_ndcg_at_10_diff1 value: 27.3719 - type: nauc_ndcg_at_20_max value: 25.5643 - type: nauc_ndcg_at_20_std value: 11.383000000000001 - type: nauc_ndcg_at_20_diff1 value: 25.7058 - type: nauc_ndcg_at_100_max value: 23.2131 - type: nauc_ndcg_at_100_std value: 12.4787 - type: nauc_ndcg_at_100_diff1 value: 21.6874 - type: nauc_ndcg_at_1000_max value: 22.900499999999997 - type: nauc_ndcg_at_1000_std value: 13.2218 - type: nauc_ndcg_at_1000_diff1 value: 19.668 - type: nauc_map_at_1_max value: 37.1714 - type: nauc_map_at_1_std value: 15.306700000000001 - type: nauc_map_at_1_diff1 value: 46.2252 - type: nauc_map_at_3_max value: 33.1012 - type: nauc_map_at_3_std value: 14.4117 - type: nauc_map_at_3_diff1 value: 36.8859 - type: nauc_map_at_5_max value: 31.404700000000002 - type: nauc_map_at_5_std value: 13.5956 - type: nauc_map_at_5_diff1 value: 34.3454 - type: nauc_map_at_10_max value: 30.1013 - type: nauc_map_at_10_std value: 13.2253 - type: nauc_map_at_10_diff1 value: 32.487 - type: nauc_map_at_20_max value: 29.5747 - type: nauc_map_at_20_std value: 12.843499999999999 - type: nauc_map_at_20_diff1 value: 31.8252 - type: nauc_map_at_100_max value: 28.968899999999998 - type: nauc_map_at_100_std value: 12.967699999999999 - type: nauc_map_at_100_diff1 value: 30.924000000000003 - type: nauc_map_at_1000_max value: 28.894599999999997 - type: nauc_map_at_1000_std value: 12.997800000000002 - type: nauc_map_at_1000_diff1 value: 30.7653 - type: nauc_recall_at_1_max value: 37.1714 - type: nauc_recall_at_1_std value: 15.306700000000001 - type: nauc_recall_at_1_diff1 value: 46.2252 - type: nauc_recall_at_3_max value: 29.6485 - type: nauc_recall_at_3_std value: 14.072799999999999 - type: nauc_recall_at_3_diff1 value: 29.9536 - type: nauc_recall_at_5_max value: 25.251099999999997 - type: nauc_recall_at_5_std value: 11.9121 - type: nauc_recall_at_5_diff1 value: 23.9203 - type: nauc_recall_at_10_max value: 20.8856 - type: nauc_recall_at_10_std value: 10.7653 - type: nauc_recall_at_10_diff1 value: 18.3716 - type: nauc_recall_at_20_max value: 18.9378 - type: nauc_recall_at_20_std value: 8.8933 - type: nauc_recall_at_20_diff1 value: 15.7693 - type: nauc_recall_at_100_max value: 15.7027 - type: nauc_recall_at_100_std value: 12.6519 - type: nauc_recall_at_100_diff1 value: 9.2726 - type: nauc_recall_at_1000_max value: 16.2321 - type: nauc_recall_at_1000_std value: 15.2717 - type: nauc_recall_at_1000_diff1 value: 4.4337 - type: nauc_precision_at_1_max value: 37.1714 - type: nauc_precision_at_1_std value: 15.306700000000001 - type: nauc_precision_at_1_diff1 value: 46.2252 - type: nauc_precision_at_3_max value: 29.6485 - type: nauc_precision_at_3_std value: 14.072799999999999 - type: nauc_precision_at_3_diff1 value: 29.9536 - type: nauc_precision_at_5_max value: 25.251099999999997 - type: nauc_precision_at_5_std value: 11.9121 - type: nauc_precision_at_5_diff1 value: 23.9203 - type: nauc_precision_at_10_max value: 20.8856 - type: nauc_precision_at_10_std value: 10.7653 - type: nauc_precision_at_10_diff1 value: 18.3716 - type: nauc_precision_at_20_max value: 18.9378 - type: nauc_precision_at_20_std value: 8.8933 - type: nauc_precision_at_20_diff1 value: 15.7693 - type: nauc_precision_at_100_max value: 
15.7027 - type: nauc_precision_at_100_std value: 12.6519 - type: nauc_precision_at_100_diff1 value: 9.2726 - type: nauc_precision_at_1000_max value: 16.2321 - type: nauc_precision_at_1000_std value: 15.2717 - type: nauc_precision_at_1000_diff1 value: 4.4337 - type: nauc_mrr_at_1_max value: 37.1714 - type: nauc_mrr_at_1_std value: 15.306700000000001 - type: nauc_mrr_at_1_diff1 value: 46.2252 - type: nauc_mrr_at_3_max value: 33.1012 - type: nauc_mrr_at_3_std value: 14.4117 - type: nauc_mrr_at_3_diff1 value: 36.8859 - type: nauc_mrr_at_5_max value: 31.404700000000002 - type: nauc_mrr_at_5_std value: 13.5956 - type: nauc_mrr_at_5_diff1 value: 34.3454 - type: nauc_mrr_at_10_max value: 30.1013 - type: nauc_mrr_at_10_std value: 13.2253 - type: nauc_mrr_at_10_diff1 value: 32.487 - type: nauc_mrr_at_20_max value: 29.5747 - type: nauc_mrr_at_20_std value: 12.843499999999999 - type: nauc_mrr_at_20_diff1 value: 31.8252 - type: nauc_mrr_at_100_max value: 28.968899999999998 - type: nauc_mrr_at_100_std value: 12.967699999999999 - type: nauc_mrr_at_100_diff1 value: 30.9239 - type: nauc_mrr_at_1000_max value: 28.894599999999997 - type: nauc_mrr_at_1000_std value: 12.997800000000002 - type: nauc_mrr_at_1000_diff1 value: 30.7653 - type: main_score value: 6.214 - task: type: Retrieval dataset: name: MTEB ArguAna (default) type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: ndcg_at_1 value: 31.152 - type: ndcg_at_3 value: 45.050000000000004 - type: ndcg_at_5 value: 50.458999999999996 - type: ndcg_at_10 value: 55.24400000000001 - type: ndcg_at_20 value: 57.918000000000006 - type: ndcg_at_100 value: 58.97 - type: ndcg_at_1000 value: 59.080999999999996 - type: map_at_1 value: 31.152 - type: map_at_3 value: 41.513 - type: map_at_5 value: 44.542 - type: map_at_10 value: 46.544000000000004 - type: map_at_20 value: 47.304 - type: map_at_100 value: 47.467999999999996 - type: map_at_1000 value: 47.473 - type: recall_at_1 value: 31.152 - type: recall_at_3 value: 55.334 - type: recall_at_5 value: 68.35 - type: recall_at_10 value: 83.001 - type: recall_at_20 value: 93.38499999999999 - type: recall_at_100 value: 98.791 - type: recall_at_1000 value: 99.644 - type: precision_at_1 value: 31.152 - type: precision_at_3 value: 18.445 - type: precision_at_5 value: 13.669999999999998 - type: precision_at_10 value: 8.3 - type: precision_at_20 value: 4.6690000000000005 - type: precision_at_100 value: 0.988 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 31.7212 - type: mrr_at_3 value: 41.7141 - type: mrr_at_5 value: 44.754599999999996 - type: mrr_at_10 value: 46.7491 - type: mrr_at_20 value: 47.515299999999996 - type: mrr_at_100 value: 47.679300000000005 - type: mrr_at_1000 value: 47.6841 - type: nauc_ndcg_at_1_max value: -7.8191 - type: nauc_ndcg_at_1_std value: -4.0581 - type: nauc_ndcg_at_1_diff1 value: 14.383199999999999 - type: nauc_ndcg_at_3_max value: -4.6856 - type: nauc_ndcg_at_3_std value: -3.4165 - type: nauc_ndcg_at_3_diff1 value: 10.7764 - type: nauc_ndcg_at_5_max value: -3.2999 - type: nauc_ndcg_at_5_std value: -3.6675 - type: nauc_ndcg_at_5_diff1 value: 11.6249 - type: nauc_ndcg_at_10_max value: -3.2984 - type: nauc_ndcg_at_10_std value: -3.0373 - type: nauc_ndcg_at_10_diff1 value: 11.9938 - type: nauc_ndcg_at_20_max value: -3.147 - type: nauc_ndcg_at_20_std value: -2.9219 - type: nauc_ndcg_at_20_diff1 value: 12.4893 - type: nauc_ndcg_at_100_max value: -4.2572 - type: nauc_ndcg_at_100_std value: -2.8537 - type: nauc_ndcg_at_100_diff1 value: 
12.1039 - type: nauc_ndcg_at_1000_max value: -4.3526 - type: nauc_ndcg_at_1000_std value: -3.0145 - type: nauc_ndcg_at_1000_diff1 value: 12.1685 - type: nauc_map_at_1_max value: -7.8191 - type: nauc_map_at_1_std value: -4.0581 - type: nauc_map_at_1_diff1 value: 14.383199999999999 - type: nauc_map_at_3_max value: -5.5556 - type: nauc_map_at_3_std value: -3.515 - type: nauc_map_at_3_diff1 value: 11.5486 - type: nauc_map_at_5_max value: -4.840599999999999 - type: nauc_map_at_5_std value: -3.6663 - type: nauc_map_at_5_diff1 value: 12.053899999999999 - type: nauc_map_at_10_max value: -4.9401 - type: nauc_map_at_10_std value: -3.3724 - type: nauc_map_at_10_diff1 value: 12.1558 - type: nauc_map_at_20_max value: -4.9365 - type: nauc_map_at_20_std value: -3.3676999999999997 - type: nauc_map_at_20_diff1 value: 12.2729 - type: nauc_map_at_100_max value: -5.0695 - type: nauc_map_at_100_std value: -3.3561 - type: nauc_map_at_100_diff1 value: 12.237 - type: nauc_map_at_1000_max value: -5.0709 - type: nauc_map_at_1000_std value: -3.3594 - type: nauc_map_at_1000_diff1 value: 12.2408 - type: nauc_recall_at_1_max value: -7.8191 - type: nauc_recall_at_1_std value: -4.0581 - type: nauc_recall_at_1_diff1 value: 14.383199999999999 - type: nauc_recall_at_3_max value: -2.0358 - type: nauc_recall_at_3_std value: -3.1464 - type: nauc_recall_at_3_diff1 value: 8.510900000000001 - type: nauc_recall_at_5_max value: 2.4358999999999997 - type: nauc_recall_at_5_std value: -3.727 - type: nauc_recall_at_5_diff1 value: 10.2867 - type: nauc_recall_at_10_max value: 6.5777 - type: nauc_recall_at_10_std value: -1.0198 - type: nauc_recall_at_10_diff1 value: 11.9244 - type: nauc_recall_at_20_max value: 22.8541 - type: nauc_recall_at_20_std value: 4.1539 - type: nauc_recall_at_20_diff1 value: 19.3648 - type: nauc_recall_at_100_max value: 18.5148 - type: nauc_recall_at_100_std value: 41.1822 - type: nauc_recall_at_100_diff1 value: 5.1883 - type: nauc_recall_at_1000_max value: 13.995099999999999 - type: nauc_recall_at_1000_std value: 53.7961 - type: nauc_recall_at_1000_diff1 value: 14.8451 - type: nauc_precision_at_1_max value: -7.8191 - type: nauc_precision_at_1_std value: -4.0581 - type: nauc_precision_at_1_diff1 value: 14.383199999999999 - type: nauc_precision_at_3_max value: -2.0358 - type: nauc_precision_at_3_std value: -3.1464 - type: nauc_precision_at_3_diff1 value: 8.510900000000001 - type: nauc_precision_at_5_max value: 2.4358999999999997 - type: nauc_precision_at_5_std value: -3.727 - type: nauc_precision_at_5_diff1 value: 10.2867 - type: nauc_precision_at_10_max value: 6.5777 - type: nauc_precision_at_10_std value: -1.0198 - type: nauc_precision_at_10_diff1 value: 11.9244 - type: nauc_precision_at_20_max value: 22.8541 - type: nauc_precision_at_20_std value: 4.1539 - type: nauc_precision_at_20_diff1 value: 19.3648 - type: nauc_precision_at_100_max value: 18.5148 - type: nauc_precision_at_100_std value: 41.1822 - type: nauc_precision_at_100_diff1 value: 5.1883 - type: nauc_precision_at_1000_max value: 13.995099999999999 - type: nauc_precision_at_1000_std value: 53.7961 - type: nauc_precision_at_1000_diff1 value: 14.8451 - type: nauc_mrr_at_1_max value: -8.1904 - type: nauc_mrr_at_1_std value: -4.0896 - type: nauc_mrr_at_1_diff1 value: 12.7103 - type: nauc_mrr_at_3_max value: -6.6608 - type: nauc_mrr_at_3_std value: -3.6741 - type: nauc_mrr_at_3_diff1 value: 9.851 - type: nauc_mrr_at_5_max value: -5.7596 - type: nauc_mrr_at_5_std value: -3.7391 - type: nauc_mrr_at_5_diff1 value: 10.4908 - type: nauc_mrr_at_10_max value: 
-5.8613 - type: nauc_mrr_at_10_std value: -3.4377999999999997 - type: nauc_mrr_at_10_diff1 value: 10.5641 - type: nauc_mrr_at_20_max value: -5.8497 - type: nauc_mrr_at_20_std value: -3.4543 - type: nauc_mrr_at_20_diff1 value: 10.6822 - type: nauc_mrr_at_100_max value: -5.9873 - type: nauc_mrr_at_100_std value: -3.4431000000000003 - type: nauc_mrr_at_100_diff1 value: 10.6379 - type: nauc_mrr_at_1000_max value: -5.9887999999999995 - type: nauc_mrr_at_1000_std value: -3.4465000000000003 - type: nauc_mrr_at_1000_diff1 value: 10.641399999999999 - type: main_score value: 55.24400000000001 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P (default) type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 43.1321 - type: v_measure_std value: 13.594000000000001 - type: main_score value: 43.1321 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S (default) type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 32.9343 - type: v_measure_std value: 14.2478 - type: main_score value: 32.9343 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions (default) type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 62.3443 - type: mrr value: 76.3882 - type: nAUC_map_max value: 28.3073 - type: nAUC_map_std value: 15.5307 - type: nAUC_map_diff1 value: 12.6855 - type: nAUC_mrr_max value: 36.409200000000006 - type: nAUC_mrr_std value: 22.6271 - type: nAUC_mrr_diff1 value: 19.1211 - type: main_score value: 62.3443 - task: type: STS dataset: name: MTEB BIOSSES (default) type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: pearson value: 84.3253 - type: spearman value: 81.6362 - type: cosine_pearson value: 84.3253 - type: cosine_spearman value: 81.6362 - type: manhattan_pearson value: 82.70960000000001 - type: manhattan_spearman value: 81.3037 - type: euclidean_pearson value: 82.6906 - type: euclidean_spearman value: 81.6362 - type: main_score value: 81.6362 - task: type: Classification dataset: name: MTEB Banking77Classification (default) type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 78.0617 - type: f1 value: 77.2085 - type: f1_weighted value: 77.2085 - type: main_score value: 78.0617 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P (default) type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 35.8271 - type: v_measure_std value: 0.7191000000000001 - type: main_score value: 35.8271 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S (default) type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 30.3905 - type: v_measure_std value: 0.7136 - type: main_score value: 30.3905 - task: type: Retrieval dataset: name: MTEB COIRCodeSearchNetRetrieval (python) type: CoIR-Retrieval/CodeSearchNet config: python split: test revision: 4adc7bc41202b5c13543c9c886a25f340634dab3 metrics: - type: ndcg_at_1 value: 83.22800000000001 - type: ndcg_at_3 value: 87.41799999999999 - type: ndcg_at_5 value: 88.089 - type: ndcg_at_10 value: 88.789 - type: ndcg_at_20 value: 
89.156 - type: ndcg_at_100 value: 89.60900000000001 - type: ndcg_at_1000 value: 89.79 - type: map_at_1 value: 83.22800000000001 - type: map_at_3 value: 86.431 - type: map_at_5 value: 86.80499999999999 - type: map_at_10 value: 87.09599999999999 - type: map_at_20 value: 87.198 - type: map_at_100 value: 87.263 - type: map_at_1000 value: 87.27000000000001 - type: recall_at_1 value: 83.22800000000001 - type: recall_at_3 value: 90.253 - type: recall_at_5 value: 91.876 - type: recall_at_10 value: 94.03399999999999 - type: recall_at_20 value: 95.475 - type: recall_at_100 value: 97.882 - type: recall_at_1000 value: 99.316 - type: precision_at_1 value: 83.22800000000001 - type: precision_at_3 value: 30.084 - type: precision_at_5 value: 18.375 - type: precision_at_10 value: 9.403 - type: precision_at_20 value: 4.774 - type: precision_at_100 value: 0.979 - type: precision_at_1000 value: 0.099 - type: mrr_at_1 value: 83.235 - type: mrr_at_3 value: 86.4336 - type: mrr_at_5 value: 86.8077 - type: mrr_at_10 value: 87.0979 - type: mrr_at_20 value: 87.2001 - type: mrr_at_100 value: 87.26509999999999 - type: mrr_at_1000 value: 87.2718 - type: nauc_ndcg_at_1_max value: 82.2462 - type: nauc_ndcg_at_1_std value: 11.4635 - type: nauc_ndcg_at_1_diff1 value: 90.5106 - type: nauc_ndcg_at_3_max value: 83.9742 - type: nauc_ndcg_at_3_std value: 12.7085 - type: nauc_ndcg_at_3_diff1 value: 88.2182 - type: nauc_ndcg_at_5_max value: 84.18870000000001 - type: nauc_ndcg_at_5_std value: 13.167499999999999 - type: nauc_ndcg_at_5_diff1 value: 88.44999999999999 - type: nauc_ndcg_at_10_max value: 84.2219 - type: nauc_ndcg_at_10_std value: 13.5219 - type: nauc_ndcg_at_10_diff1 value: 88.6386 - type: nauc_ndcg_at_20_max value: 84.2289 - type: nauc_ndcg_at_20_std value: 14.0686 - type: nauc_ndcg_at_20_diff1 value: 88.7516 - type: nauc_ndcg_at_100_max value: 84.12049999999999 - type: nauc_ndcg_at_100_std value: 14.1778 - type: nauc_ndcg_at_100_diff1 value: 88.8592 - type: nauc_ndcg_at_1000_max value: 84.0367 - type: nauc_ndcg_at_1000_std value: 13.9125 - type: nauc_ndcg_at_1000_diff1 value: 88.9054 - type: nauc_map_at_1_max value: 82.2462 - type: nauc_map_at_1_std value: 11.4635 - type: nauc_map_at_1_diff1 value: 90.5106 - type: nauc_map_at_3_max value: 83.5638 - type: nauc_map_at_3_std value: 12.3576 - type: nauc_map_at_3_diff1 value: 88.8502 - type: nauc_map_at_5_max value: 83.6625 - type: nauc_map_at_5_std value: 12.582099999999999 - type: nauc_map_at_5_diff1 value: 88.9876 - type: nauc_map_at_10_max value: 83.6605 - type: nauc_map_at_10_std value: 12.6859 - type: nauc_map_at_10_diff1 value: 89.07119999999999 - type: nauc_map_at_20_max value: 83.65629999999999 - type: nauc_map_at_20_std value: 12.8105 - type: nauc_map_at_20_diff1 value: 89.1036 - type: nauc_map_at_100_max value: 83.6413 - type: nauc_map_at_100_std value: 12.823699999999999 - type: nauc_map_at_100_diff1 value: 89.1193 - type: nauc_map_at_1000_max value: 83.6386 - type: nauc_map_at_1000_std value: 12.815999999999999 - type: nauc_map_at_1000_diff1 value: 89.1209 - type: nauc_recall_at_1_max value: 82.2462 - type: nauc_recall_at_1_std value: 11.4635 - type: nauc_recall_at_1_diff1 value: 90.5106 - type: nauc_recall_at_3_max value: 85.512 - type: nauc_recall_at_3_std value: 14.061399999999999 - type: nauc_recall_at_3_diff1 value: 85.7898 - type: nauc_recall_at_5_max value: 86.5434 - type: nauc_recall_at_5_std value: 15.894400000000001 - type: nauc_recall_at_5_diff1 value: 86.0934 - type: nauc_recall_at_10_max value: 87.59909999999999 - type: nauc_recall_at_10_std 
value: 18.9872 - type: nauc_recall_at_10_diff1 value: 86.26740000000001 - type: nauc_recall_at_20_max value: 88.76190000000001 - type: nauc_recall_at_20_std value: 25.6618 - type: nauc_recall_at_20_diff1 value: 86.5002 - type: nauc_recall_at_100_max value: 91.0976 - type: nauc_recall_at_100_std value: 40.9161 - type: nauc_recall_at_100_diff1 value: 86.5441 - type: nauc_recall_at_1000_max value: 96.018 - type: nauc_recall_at_1000_std value: 65.6217 - type: nauc_recall_at_1000_diff1 value: 86.8456 - type: nauc_precision_at_1_max value: 82.2462 - type: nauc_precision_at_1_std value: 11.4635 - type: nauc_precision_at_1_diff1 value: 90.5106 - type: nauc_precision_at_3_max value: 85.512 - type: nauc_precision_at_3_std value: 14.061399999999999 - type: nauc_precision_at_3_diff1 value: 85.7898 - type: nauc_precision_at_5_max value: 86.5434 - type: nauc_precision_at_5_std value: 15.894400000000001 - type: nauc_precision_at_5_diff1 value: 86.0934 - type: nauc_precision_at_10_max value: 87.59909999999999 - type: nauc_precision_at_10_std value: 18.9872 - type: nauc_precision_at_10_diff1 value: 86.26740000000001 - type: nauc_precision_at_20_max value: 88.76190000000001 - type: nauc_precision_at_20_std value: 25.6618 - type: nauc_precision_at_20_diff1 value: 86.5002 - type: nauc_precision_at_100_max value: 91.0976 - type: nauc_precision_at_100_std value: 40.9161 - type: nauc_precision_at_100_diff1 value: 86.5441 - type: nauc_precision_at_1000_max value: 96.018 - type: nauc_precision_at_1000_std value: 65.6217 - type: nauc_precision_at_1000_diff1 value: 86.8456 - type: nauc_mrr_at_1_max value: 82.2393 - type: nauc_mrr_at_1_std value: 11.5163 - type: nauc_mrr_at_1_diff1 value: 90.50160000000001 - type: nauc_mrr_at_3_max value: 83.5623 - type: nauc_mrr_at_3_std value: 12.395 - type: nauc_mrr_at_3_diff1 value: 88.8463 - type: nauc_mrr_at_5_max value: 83.6609 - type: nauc_mrr_at_5_std value: 12.620700000000001 - type: nauc_mrr_at_5_diff1 value: 88.9836 - type: nauc_mrr_at_10_max value: 83.6589 - type: nauc_mrr_at_10_std value: 12.7255 - type: nauc_mrr_at_10_diff1 value: 89.0672 - type: nauc_mrr_at_20_max value: 83.6546 - type: nauc_mrr_at_20_std value: 12.8504 - type: nauc_mrr_at_20_diff1 value: 89.09949999999999 - type: nauc_mrr_at_100_max value: 83.6396 - type: nauc_mrr_at_100_std value: 12.8638 - type: nauc_mrr_at_100_diff1 value: 89.1152 - type: nauc_mrr_at_1000_max value: 83.6369 - type: nauc_mrr_at_1000_std value: 12.856100000000001 - type: nauc_mrr_at_1000_diff1 value: 89.1168 - type: main_score value: 88.789 - task: type: Retrieval dataset: name: MTEB COIRCodeSearchNetRetrieval (javascript) type: CoIR-Retrieval/CodeSearchNet config: javascript split: test revision: 4adc7bc41202b5c13543c9c886a25f340634dab3 metrics: - type: ndcg_at_1 value: 29.14 - type: ndcg_at_3 value: 35.185 - type: ndcg_at_5 value: 37.013 - type: ndcg_at_10 value: 38.778 - type: ndcg_at_20 value: 40.184999999999995 - type: ndcg_at_100 value: 42.394999999999996 - type: ndcg_at_1000 value: 44.243 - type: map_at_1 value: 29.14 - type: map_at_3 value: 33.703 - type: map_at_5 value: 34.717999999999996 - type: map_at_10 value: 35.443999999999996 - type: map_at_20 value: 35.831 - type: map_at_100 value: 36.132999999999996 - type: map_at_1000 value: 36.193999999999996 - type: recall_at_1 value: 29.14 - type: recall_at_3 value: 39.471000000000004 - type: recall_at_5 value: 43.908 - type: recall_at_10 value: 49.376999999999995 - type: recall_at_20 value: 54.937999999999995 - type: recall_at_100 value: 66.91 - type: recall_at_1000 value: 
81.98100000000001 - type: precision_at_1 value: 29.14 - type: precision_at_3 value: 13.157 - type: precision_at_5 value: 8.782 - type: precision_at_10 value: 4.938 - type: precision_at_20 value: 2.7470000000000003 - type: precision_at_100 value: 0.6689999999999999 - type: precision_at_1000 value: 0.082 - type: mrr_at_1 value: 29.140100000000004 - type: mrr_at_3 value: 33.703 - type: mrr_at_5 value: 34.7179 - type: mrr_at_10 value: 35.4443 - type: mrr_at_20 value: 35.830600000000004 - type: mrr_at_100 value: 36.1332 - type: mrr_at_1000 value: 36.1935 - type: nauc_ndcg_at_1_max value: 46.9222 - type: nauc_ndcg_at_1_std value: 3.3564999999999996 - type: nauc_ndcg_at_1_diff1 value: 60.583 - type: nauc_ndcg_at_3_max value: 49.205799999999996 - type: nauc_ndcg_at_3_std value: 5.976299999999999 - type: nauc_ndcg_at_3_diff1 value: 55.09610000000001 - type: nauc_ndcg_at_5_max value: 49.0533 - type: nauc_ndcg_at_5_std value: 6.5834 - type: nauc_ndcg_at_5_diff1 value: 54.430800000000005 - type: nauc_ndcg_at_10_max value: 48.626799999999996 - type: nauc_ndcg_at_10_std value: 7.4441 - type: nauc_ndcg_at_10_diff1 value: 53.1986 - type: nauc_ndcg_at_20_max value: 48.7498 - type: nauc_ndcg_at_20_std value: 8.3344 - type: nauc_ndcg_at_20_diff1 value: 52.844 - type: nauc_ndcg_at_100_max value: 48.7164 - type: nauc_ndcg_at_100_std value: 9.1646 - type: nauc_ndcg_at_100_diff1 value: 52.6307 - type: nauc_ndcg_at_1000_max value: 48.634699999999995 - type: nauc_ndcg_at_1000_std value: 9.3865 - type: nauc_ndcg_at_1000_diff1 value: 53.100899999999996 - type: nauc_map_at_1_max value: 46.9222 - type: nauc_map_at_1_std value: 3.3564999999999996 - type: nauc_map_at_1_diff1 value: 60.583 - type: nauc_map_at_3_max value: 48.7099 - type: nauc_map_at_3_std value: 5.2638 - type: nauc_map_at_3_diff1 value: 56.370200000000004 - type: nauc_map_at_5_max value: 48.6303 - type: nauc_map_at_5_std value: 5.5931 - type: nauc_map_at_5_diff1 value: 55.9968 - type: nauc_map_at_10_max value: 48.4549 - type: nauc_map_at_10_std value: 5.949800000000001 - type: nauc_map_at_10_diff1 value: 55.4941 - type: nauc_map_at_20_max value: 48.4854 - type: nauc_map_at_20_std value: 6.1861 - type: nauc_map_at_20_diff1 value: 55.4072 - type: nauc_map_at_100_max value: 48.4835 - type: nauc_map_at_100_std value: 6.2885 - type: nauc_map_at_100_diff1 value: 55.3743 - type: nauc_map_at_1000_max value: 48.4769 - type: nauc_map_at_1000_std value: 6.2978000000000005 - type: nauc_map_at_1000_diff1 value: 55.3852 - type: nauc_recall_at_1_max value: 46.9222 - type: nauc_recall_at_1_std value: 3.3564999999999996 - type: nauc_recall_at_1_diff1 value: 60.583 - type: nauc_recall_at_3_max value: 50.5754 - type: nauc_recall_at_3_std value: 8.005700000000001 - type: nauc_recall_at_3_diff1 value: 51.542100000000005 - type: nauc_recall_at_5_max value: 50.199000000000005 - type: nauc_recall_at_5_std value: 9.5088 - type: nauc_recall_at_5_diff1 value: 49.9358 - type: nauc_recall_at_10_max value: 48.899100000000004 - type: nauc_recall_at_10_std value: 12.2017 - type: nauc_recall_at_10_diff1 value: 46.042 - type: nauc_recall_at_20_max value: 49.433899999999994 - type: nauc_recall_at_20_std value: 16.1228 - type: nauc_recall_at_20_diff1 value: 44.1762 - type: nauc_recall_at_100_max value: 49.2626 - type: nauc_recall_at_100_std value: 23.1356 - type: nauc_recall_at_100_diff1 value: 41.2386 - type: nauc_recall_at_1000_max value: 48.7068 - type: nauc_recall_at_1000_std value: 34.4874 - type: nauc_recall_at_1000_diff1 value: 42.088 - type: nauc_precision_at_1_max value: 46.9222 - 
type: nauc_precision_at_1_std value: 3.3564999999999996 - type: nauc_precision_at_1_diff1 value: 60.583 - type: nauc_precision_at_3_max value: 50.5754 - type: nauc_precision_at_3_std value: 8.005700000000001 - type: nauc_precision_at_3_diff1 value: 51.542100000000005 - type: nauc_precision_at_5_max value: 50.199000000000005 - type: nauc_precision_at_5_std value: 9.5088 - type: nauc_precision_at_5_diff1 value: 49.9358 - type: nauc_precision_at_10_max value: 48.899100000000004 - type: nauc_precision_at_10_std value: 12.2017 - type: nauc_precision_at_10_diff1 value: 46.042 - type: nauc_precision_at_20_max value: 49.433899999999994 - type: nauc_precision_at_20_std value: 16.1228 - type: nauc_precision_at_20_diff1 value: 44.1762 - type: nauc_precision_at_100_max value: 49.2626 - type: nauc_precision_at_100_std value: 23.1356 - type: nauc_precision_at_100_diff1 value: 41.2386 - type: nauc_precision_at_1000_max value: 48.7068 - type: nauc_precision_at_1000_std value: 34.4874 - type: nauc_precision_at_1000_diff1 value: 42.088 - type: nauc_mrr_at_1_max value: 46.9222 - type: nauc_mrr_at_1_std value: 3.3564999999999996 - type: nauc_mrr_at_1_diff1 value: 60.583 - type: nauc_mrr_at_3_max value: 48.7099 - type: nauc_mrr_at_3_std value: 5.2638 - type: nauc_mrr_at_3_diff1 value: 56.370200000000004 - type: nauc_mrr_at_5_max value: 48.6303 - type: nauc_mrr_at_5_std value: 5.5931 - type: nauc_mrr_at_5_diff1 value: 55.9968 - type: nauc_mrr_at_10_max value: 48.4549 - type: nauc_mrr_at_10_std value: 5.949800000000001 - type: nauc_mrr_at_10_diff1 value: 55.4941 - type: nauc_mrr_at_20_max value: 48.4854 - type: nauc_mrr_at_20_std value: 6.1861 - type: nauc_mrr_at_20_diff1 value: 55.4072 - type: nauc_mrr_at_100_max value: 48.4835 - type: nauc_mrr_at_100_std value: 6.2885 - type: nauc_mrr_at_100_diff1 value: 55.3743 - type: nauc_mrr_at_1000_max value: 48.4769 - type: nauc_mrr_at_1000_std value: 6.2978000000000005 - type: nauc_mrr_at_1000_diff1 value: 55.3852 - type: main_score value: 38.778 - task: type: Retrieval dataset: name: MTEB COIRCodeSearchNetRetrieval (go) type: CoIR-Retrieval/CodeSearchNet config: go split: test revision: 4adc7bc41202b5c13543c9c886a25f340634dab3 metrics: - type: ndcg_at_1 value: 42.809999999999995 - type: ndcg_at_3 value: 51.949999999999996 - type: ndcg_at_5 value: 54.217000000000006 - type: ndcg_at_10 value: 56.296 - type: ndcg_at_20 value: 57.735 - type: ndcg_at_100 value: 59.68599999999999 - type: ndcg_at_1000 value: 60.812 - type: map_at_1 value: 42.809999999999995 - type: map_at_3 value: 49.727 - type: map_at_5 value: 50.988 - type: map_at_10 value: 51.847 - type: map_at_20 value: 52.248000000000005 - type: map_at_100 value: 52.52 - type: map_at_1000 value: 52.561 - type: recall_at_1 value: 42.809999999999995 - type: recall_at_3 value: 58.372 - type: recall_at_5 value: 63.864 - type: recall_at_10 value: 70.291 - type: recall_at_20 value: 75.92999999999999 - type: recall_at_100 value: 86.432 - type: recall_at_1000 value: 95.371 - type: precision_at_1 value: 42.809999999999995 - type: precision_at_3 value: 19.457 - type: precision_at_5 value: 12.773000000000001 - type: precision_at_10 value: 7.029000000000001 - type: precision_at_20 value: 3.7960000000000003 - type: precision_at_100 value: 0.864 - type: precision_at_1000 value: 0.095 - type: mrr_at_1 value: 42.8097 - type: mrr_at_3 value: 49.7271 - type: mrr_at_5 value: 50.987899999999996 - type: mrr_at_10 value: 51.847100000000005 - type: mrr_at_20 value: 52.2483 - type: mrr_at_100 value: 52.519499999999994 - type: mrr_at_1000 value: 
52.560700000000004 - type: nauc_ndcg_at_1_max value: 42.5169 - type: nauc_ndcg_at_1_std value: -2.56 - type: nauc_ndcg_at_1_diff1 value: 61.5235 - type: nauc_ndcg_at_3_max value: 43.897999999999996 - type: nauc_ndcg_at_3_std value: -0.927 - type: nauc_ndcg_at_3_diff1 value: 55.5453 - type: nauc_ndcg_at_5_max value: 44.069199999999995 - type: nauc_ndcg_at_5_std value: -0.5125000000000001 - type: nauc_ndcg_at_5_diff1 value: 55.095000000000006 - type: nauc_ndcg_at_10_max value: 43.9261 - type: nauc_ndcg_at_10_std value: 0.218 - type: nauc_ndcg_at_10_diff1 value: 54.7159 - type: nauc_ndcg_at_20_max value: 44.0206 - type: nauc_ndcg_at_20_std value: 0.8718999999999999 - type: nauc_ndcg_at_20_diff1 value: 54.830400000000004 - type: nauc_ndcg_at_100_max value: 43.7526 - type: nauc_ndcg_at_100_std value: 0.9793 - type: nauc_ndcg_at_100_diff1 value: 54.9701 - type: nauc_ndcg_at_1000_max value: 43.8809 - type: nauc_ndcg_at_1000_std value: 0.7155 - type: nauc_ndcg_at_1000_diff1 value: 55.3053 - type: nauc_map_at_1_max value: 42.5169 - type: nauc_map_at_1_std value: -2.56 - type: nauc_map_at_1_diff1 value: 61.5235 - type: nauc_map_at_3_max value: 43.5908 - type: nauc_map_at_3_std value: -1.3469 - type: nauc_map_at_3_diff1 value: 56.9825 - type: nauc_map_at_5_max value: 43.674099999999996 - type: nauc_map_at_5_std value: -1.1391 - type: nauc_map_at_5_diff1 value: 56.7628 - type: nauc_map_at_10_max value: 43.6154 - type: nauc_map_at_10_std value: -0.861 - type: nauc_map_at_10_diff1 value: 56.6439 - type: nauc_map_at_20_max value: 43.650099999999995 - type: nauc_map_at_20_std value: -0.6788 - type: nauc_map_at_20_diff1 value: 56.6917 - type: nauc_map_at_100_max value: 43.6075 - type: nauc_map_at_100_std value: -0.6773 - type: nauc_map_at_100_diff1 value: 56.7132 - type: nauc_map_at_1000_max value: 43.6113 - type: nauc_map_at_1000_std value: -0.6847 - type: nauc_map_at_1000_diff1 value: 56.725300000000004 - type: nauc_recall_at_1_max value: 42.5169 - type: nauc_recall_at_1_std value: -2.56 - type: nauc_recall_at_1_diff1 value: 61.5235 - type: nauc_recall_at_3_max value: 44.8282 - type: nauc_recall_at_3_std value: 0.3731 - type: nauc_recall_at_3_diff1 value: 51.139199999999995 - type: nauc_recall_at_5_max value: 45.3912 - type: nauc_recall_at_5_std value: 1.6466999999999998 - type: nauc_recall_at_5_diff1 value: 49.5336 - type: nauc_recall_at_10_max value: 45.0172 - type: nauc_recall_at_10_std value: 4.702 - type: nauc_recall_at_10_diff1 value: 47.287600000000005 - type: nauc_recall_at_20_max value: 45.5956 - type: nauc_recall_at_20_std value: 8.8859 - type: nauc_recall_at_20_diff1 value: 46.5039 - type: nauc_recall_at_100_max value: 43.7193 - type: nauc_recall_at_100_std value: 15.4564 - type: nauc_recall_at_100_diff1 value: 42.9843 - type: nauc_recall_at_1000_max value: 49.6578 - type: nauc_recall_at_1000_std value: 28.1802 - type: nauc_recall_at_1000_diff1 value: 37.0098 - type: nauc_precision_at_1_max value: 42.5169 - type: nauc_precision_at_1_std value: -2.56 - type: nauc_precision_at_1_diff1 value: 61.5235 - type: nauc_precision_at_3_max value: 44.8282 - type: nauc_precision_at_3_std value: 0.3731 - type: nauc_precision_at_3_diff1 value: 51.139199999999995 - type: nauc_precision_at_5_max value: 45.3912 - type: nauc_precision_at_5_std value: 1.6466999999999998 - type: nauc_precision_at_5_diff1 value: 49.5336 - type: nauc_precision_at_10_max value: 45.0172 - type: nauc_precision_at_10_std value: 4.702 - type: nauc_precision_at_10_diff1 value: 47.287600000000005 - type: nauc_precision_at_20_max value: 
45.5956 - type: nauc_precision_at_20_std value: 8.8859 - type: nauc_precision_at_20_diff1 value: 46.5039 - type: nauc_precision_at_100_max value: 43.7193 - type: nauc_precision_at_100_std value: 15.4564 - type: nauc_precision_at_100_diff1 value: 42.9843 - type: nauc_precision_at_1000_max value: 49.6578 - type: nauc_precision_at_1000_std value: 28.1802 - type: nauc_precision_at_1000_diff1 value: 37.0098 - type: nauc_mrr_at_1_max value: 42.5169 - type: nauc_mrr_at_1_std value: -2.56 - type: nauc_mrr_at_1_diff1 value: 61.5235 - type: nauc_mrr_at_3_max value: 43.5908 - type: nauc_mrr_at_3_std value: -1.3469 - type: nauc_mrr_at_3_diff1 value: 56.9825 - type: nauc_mrr_at_5_max value: 43.674099999999996 - type: nauc_mrr_at_5_std value: -1.1391 - type: nauc_mrr_at_5_diff1 value: 56.7628 - type: nauc_mrr_at_10_max value: 43.6154 - type: nauc_mrr_at_10_std value: -0.861 - type: nauc_mrr_at_10_diff1 value: 56.6439 - type: nauc_mrr_at_20_max value: 43.650099999999995 - type: nauc_mrr_at_20_std value: -0.6788 - type: nauc_mrr_at_20_diff1 value: 56.6917 - type: nauc_mrr_at_100_max value: 43.6075 - type: nauc_mrr_at_100_std value: -0.6773 - type: nauc_mrr_at_100_diff1 value: 56.7132 - type: nauc_mrr_at_1000_max value: 43.6113 - type: nauc_mrr_at_1000_std value: -0.6847 - type: nauc_mrr_at_1000_diff1 value: 56.725300000000004 - type: main_score value: 56.296 - task: type: Retrieval dataset: name: MTEB COIRCodeSearchNetRetrieval (ruby) type: CoIR-Retrieval/CodeSearchNet config: ruby split: test revision: 4adc7bc41202b5c13543c9c886a25f340634dab3 metrics: - type: ndcg_at_1 value: 31.721 - type: ndcg_at_3 value: 38.559 - type: ndcg_at_5 value: 40.303 - type: ndcg_at_10 value: 42.536 - type: ndcg_at_20 value: 44.05 - type: ndcg_at_100 value: 46.565 - type: ndcg_at_1000 value: 48.447 - type: map_at_1 value: 31.721 - type: map_at_3 value: 36.915 - type: map_at_5 value: 37.891000000000005 - type: map_at_10 value: 38.814 - type: map_at_20 value: 39.236 - type: map_at_100 value: 39.574 - type: map_at_1000 value: 39.641999999999996 - type: recall_at_1 value: 31.721 - type: recall_at_3 value: 43.299 - type: recall_at_5 value: 47.502 - type: recall_at_10 value: 54.400999999999996 - type: recall_at_20 value: 60.349 - type: recall_at_100 value: 74.068 - type: recall_at_1000 value: 89.056 - type: precision_at_1 value: 31.721 - type: precision_at_3 value: 14.433000000000002 - type: precision_at_5 value: 9.5 - type: precision_at_10 value: 5.4399999999999995 - type: precision_at_20 value: 3.017 - type: precision_at_100 value: 0.741 - type: precision_at_1000 value: 0.089 - type: mrr_at_1 value: 31.7209 - type: mrr_at_3 value: 36.9151 - type: mrr_at_5 value: 37.8906 - type: mrr_at_10 value: 38.8144 - type: mrr_at_20 value: 39.2355 - type: mrr_at_100 value: 39.5737 - type: mrr_at_1000 value: 39.641999999999996 - type: nauc_ndcg_at_1_max value: 46.428999999999995 - type: nauc_ndcg_at_1_std value: 0.0014 - type: nauc_ndcg_at_1_diff1 value: 59.6017 - type: nauc_ndcg_at_3_max value: 45.9805 - type: nauc_ndcg_at_3_std value: 0.5511 - type: nauc_ndcg_at_3_diff1 value: 53.4978 - type: nauc_ndcg_at_5_max value: 45.5339 - type: nauc_ndcg_at_5_std value: 1.2229 - type: nauc_ndcg_at_5_diff1 value: 51.798500000000004 - type: nauc_ndcg_at_10_max value: 44.018 - type: nauc_ndcg_at_10_std value: 1.6709 - type: nauc_ndcg_at_10_diff1 value: 50.428799999999995 - type: nauc_ndcg_at_20_max value: 43.5252 - type: nauc_ndcg_at_20_std value: 2.4627 - type: nauc_ndcg_at_20_diff1 value: 50.6172 - type: nauc_ndcg_at_100_max value: 43.723099999999995 - 
type: nauc_ndcg_at_100_std value: 4.0416 - type: nauc_ndcg_at_100_diff1 value: 50.135600000000004 - type: nauc_ndcg_at_1000_max value: 43.7739 - type: nauc_ndcg_at_1000_std value: 3.4729 - type: nauc_ndcg_at_1000_diff1 value: 50.6595 - type: nauc_map_at_1_max value: 46.428999999999995 - type: nauc_map_at_1_std value: 0.0014 - type: nauc_map_at_1_diff1 value: 59.6017 - type: nauc_map_at_3_max value: 46.217999999999996 - type: nauc_map_at_3_std value: 0.43889999999999996 - type: nauc_map_at_3_diff1 value: 54.882299999999994 - type: nauc_map_at_5_max value: 45.9757 - type: nauc_map_at_5_std value: 0.8049999999999999 - type: nauc_map_at_5_diff1 value: 53.950900000000004 - type: nauc_map_at_10_max value: 45.3363 - type: nauc_map_at_10_std value: 0.9662999999999999 - type: nauc_map_at_10_diff1 value: 53.369 - type: nauc_map_at_20_max value: 45.2008 - type: nauc_map_at_20_std value: 1.1801000000000001 - type: nauc_map_at_20_diff1 value: 53.4425 - type: nauc_map_at_100_max value: 45.226699999999994 - type: nauc_map_at_100_std value: 1.3667 - type: nauc_map_at_100_diff1 value: 53.4089 - type: nauc_map_at_1000_max value: 45.2252 - type: nauc_map_at_1000_std value: 1.3433000000000002 - type: nauc_map_at_1000_diff1 value: 53.4268 - type: nauc_recall_at_1_max value: 46.428999999999995 - type: nauc_recall_at_1_std value: 0.0014 - type: nauc_recall_at_1_diff1 value: 59.6017 - type: nauc_recall_at_3_max value: 45.2499 - type: nauc_recall_at_3_std value: 0.8637 - type: nauc_recall_at_3_diff1 value: 49.5773 - type: nauc_recall_at_5_max value: 44.1355 - type: nauc_recall_at_5_std value: 2.5255 - type: nauc_recall_at_5_diff1 value: 45.3656 - type: nauc_recall_at_10_max value: 39.313700000000004 - type: nauc_recall_at_10_std value: 4.1421 - type: nauc_recall_at_10_diff1 value: 40.8109 - type: nauc_recall_at_20_max value: 36.923 - type: nauc_recall_at_20_std value: 7.691199999999999 - type: nauc_recall_at_20_diff1 value: 40.8715 - type: nauc_recall_at_100_max value: 36.296 - type: nauc_recall_at_100_std value: 22.020999999999997 - type: nauc_recall_at_100_diff1 value: 33.400800000000004 - type: nauc_recall_at_1000_max value: 30.508999999999997 - type: nauc_recall_at_1000_std value: 29.497600000000002 - type: nauc_recall_at_1000_diff1 value: 27.5001 - type: nauc_precision_at_1_max value: 46.428999999999995 - type: nauc_precision_at_1_std value: 0.0014 - type: nauc_precision_at_1_diff1 value: 59.6017 - type: nauc_precision_at_3_max value: 45.2499 - type: nauc_precision_at_3_std value: 0.8637 - type: nauc_precision_at_3_diff1 value: 49.5773 - type: nauc_precision_at_5_max value: 44.1355 - type: nauc_precision_at_5_std value: 2.5255 - type: nauc_precision_at_5_diff1 value: 45.3656 - type: nauc_precision_at_10_max value: 39.313700000000004 - type: nauc_precision_at_10_std value: 4.1421 - type: nauc_precision_at_10_diff1 value: 40.8109 - type: nauc_precision_at_20_max value: 36.923 - type: nauc_precision_at_20_std value: 7.691199999999999 - type: nauc_precision_at_20_diff1 value: 40.8715 - type: nauc_precision_at_100_max value: 36.296 - type: nauc_precision_at_100_std value: 22.020999999999997 - type: nauc_precision_at_100_diff1 value: 33.400800000000004 - type: nauc_precision_at_1000_max value: 30.508999999999997 - type: nauc_precision_at_1000_std value: 29.497600000000002 - type: nauc_precision_at_1000_diff1 value: 27.5001 - type: nauc_mrr_at_1_max value: 46.428999999999995 - type: nauc_mrr_at_1_std value: 0.0014 - type: nauc_mrr_at_1_diff1 value: 59.6017 - type: nauc_mrr_at_3_max value: 46.217999999999996 - type: 
nauc_mrr_at_3_std value: 0.43889999999999996 - type: nauc_mrr_at_3_diff1 value: 54.882299999999994 - type: nauc_mrr_at_5_max value: 45.9757 - type: nauc_mrr_at_5_std value: 0.8049999999999999 - type: nauc_mrr_at_5_diff1 value: 53.950900000000004 - type: nauc_mrr_at_10_max value: 45.3363 - type: nauc_mrr_at_10_std value: 0.9662999999999999 - type: nauc_mrr_at_10_diff1 value: 53.369 - type: nauc_mrr_at_20_max value: 45.2008 - type: nauc_mrr_at_20_std value: 1.1801000000000001 - type: nauc_mrr_at_20_diff1 value: 53.4425 - type: nauc_mrr_at_100_max value: 45.226699999999994 - type: nauc_mrr_at_100_std value: 1.3667 - type: nauc_mrr_at_100_diff1 value: 53.4089 - type: nauc_mrr_at_1000_max value: 45.2252 - type: nauc_mrr_at_1000_std value: 1.3433000000000002 - type: nauc_mrr_at_1000_diff1 value: 53.4268 - type: main_score value: 42.536 - task: type: Retrieval dataset: name: MTEB COIRCodeSearchNetRetrieval (java) type: CoIR-Retrieval/CodeSearchNet config: java split: test revision: 4adc7bc41202b5c13543c9c886a25f340634dab3 metrics: - type: ndcg_at_1 value: 36.887 - type: ndcg_at_3 value: 44.671 - type: ndcg_at_5 value: 46.619 - type: ndcg_at_10 value: 48.54 - type: ndcg_at_20 value: 49.881 - type: ndcg_at_100 value: 51.847 - type: ndcg_at_1000 value: 53.286 - type: map_at_1 value: 36.887 - type: map_at_3 value: 42.805 - type: map_at_5 value: 43.884 - type: map_at_10 value: 44.68 - type: map_at_20 value: 45.051 - type: map_at_100 value: 45.316 - type: map_at_1000 value: 45.364 - type: recall_at_1 value: 36.887 - type: recall_at_3 value: 50.05 - type: recall_at_5 value: 54.788000000000004 - type: recall_at_10 value: 60.711999999999996 - type: recall_at_20 value: 65.997 - type: recall_at_100 value: 76.696 - type: recall_at_1000 value: 88.371 - type: precision_at_1 value: 36.887 - type: precision_at_3 value: 16.683 - type: precision_at_5 value: 10.958 - type: precision_at_10 value: 6.071 - type: precision_at_20 value: 3.3000000000000003 - type: precision_at_100 value: 0.767 - type: precision_at_1000 value: 0.08800000000000001 - type: mrr_at_1 value: 36.9147 - type: mrr_at_3 value: 42.823699999999995 - type: mrr_at_5 value: 43.8985 - type: mrr_at_10 value: 44.6961 - type: mrr_at_20 value: 45.067 - type: mrr_at_100 value: 45.3318 - type: mrr_at_1000 value: 45.3801 - type: nauc_ndcg_at_1_max value: 42.8063 - type: nauc_ndcg_at_1_std value: -5.3001 - type: nauc_ndcg_at_1_diff1 value: 63.370099999999994 - type: nauc_ndcg_at_3_max value: 44.0649 - type: nauc_ndcg_at_3_std value: -4.0304 - type: nauc_ndcg_at_3_diff1 value: 57.7429 - type: nauc_ndcg_at_5_max value: 43.864799999999995 - type: nauc_ndcg_at_5_std value: -3.2800000000000002 - type: nauc_ndcg_at_5_diff1 value: 57.0472 - type: nauc_ndcg_at_10_max value: 43.614799999999995 - type: nauc_ndcg_at_10_std value: -2.424 - type: nauc_ndcg_at_10_diff1 value: 56.3498 - type: nauc_ndcg_at_20_max value: 43.6108 - type: nauc_ndcg_at_20_std value: -1.699 - type: nauc_ndcg_at_20_diff1 value: 56.2153 - type: nauc_ndcg_at_100_max value: 43.4705 - type: nauc_ndcg_at_100_std value: -0.7144 - type: nauc_ndcg_at_100_diff1 value: 56.0679 - type: nauc_ndcg_at_1000_max value: 43.6856 - type: nauc_ndcg_at_1000_std value: -0.7129 - type: nauc_ndcg_at_1000_diff1 value: 56.40540000000001 - type: nauc_map_at_1_max value: 42.8063 - type: nauc_map_at_1_std value: -5.3001 - type: nauc_map_at_1_diff1 value: 63.370099999999994 - type: nauc_map_at_3_max value: 43.797999999999995 - type: nauc_map_at_3_std value: -4.3491 - type: nauc_map_at_3_diff1 value: 59.0673 - type: 
nauc_map_at_5_max value: 43.6812 - type: nauc_map_at_5_std value: -3.9397 - type: nauc_map_at_5_diff1 value: 58.6982 - type: nauc_map_at_10_max value: 43.5745 - type: nauc_map_at_10_std value: -3.6122 - type: nauc_map_at_10_diff1 value: 58.431999999999995 - type: nauc_map_at_20_max value: 43.573 - type: nauc_map_at_20_std value: -3.4323 - type: nauc_map_at_20_diff1 value: 58.4168 - type: nauc_map_at_100_max value: 43.5448 - type: nauc_map_at_100_std value: -3.3167 - type: nauc_map_at_100_diff1 value: 58.394999999999996 - type: nauc_map_at_1000_max value: 43.5506 - type: nauc_map_at_1000_std value: -3.3144 - type: nauc_map_at_1000_diff1 value: 58.4057 - type: nauc_recall_at_1_max value: 42.8063 - type: nauc_recall_at_1_std value: -5.3001 - type: nauc_recall_at_1_diff1 value: 63.370099999999994 - type: nauc_recall_at_3_max value: 44.8286 - type: nauc_recall_at_3_std value: -3.0949999999999998 - type: nauc_recall_at_3_diff1 value: 53.8907 - type: nauc_recall_at_5_max value: 44.3801 - type: nauc_recall_at_5_std value: -1.1593 - type: nauc_recall_at_5_diff1 value: 51.948899999999995 - type: nauc_recall_at_10_max value: 43.6005 - type: nauc_recall_at_10_std value: 1.9532999999999998 - type: nauc_recall_at_10_diff1 value: 49.2211 - type: nauc_recall_at_20_max value: 43.5839 - type: nauc_recall_at_20_std value: 5.8288 - type: nauc_recall_at_20_diff1 value: 47.7761 - type: nauc_recall_at_100_max value: 42.6633 - type: nauc_recall_at_100_std value: 16.4317 - type: nauc_recall_at_100_diff1 value: 44.0676 - type: nauc_recall_at_1000_max value: 46.698 - type: nauc_recall_at_1000_std value: 30.054799999999997 - type: nauc_recall_at_1000_diff1 value: 41.5816 - type: nauc_precision_at_1_max value: 42.8063 - type: nauc_precision_at_1_std value: -5.3001 - type: nauc_precision_at_1_diff1 value: 63.370099999999994 - type: nauc_precision_at_3_max value: 44.8286 - type: nauc_precision_at_3_std value: -3.0949999999999998 - type: nauc_precision_at_3_diff1 value: 53.8907 - type: nauc_precision_at_5_max value: 44.3801 - type: nauc_precision_at_5_std value: -1.1593 - type: nauc_precision_at_5_diff1 value: 51.948899999999995 - type: nauc_precision_at_10_max value: 43.6005 - type: nauc_precision_at_10_std value: 1.9532999999999998 - type: nauc_precision_at_10_diff1 value: 49.2211 - type: nauc_precision_at_20_max value: 43.5839 - type: nauc_precision_at_20_std value: 5.8288 - type: nauc_precision_at_20_diff1 value: 47.7761 - type: nauc_precision_at_100_max value: 42.6633 - type: nauc_precision_at_100_std value: 16.4317 - type: nauc_precision_at_100_diff1 value: 44.0676 - type: nauc_precision_at_1000_max value: 46.698 - type: nauc_precision_at_1000_std value: 30.054799999999997 - type: nauc_precision_at_1000_diff1 value: 41.5816 - type: nauc_mrr_at_1_max value: 42.7425 - type: nauc_mrr_at_1_std value: -5.2358 - type: nauc_mrr_at_1_diff1 value: 63.285199999999996 - type: nauc_mrr_at_3_max value: 43.763200000000005 - type: nauc_mrr_at_3_std value: -4.2973 - type: nauc_mrr_at_3_diff1 value: 59.031 - type: nauc_mrr_at_5_max value: 43.650800000000004 - type: nauc_mrr_at_5_std value: -3.8918 - type: nauc_mrr_at_5_diff1 value: 58.6636 - type: nauc_mrr_at_10_max value: 43.5429 - type: nauc_mrr_at_10_std value: -3.5659000000000005 - type: nauc_mrr_at_10_diff1 value: 58.3946 - type: nauc_mrr_at_20_max value: 43.5411 - type: nauc_mrr_at_20_std value: -3.3855000000000004 - type: nauc_mrr_at_20_diff1 value: 58.379099999999994 - type: nauc_mrr_at_100_max value: 43.5128 - type: nauc_mrr_at_100_std value: -3.2696000000000005 - type: 
nauc_mrr_at_100_diff1 value: 58.3572 - type: nauc_mrr_at_1000_max value: 43.5186 - type: nauc_mrr_at_1000_std value: -3.2672 - type: nauc_mrr_at_1000_diff1 value: 58.3678 - type: main_score value: 48.54 - task: type: Retrieval dataset: name: MTEB COIRCodeSearchNetRetrieval (php) type: CoIR-Retrieval/CodeSearchNet config: php split: test revision: 4adc7bc41202b5c13543c9c886a25f340634dab3 metrics: - type: ndcg_at_1 value: 30.734 - type: ndcg_at_3 value: 38.155 - type: ndcg_at_5 value: 40.306999999999995 - type: ndcg_at_10 value: 42.510999999999996 - type: ndcg_at_20 value: 44.156 - type: ndcg_at_100 value: 46.641 - type: ndcg_at_1000 value: 48.359 - type: map_at_1 value: 30.734 - type: map_at_3 value: 36.347 - type: map_at_5 value: 37.539 - type: map_at_10 value: 38.455 - type: map_at_20 value: 38.906 - type: map_at_100 value: 39.24 - type: map_at_1000 value: 39.300000000000004 - type: recall_at_1 value: 30.734 - type: recall_at_3 value: 43.378 - type: recall_at_5 value: 48.616 - type: recall_at_10 value: 55.395 - type: recall_at_20 value: 61.91 - type: recall_at_100 value: 75.432 - type: recall_at_1000 value: 89.254 - type: precision_at_1 value: 30.734 - type: precision_at_3 value: 14.459 - type: precision_at_5 value: 9.722999999999999 - type: precision_at_10 value: 5.539000000000001 - type: precision_at_20 value: 3.0949999999999998 - type: precision_at_100 value: 0.754 - type: precision_at_1000 value: 0.089 - type: mrr_at_1 value: 30.6907 - type: mrr_at_3 value: 36.3137 - type: mrr_at_5 value: 37.5121 - type: mrr_at_10 value: 38.4289 - type: mrr_at_20 value: 38.8786 - type: mrr_at_100 value: 39.2136 - type: mrr_at_1000 value: 39.2729 - type: nauc_ndcg_at_1_max value: 36.8055 - type: nauc_ndcg_at_1_std value: -1.5909 - type: nauc_ndcg_at_1_diff1 value: 55.9244 - type: nauc_ndcg_at_3_max value: 38.4262 - type: nauc_ndcg_at_3_std value: 0.5292 - type: nauc_ndcg_at_3_diff1 value: 49.7477 - type: nauc_ndcg_at_5_max value: 38.0552 - type: nauc_ndcg_at_5_std value: 1.102 - type: nauc_ndcg_at_5_diff1 value: 48.5308 - type: nauc_ndcg_at_10_max value: 38.0054 - type: nauc_ndcg_at_10_std value: 1.9313 - type: nauc_ndcg_at_10_diff1 value: 48.016999999999996 - type: nauc_ndcg_at_20_max value: 37.8808 - type: nauc_ndcg_at_20_std value: 2.56 - type: nauc_ndcg_at_20_diff1 value: 47.5649 - type: nauc_ndcg_at_100_max value: 38.3754 - type: nauc_ndcg_at_100_std value: 3.6703 - type: nauc_ndcg_at_100_diff1 value: 47.6154 - type: nauc_ndcg_at_1000_max value: 38.534600000000005 - type: nauc_ndcg_at_1000_std value: 3.7317000000000005 - type: nauc_ndcg_at_1000_diff1 value: 48.0299 - type: nauc_map_at_1_max value: 36.8055 - type: nauc_map_at_1_std value: -1.5909 - type: nauc_map_at_1_diff1 value: 55.9244 - type: nauc_map_at_3_max value: 38.0383 - type: nauc_map_at_3_std value: 0.0207 - type: nauc_map_at_3_diff1 value: 51.137299999999996 - type: nauc_map_at_5_max value: 37.8223 - type: nauc_map_at_5_std value: 0.3179 - type: nauc_map_at_5_diff1 value: 50.4641 - type: nauc_map_at_10_max value: 37.8022 - type: nauc_map_at_10_std value: 0.6617999999999999 - type: nauc_map_at_10_diff1 value: 50.269 - type: nauc_map_at_20_max value: 37.7686 - type: nauc_map_at_20_std value: 0.8326999999999999 - type: nauc_map_at_20_diff1 value: 50.153499999999994 - type: nauc_map_at_100_max value: 37.832300000000004 - type: nauc_map_at_100_std value: 0.9767 - type: nauc_map_at_100_diff1 value: 50.174099999999996 - type: nauc_map_at_1000_max value: 37.838300000000004 - type: nauc_map_at_1000_std value: 0.9815 - type: 
nauc_map_at_1000_diff1 value: 50.1882 - type: nauc_recall_at_1_max value: 36.8055 - type: nauc_recall_at_1_std value: -1.5909 - type: nauc_recall_at_1_diff1 value: 55.9244 - type: nauc_recall_at_3_max value: 39.5304 - type: nauc_recall_at_3_std value: 1.9767 - type: nauc_recall_at_3_diff1 value: 45.8281 - type: nauc_recall_at_5_max value: 38.6851 - type: nauc_recall_at_5_std value: 3.4711 - type: nauc_recall_at_5_diff1 value: 42.8172 - type: nauc_recall_at_10_max value: 38.5524 - type: nauc_recall_at_10_std value: 6.2315000000000005 - type: nauc_recall_at_10_diff1 value: 40.801 - type: nauc_recall_at_20_max value: 38.048300000000005 - type: nauc_recall_at_20_std value: 9.3045 - type: nauc_recall_at_20_diff1 value: 38.222 - type: nauc_recall_at_100_max value: 42.054399999999994 - type: nauc_recall_at_100_std value: 20.4425 - type: nauc_recall_at_100_diff1 value: 35.0773 - type: nauc_recall_at_1000_max value: 49.2856 - type: nauc_recall_at_1000_std value: 38.4529 - type: nauc_recall_at_1000_diff1 value: 31.7647 - type: nauc_precision_at_1_max value: 36.8055 - type: nauc_precision_at_1_std value: -1.5909 - type: nauc_precision_at_1_diff1 value: 55.9244 - type: nauc_precision_at_3_max value: 39.5304 - type: nauc_precision_at_3_std value: 1.9767 - type: nauc_precision_at_3_diff1 value: 45.8281 - type: nauc_precision_at_5_max value: 38.6851 - type: nauc_precision_at_5_std value: 3.4711 - type: nauc_precision_at_5_diff1 value: 42.8172 - type: nauc_precision_at_10_max value: 38.5524 - type: nauc_precision_at_10_std value: 6.2315000000000005 - type: nauc_precision_at_10_diff1 value: 40.801 - type: nauc_precision_at_20_max value: 38.048300000000005 - type: nauc_precision_at_20_std value: 9.3045 - type: nauc_precision_at_20_diff1 value: 38.222 - type: nauc_precision_at_100_max value: 42.054399999999994 - type: nauc_precision_at_100_std value: 20.4425 - type: nauc_precision_at_100_diff1 value: 35.0773 - type: nauc_precision_at_1000_max value: 49.2856 - type: nauc_precision_at_1000_std value: 38.4529 - type: nauc_precision_at_1000_diff1 value: 31.7647 - type: nauc_mrr_at_1_max value: 36.8365 - type: nauc_mrr_at_1_std value: -1.4754 - type: nauc_mrr_at_1_diff1 value: 56.0597 - type: nauc_mrr_at_3_max value: 38.054 - type: nauc_mrr_at_3_std value: 0.09430000000000001 - type: nauc_mrr_at_3_diff1 value: 51.2016 - type: nauc_mrr_at_5_max value: 37.8431 - type: nauc_mrr_at_5_std value: 0.3829 - type: nauc_mrr_at_5_diff1 value: 50.5285 - type: nauc_mrr_at_10_max value: 37.8231 - type: nauc_mrr_at_10_std value: 0.7271 - type: nauc_mrr_at_10_diff1 value: 50.333099999999995 - type: nauc_mrr_at_20_max value: 37.7905 - type: nauc_mrr_at_20_std value: 0.8992999999999999 - type: nauc_mrr_at_20_diff1 value: 50.2181 - type: nauc_mrr_at_100_max value: 37.853500000000004 - type: nauc_mrr_at_100_std value: 1.0428 - type: nauc_mrr_at_100_diff1 value: 50.239 - type: nauc_mrr_at_1000_max value: 37.859500000000004 - type: nauc_mrr_at_1000_std value: 1.0477 - type: nauc_mrr_at_1000_diff1 value: 50.2532 - type: main_score value: 42.510999999999996 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval (default) type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: ndcg_at_1 value: 42.918 - type: ndcg_at_3 value: 47.992000000000004 - type: ndcg_at_5 value: 50.298 - type: ndcg_at_10 value: 53.047999999999995 - type: ndcg_at_20 value: 55.36600000000001 - type: ndcg_at_100 value: 58.18 - type: ndcg_at_1000 value: 59.992999999999995 - type: 
map_at_1 value: 35.147 - type: map_at_3 value: 42.985 - type: map_at_5 value: 44.895 - type: map_at_10 value: 46.568 - type: map_at_20 value: 47.527 - type: map_at_100 value: 48.178 - type: map_at_1000 value: 48.303000000000004 - type: recall_at_1 value: 35.147 - type: recall_at_3 value: 50.229 - type: recall_at_5 value: 56.586999999999996 - type: recall_at_10 value: 64.656 - type: recall_at_20 value: 72.875 - type: recall_at_100 value: 85.397 - type: recall_at_1000 value: 96.799 - type: precision_at_1 value: 42.918 - type: precision_at_3 value: 22.698999999999998 - type: precision_at_5 value: 16.309 - type: precision_at_10 value: 10.100000000000001 - type: precision_at_20 value: 6.0440000000000005 - type: precision_at_100 value: 1.5890000000000002 - type: precision_at_1000 value: 0.209 - type: mrr_at_1 value: 42.9185 - type: mrr_at_3 value: 50.1907 - type: mrr_at_5 value: 51.9003 - type: mrr_at_10 value: 52.824400000000004 - type: mrr_at_20 value: 53.3002 - type: mrr_at_100 value: 53.5134 - type: mrr_at_1000 value: 53.5569 - type: nauc_ndcg_at_1_max value: 45.115300000000005 - type: nauc_ndcg_at_1_std value: -5.3469999999999995 - type: nauc_ndcg_at_1_diff1 value: 50.792899999999996 - type: nauc_ndcg_at_3_max value: 44.379000000000005 - type: nauc_ndcg_at_3_std value: -2.628 - type: nauc_ndcg_at_3_diff1 value: 45.6678 - type: nauc_ndcg_at_5_max value: 44.8852 - type: nauc_ndcg_at_5_std value: -1.7051 - type: nauc_ndcg_at_5_diff1 value: 46.0814 - type: nauc_ndcg_at_10_max value: 43.969500000000004 - type: nauc_ndcg_at_10_std value: -0.4902 - type: nauc_ndcg_at_10_diff1 value: 46.2439 - type: nauc_ndcg_at_20_max value: 44.588499999999996 - type: nauc_ndcg_at_20_std value: 0.5193 - type: nauc_ndcg_at_20_diff1 value: 45.9229 - type: nauc_ndcg_at_100_max value: 45.0779 - type: nauc_ndcg_at_100_std value: 1.1967999999999999 - type: nauc_ndcg_at_100_diff1 value: 46.090199999999996 - type: nauc_ndcg_at_1000_max value: 45.082 - type: nauc_ndcg_at_1000_std value: 0.3457 - type: nauc_ndcg_at_1000_diff1 value: 46.366 - type: nauc_map_at_1_max value: 38.731 - type: nauc_map_at_1_std value: -7.1701 - type: nauc_map_at_1_diff1 value: 52.0087 - type: nauc_map_at_3_max value: 42.126799999999996 - type: nauc_map_at_3_std value: -4.8249 - type: nauc_map_at_3_diff1 value: 47.7841 - type: nauc_map_at_5_max value: 43.2155 - type: nauc_map_at_5_std value: -3.9702 - type: nauc_map_at_5_diff1 value: 47.9376 - type: nauc_map_at_10_max value: 43.4398 - type: nauc_map_at_10_std value: -2.8201 - type: nauc_map_at_10_diff1 value: 47.9726 - type: nauc_map_at_20_max value: 43.9625 - type: nauc_map_at_20_std value: -2.4088 - type: nauc_map_at_20_diff1 value: 47.7323 - type: nauc_map_at_100_max value: 44.0439 - type: nauc_map_at_100_std value: -2.1932 - type: nauc_map_at_100_diff1 value: 47.672399999999996 - type: nauc_map_at_1000_max value: 44.059599999999996 - type: nauc_map_at_1000_std value: -2.2453999999999996 - type: nauc_map_at_1000_diff1 value: 47.6659 - type: nauc_recall_at_1_max value: 38.731 - type: nauc_recall_at_1_std value: -7.1701 - type: nauc_recall_at_1_diff1 value: 52.0087 - type: nauc_recall_at_3_max value: 40.5229 - type: nauc_recall_at_3_std value: -1.3240999999999998 - type: nauc_recall_at_3_diff1 value: 41.1764 - type: nauc_recall_at_5_max value: 41.248000000000005 - type: nauc_recall_at_5_std value: 1.4647999999999999 - type: nauc_recall_at_5_diff1 value: 41.044799999999995 - type: nauc_recall_at_10_max value: 38.6375 - type: nauc_recall_at_10_std value: 5.3439 - type: nauc_recall_at_10_diff1 value: 
39.8162 - type: nauc_recall_at_20_max value: 39.6813 - type: nauc_recall_at_20_std value: 11.1138 - type: nauc_recall_at_20_diff1 value: 36.8881 - type: nauc_recall_at_100_max value: 44.9346 - type: nauc_recall_at_100_std value: 22.5203 - type: nauc_recall_at_100_diff1 value: 34.8792 - type: nauc_recall_at_1000_max value: 52.49979999999999 - type: nauc_recall_at_1000_std value: 50.954299999999996 - type: nauc_recall_at_1000_diff1 value: 36.1016 - type: nauc_precision_at_1_max value: 45.115300000000005 - type: nauc_precision_at_1_std value: -5.3469999999999995 - type: nauc_precision_at_1_diff1 value: 50.792899999999996 - type: nauc_precision_at_3_max value: 41.841 - type: nauc_precision_at_3_std value: 3.3930000000000002 - type: nauc_precision_at_3_diff1 value: 27.495399999999997 - type: nauc_precision_at_5_max value: 38.527 - type: nauc_precision_at_5_std value: 8.2496 - type: nauc_precision_at_5_diff1 value: 19.3628 - type: nauc_precision_at_10_max value: 27.5499 - type: nauc_precision_at_10_std value: 13.264100000000001 - type: nauc_precision_at_10_diff1 value: 9.9718 - type: nauc_precision_at_20_max value: 21.431 - type: nauc_precision_at_20_std value: 14.426400000000001 - type: nauc_precision_at_20_diff1 value: -0.11030000000000001 - type: nauc_precision_at_100_max value: 6.8088 - type: nauc_precision_at_100_std value: 9.8979 - type: nauc_precision_at_100_diff1 value: -10.1603 - type: nauc_precision_at_1000_max value: -6.4949 - type: nauc_precision_at_1000_std value: -3.9967999999999995 - type: nauc_precision_at_1000_diff1 value: -17.765800000000002 - type: nauc_mrr_at_1_max value: 45.115300000000005 - type: nauc_mrr_at_1_std value: -5.3469999999999995 - type: nauc_mrr_at_1_diff1 value: 50.792899999999996 - type: nauc_mrr_at_3_max value: 45.8581 - type: nauc_mrr_at_3_std value: -2.9239 - type: nauc_mrr_at_3_diff1 value: 47.079 - type: nauc_mrr_at_5_max value: 45.5453 - type: nauc_mrr_at_5_std value: -2.2778 - type: nauc_mrr_at_5_diff1 value: 47.0394 - type: nauc_mrr_at_10_max value: 45.2727 - type: nauc_mrr_at_10_std value: -2.1793 - type: nauc_mrr_at_10_diff1 value: 46.7719 - type: nauc_mrr_at_20_max value: 45.232 - type: nauc_mrr_at_20_std value: -2.0842 - type: nauc_mrr_at_20_diff1 value: 46.75 - type: nauc_mrr_at_100_max value: 45.3233 - type: nauc_mrr_at_100_std value: -2.0778000000000003 - type: nauc_mrr_at_100_diff1 value: 46.7919 - type: nauc_mrr_at_1000_max value: 45.325700000000005 - type: nauc_mrr_at_1000_std value: -2.0868 - type: nauc_mrr_at_1000_diff1 value: 46.812799999999996 - type: main_score value: 53.047999999999995 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval (default) type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: ndcg_at_1 value: 35.796 - type: ndcg_at_3 value: 40.036 - type: ndcg_at_5 value: 41.778 - type: ndcg_at_10 value: 43.868 - type: ndcg_at_20 value: 45.777 - type: ndcg_at_100 value: 48.771 - type: ndcg_at_1000 value: 51.001 - type: map_at_1 value: 28.177000000000003 - type: map_at_3 value: 35.445 - type: map_at_5 value: 36.976 - type: map_at_10 value: 38.25 - type: map_at_20 value: 38.981 - type: map_at_100 value: 39.585 - type: map_at_1000 value: 39.728 - type: recall_at_1 value: 28.177000000000003 - type: recall_at_3 value: 41.782000000000004 - type: recall_at_5 value: 46.861000000000004 - type: recall_at_10 value: 53.464 - type: recall_at_20 value: 60.621 - type: recall_at_100 value: 74.628 - type: recall_at_1000 value: 88.839 - type: 
precision_at_1 value: 35.796 - type: precision_at_3 value: 19.639 - type: precision_at_5 value: 13.924 - type: precision_at_10 value: 8.439 - type: precision_at_20 value: 5.016 - type: precision_at_100 value: 1.394 - type: precision_at_1000 value: 0.189 - type: mrr_at_1 value: 35.7962 - type: mrr_at_3 value: 42.1019 - type: mrr_at_5 value: 43.4172 - type: mrr_at_10 value: 44.2407 - type: mrr_at_20 value: 44.6907 - type: mrr_at_100 value: 45.0075 - type: mrr_at_1000 value: 45.059 - type: nauc_ndcg_at_1_max value: 47.856 - type: nauc_ndcg_at_1_std value: 3.0363 - type: nauc_ndcg_at_1_diff1 value: 48.7364 - type: nauc_ndcg_at_3_max value: 49.2728 - type: nauc_ndcg_at_3_std value: 4.1776 - type: nauc_ndcg_at_3_diff1 value: 45.1449 - type: nauc_ndcg_at_5_max value: 49.5649 - type: nauc_ndcg_at_5_std value: 3.7340999999999998 - type: nauc_ndcg_at_5_diff1 value: 44.6651 - type: nauc_ndcg_at_10_max value: 50.1977 - type: nauc_ndcg_at_10_std value: 4.5302 - type: nauc_ndcg_at_10_diff1 value: 45.0403 - type: nauc_ndcg_at_20_max value: 49.9326 - type: nauc_ndcg_at_20_std value: 5.5147 - type: nauc_ndcg_at_20_diff1 value: 44.5055 - type: nauc_ndcg_at_100_max value: 50.3035 - type: nauc_ndcg_at_100_std value: 7.1086 - type: nauc_ndcg_at_100_diff1 value: 44.451 - type: nauc_ndcg_at_1000_max value: 50.1836 - type: nauc_ndcg_at_1000_std value: 7.4503 - type: nauc_ndcg_at_1000_diff1 value: 44.301899999999996 - type: nauc_map_at_1_max value: 41.2555 - type: nauc_map_at_1_std value: -5.2668 - type: nauc_map_at_1_diff1 value: 52.0284 - type: nauc_map_at_3_max value: 46.6939 - type: nauc_map_at_3_std value: -0.8533000000000001 - type: nauc_map_at_3_diff1 value: 47.9095 - type: nauc_map_at_5_max value: 47.5024 - type: nauc_map_at_5_std value: -0.05109999999999999 - type: nauc_map_at_5_diff1 value: 47.1421 - type: nauc_map_at_10_max value: 48.1632 - type: nauc_map_at_10_std value: 0.8672 - type: nauc_map_at_10_diff1 value: 46.9929 - type: nauc_map_at_20_max value: 48.2708 - type: nauc_map_at_20_std value: 1.5195 - type: nauc_map_at_20_diff1 value: 46.7349 - type: nauc_map_at_100_max value: 48.5516 - type: nauc_map_at_100_std value: 2.1593 - type: nauc_map_at_100_diff1 value: 46.6641 - type: nauc_map_at_1000_max value: 48.6017 - type: nauc_map_at_1000_std value: 2.2745 - type: nauc_map_at_1000_diff1 value: 46.649 - type: nauc_recall_at_1_max value: 41.2555 - type: nauc_recall_at_1_std value: -5.2668 - type: nauc_recall_at_1_diff1 value: 52.0284 - type: nauc_recall_at_3_max value: 47.0403 - type: nauc_recall_at_3_std value: 1.5399 - type: nauc_recall_at_3_diff1 value: 42.998599999999996 - type: nauc_recall_at_5_max value: 47.7652 - type: nauc_recall_at_5_std value: 2.5079000000000002 - type: nauc_recall_at_5_diff1 value: 40.131099999999996 - type: nauc_recall_at_10_max value: 49.215199999999996 - type: nauc_recall_at_10_std value: 5.6207 - type: nauc_recall_at_10_diff1 value: 40.0067 - type: nauc_recall_at_20_max value: 47.6907 - type: nauc_recall_at_20_std value: 10.0091 - type: nauc_recall_at_20_diff1 value: 36.548 - type: nauc_recall_at_100_max value: 49.8978 - type: nauc_recall_at_100_std value: 20.7533 - type: nauc_recall_at_100_diff1 value: 34.463100000000004 - type: nauc_recall_at_1000_max value: 49.2751 - type: nauc_recall_at_1000_std value: 33.7021 - type: nauc_recall_at_1000_diff1 value: 27.995199999999997 - type: nauc_precision_at_1_max value: 47.856 - type: nauc_precision_at_1_std value: 3.0363 - type: nauc_precision_at_1_diff1 value: 48.7364 - type: nauc_precision_at_3_max value: 48.0591 - type: 
nauc_precision_at_3_std value: 16.0079 - type: nauc_precision_at_3_diff1 value: 28.286099999999998 - type: nauc_precision_at_5_max value: 45.3901 - type: nauc_precision_at_5_std value: 18.939500000000002 - type: nauc_precision_at_5_diff1 value: 20.7183 - type: nauc_precision_at_10_max value: 40.2901 - type: nauc_precision_at_10_std value: 24.1368 - type: nauc_precision_at_10_diff1 value: 13.1708 - type: nauc_precision_at_20_max value: 34.5736 - type: nauc_precision_at_20_std value: 28.524 - type: nauc_precision_at_20_diff1 value: 6.0857 - type: nauc_precision_at_100_max value: 24.0575 - type: nauc_precision_at_100_std value: 32.7048 - type: nauc_precision_at_100_diff1 value: -4.175800000000001 - type: nauc_precision_at_1000_max value: 11.3804 - type: nauc_precision_at_1000_std value: 28.917700000000004 - type: nauc_precision_at_1000_diff1 value: -11.994100000000001 - type: nauc_mrr_at_1_max value: 47.856 - type: nauc_mrr_at_1_std value: 3.0363 - type: nauc_mrr_at_1_diff1 value: 48.7364 - type: nauc_mrr_at_3_max value: 50.048 - type: nauc_mrr_at_3_std value: 6.464300000000001 - type: nauc_mrr_at_3_diff1 value: 45.5115 - type: nauc_mrr_at_5_max value: 50.0947 - type: nauc_mrr_at_5_std value: 6.3483 - type: nauc_mrr_at_5_diff1 value: 44.8476 - type: nauc_mrr_at_10_max value: 50.244699999999995 - type: nauc_mrr_at_10_std value: 6.666900000000001 - type: nauc_mrr_at_10_diff1 value: 45.0222 - type: nauc_mrr_at_20_max value: 50.1332 - type: nauc_mrr_at_20_std value: 6.868200000000001 - type: nauc_mrr_at_20_diff1 value: 44.8895 - type: nauc_mrr_at_100_max value: 50.1173 - type: nauc_mrr_at_100_std value: 6.930600000000001 - type: nauc_mrr_at_100_diff1 value: 44.8887 - type: nauc_mrr_at_1000_max value: 50.11259999999999 - type: nauc_mrr_at_1000_std value: 6.923799999999999 - type: nauc_mrr_at_1000_diff1 value: 44.8928 - type: main_score value: 43.868 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval (default) type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: ndcg_at_1 value: 43.448 - type: ndcg_at_3 value: 51.032999999999994 - type: ndcg_at_5 value: 53.73 - type: ndcg_at_10 value: 56.369 - type: ndcg_at_20 value: 58.167 - type: ndcg_at_100 value: 60.28 - type: ndcg_at_1000 value: 61.511 - type: map_at_1 value: 38.115 - type: map_at_3 value: 47.355999999999995 - type: map_at_5 value: 49.221 - type: map_at_10 value: 50.57000000000001 - type: map_at_20 value: 51.2 - type: map_at_100 value: 51.568999999999996 - type: map_at_1000 value: 51.627 - type: recall_at_1 value: 38.115 - type: recall_at_3 value: 55.733 - type: recall_at_5 value: 62.41100000000001 - type: recall_at_10 value: 70.11800000000001 - type: recall_at_20 value: 76.714 - type: recall_at_100 value: 87.071 - type: recall_at_1000 value: 95.921 - type: precision_at_1 value: 43.448 - type: precision_at_3 value: 22.947 - type: precision_at_5 value: 15.799 - type: precision_at_10 value: 9.154 - type: precision_at_20 value: 5.141 - type: precision_at_100 value: 1.196 - type: precision_at_1000 value: 0.135 - type: mrr_at_1 value: 43.4483 - type: mrr_at_3 value: 51.3689 - type: mrr_at_5 value: 52.8955 - type: mrr_at_10 value: 53.809200000000004 - type: mrr_at_20 value: 54.224700000000006 - type: mrr_at_100 value: 54.4617 - type: mrr_at_1000 value: 54.49079999999999 - type: nauc_ndcg_at_1_max value: 41.9268 - type: nauc_ndcg_at_1_std value: -6.0252 - type: nauc_ndcg_at_1_diff1 value: 55.4978 - type: nauc_ndcg_at_3_max value: 43.5492 - type: 
nauc_ndcg_at_3_std value: -4.7010000000000005 - type: nauc_ndcg_at_3_diff1 value: 51.0898 - type: nauc_ndcg_at_5_max value: 44.7544 - type: nauc_ndcg_at_5_std value: -2.9584 - type: nauc_ndcg_at_5_diff1 value: 50.6481 - type: nauc_ndcg_at_10_max value: 45.2203 - type: nauc_ndcg_at_10_std value: -1.6934 - type: nauc_ndcg_at_10_diff1 value: 49.9874 - type: nauc_ndcg_at_20_max value: 45.002199999999995 - type: nauc_ndcg_at_20_std value: -0.9383 - type: nauc_ndcg_at_20_diff1 value: 49.666700000000006 - type: nauc_ndcg_at_100_max value: 45.448699999999995 - type: nauc_ndcg_at_100_std value: -0.1934 - type: nauc_ndcg_at_100_diff1 value: 50.0483 - type: nauc_ndcg_at_1000_max value: 45.3335 - type: nauc_ndcg_at_1000_std value: -0.42389999999999994 - type: nauc_ndcg_at_1000_diff1 value: 50.5614 - type: nauc_map_at_1_max value: 35.7022 - type: nauc_map_at_1_std value: -6.6763 - type: nauc_map_at_1_diff1 value: 54.848699999999994 - type: nauc_map_at_3_max value: 41.5987 - type: nauc_map_at_3_std value: -6.3043000000000005 - type: nauc_map_at_3_diff1 value: 52.058400000000006 - type: nauc_map_at_5_max value: 42.5887 - type: nauc_map_at_5_std value: -5.0012 - type: nauc_map_at_5_diff1 value: 51.804300000000005 - type: nauc_map_at_10_max value: 43.085 - type: nauc_map_at_10_std value: -4.1721 - type: nauc_map_at_10_diff1 value: 51.524499999999996 - type: nauc_map_at_20_max value: 43.185 - type: nauc_map_at_20_std value: -3.6862 - type: nauc_map_at_20_diff1 value: 51.4297 - type: nauc_map_at_100_max value: 43.3473 - type: nauc_map_at_100_std value: -3.4286999999999996 - type: nauc_map_at_100_diff1 value: 51.497099999999996 - type: nauc_map_at_1000_max value: 43.358799999999995 - type: nauc_map_at_1000_std value: -3.3894 - type: nauc_map_at_1000_diff1 value: 51.5155 - type: nauc_recall_at_1_max value: 35.7022 - type: nauc_recall_at_1_std value: -6.6763 - type: nauc_recall_at_1_diff1 value: 54.848699999999994 - type: nauc_recall_at_3_max value: 42.9096 - type: nauc_recall_at_3_std value: -5.9907 - type: nauc_recall_at_3_diff1 value: 47.407 - type: nauc_recall_at_5_max value: 45.9891 - type: nauc_recall_at_5_std value: -0.5341 - type: nauc_recall_at_5_diff1 value: 45.336 - type: nauc_recall_at_10_max value: 47.457899999999995 - type: nauc_recall_at_10_std value: 4.2982 - type: nauc_recall_at_10_diff1 value: 41.6 - type: nauc_recall_at_20_max value: 47.3364 - type: nauc_recall_at_20_std value: 9.667100000000001 - type: nauc_recall_at_20_diff1 value: 38.4822 - type: nauc_recall_at_100_max value: 52.0554 - type: nauc_recall_at_100_std value: 21.6585 - type: nauc_recall_at_100_diff1 value: 35.2361 - type: nauc_recall_at_1000_max value: 62.38590000000001 - type: nauc_recall_at_1000_std value: 42.5442 - type: nauc_recall_at_1000_diff1 value: 37.1857 - type: nauc_precision_at_1_max value: 41.9268 - type: nauc_precision_at_1_std value: -6.0252 - type: nauc_precision_at_1_diff1 value: 55.4978 - type: nauc_precision_at_3_max value: 44.0934 - type: nauc_precision_at_3_std value: 2.4657 - type: nauc_precision_at_3_diff1 value: 33.468399999999995 - type: nauc_precision_at_5_max value: 41.8649 - type: nauc_precision_at_5_std value: 8.4992 - type: nauc_precision_at_5_diff1 value: 25.8132 - type: nauc_precision_at_10_max value: 36.8909 - type: nauc_precision_at_10_std value: 15.173200000000001 - type: nauc_precision_at_10_diff1 value: 16.0022 - type: nauc_precision_at_20_max value: 31.3774 - type: nauc_precision_at_20_std value: 21.304100000000002 - type: nauc_precision_at_20_diff1 value: 7.8406 - type: 
nauc_precision_at_100_max value: 23.828 - type: nauc_precision_at_100_std value: 27.3387 - type: nauc_precision_at_100_diff1 value: -0.5574 - type: nauc_precision_at_1000_max value: 14.3787 - type: nauc_precision_at_1000_std value: 27.8714 - type: nauc_precision_at_1000_diff1 value: -6.372400000000001 - type: nauc_mrr_at_1_max value: 41.9268 - type: nauc_mrr_at_1_std value: -6.0252 - type: nauc_mrr_at_1_diff1 value: 55.4978 - type: nauc_mrr_at_3_max value: 44.3228 - type: nauc_mrr_at_3_std value: -4.8039 - type: nauc_mrr_at_3_diff1 value: 52.6895 - type: nauc_mrr_at_5_max value: 45.0053 - type: nauc_mrr_at_5_std value: -3.5381000000000005 - type: nauc_mrr_at_5_diff1 value: 52.321 - type: nauc_mrr_at_10_max value: 44.9242 - type: nauc_mrr_at_10_std value: -3.2841 - type: nauc_mrr_at_10_diff1 value: 52.0518 - type: nauc_mrr_at_20_max value: 44.8189 - type: nauc_mrr_at_20_std value: -3.1717000000000004 - type: nauc_mrr_at_20_diff1 value: 52.0415 - type: nauc_mrr_at_100_max value: 44.8679 - type: nauc_mrr_at_100_std value: -3.1606 - type: nauc_mrr_at_100_diff1 value: 52.1083 - type: nauc_mrr_at_1000_max value: 44.864599999999996 - type: nauc_mrr_at_1000_std value: -3.167 - type: nauc_mrr_at_1000_diff1 value: 52.121399999999994 - type: main_score value: 56.369 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval (default) type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: ndcg_at_1 value: 31.863999999999997 - type: ndcg_at_3 value: 38.537 - type: ndcg_at_5 value: 41.104 - type: ndcg_at_10 value: 43.503 - type: ndcg_at_20 value: 45.413 - type: ndcg_at_100 value: 48.291000000000004 - type: ndcg_at_1000 value: 50.26199999999999 - type: map_at_1 value: 29.37 - type: map_at_3 value: 35.824 - type: map_at_5 value: 37.408 - type: map_at_10 value: 38.452999999999996 - type: map_at_20 value: 39.004 - type: map_at_100 value: 39.421 - type: map_at_1000 value: 39.501 - type: recall_at_1 value: 29.37 - type: recall_at_3 value: 43.442 - type: recall_at_5 value: 49.551 - type: recall_at_10 value: 56.791000000000004 - type: recall_at_20 value: 63.93 - type: recall_at_100 value: 78.666 - type: recall_at_1000 value: 93.354 - type: precision_at_1 value: 31.863999999999997 - type: precision_at_3 value: 16.083 - type: precision_at_5 value: 11.254 - type: precision_at_10 value: 6.508 - type: precision_at_20 value: 3.712 - type: precision_at_100 value: 0.9390000000000001 - type: precision_at_1000 value: 0.11399999999999999 - type: mrr_at_1 value: 31.8644 - type: mrr_at_3 value: 38.5122 - type: mrr_at_5 value: 39.873799999999996 - type: mrr_at_10 value: 40.8308 - type: mrr_at_20 value: 41.3284 - type: mrr_at_100 value: 41.6819 - type: mrr_at_1000 value: 41.7416 - type: nauc_ndcg_at_1_max value: 33.7601 - type: nauc_ndcg_at_1_std value: -9.8717 - type: nauc_ndcg_at_1_diff1 value: 42.2537 - type: nauc_ndcg_at_3_max value: 34.409600000000005 - type: nauc_ndcg_at_3_std value: -10.6027 - type: nauc_ndcg_at_3_diff1 value: 40.0317 - type: nauc_ndcg_at_5_max value: 34.0482 - type: nauc_ndcg_at_5_std value: -9.0778 - type: nauc_ndcg_at_5_diff1 value: 39.421499999999995 - type: nauc_ndcg_at_10_max value: 34.5365 - type: nauc_ndcg_at_10_std value: -7.3511999999999995 - type: nauc_ndcg_at_10_diff1 value: 38.6886 - type: nauc_ndcg_at_20_max value: 35.335699999999996 - type: nauc_ndcg_at_20_std value: -5.9596 - type: nauc_ndcg_at_20_diff1 value: 38.6051 - type: nauc_ndcg_at_100_max value: 34.6961 - type: nauc_ndcg_at_100_std value: -6.5812 - 
type: nauc_ndcg_at_100_diff1 value: 37.8079 - type: nauc_ndcg_at_1000_max value: 34.3938 - type: nauc_ndcg_at_1000_std value: -6.9155 - type: nauc_ndcg_at_1000_diff1 value: 38.2247 - type: nauc_map_at_1_max value: 32.231500000000004 - type: nauc_map_at_1_std value: -11.4991 - type: nauc_map_at_1_diff1 value: 44.7044 - type: nauc_map_at_3_max value: 34.0411 - type: nauc_map_at_3_std value: -10.8111 - type: nauc_map_at_3_diff1 value: 41.6004 - type: nauc_map_at_5_max value: 33.9275 - type: nauc_map_at_5_std value: -9.9881 - type: nauc_map_at_5_diff1 value: 41.1704 - type: nauc_map_at_10_max value: 34.1806 - type: nauc_map_at_10_std value: -9.2606 - type: nauc_map_at_10_diff1 value: 40.9213 - type: nauc_map_at_20_max value: 34.474 - type: nauc_map_at_20_std value: -8.798599999999999 - type: nauc_map_at_20_diff1 value: 40.9088 - type: nauc_map_at_100_max value: 34.381699999999995 - type: nauc_map_at_100_std value: -8.869 - type: nauc_map_at_100_diff1 value: 40.7894 - type: nauc_map_at_1000_max value: 34.3718 - type: nauc_map_at_1000_std value: -8.8674 - type: nauc_map_at_1000_diff1 value: 40.801700000000004 - type: nauc_recall_at_1_max value: 32.231500000000004 - type: nauc_recall_at_1_std value: -11.4991 - type: nauc_recall_at_1_diff1 value: 44.7044 - type: nauc_recall_at_3_max value: 33.4997 - type: nauc_recall_at_3_std value: -10.793999999999999 - type: nauc_recall_at_3_diff1 value: 36.8971 - type: nauc_recall_at_5_max value: 33.217600000000004 - type: nauc_recall_at_5_std value: -7.4771 - type: nauc_recall_at_5_diff1 value: 35.7378 - type: nauc_recall_at_10_max value: 34.3881 - type: nauc_recall_at_10_std value: -1.9206 - type: nauc_recall_at_10_diff1 value: 33.024300000000004 - type: nauc_recall_at_20_max value: 37.1734 - type: nauc_recall_at_20_std value: 4.5757 - type: nauc_recall_at_20_diff1 value: 31.7119 - type: nauc_recall_at_100_max value: 33.3328 - type: nauc_recall_at_100_std value: 4.0235 - type: nauc_recall_at_100_diff1 value: 23.5836 - type: nauc_recall_at_1000_max value: 23.6203 - type: nauc_recall_at_1000_std value: 10.4212 - type: nauc_recall_at_1000_diff1 value: 16.5204 - type: nauc_precision_at_1_max value: 33.7601 - type: nauc_precision_at_1_std value: -9.8717 - type: nauc_precision_at_1_diff1 value: 42.2537 - type: nauc_precision_at_3_max value: 37.046099999999996 - type: nauc_precision_at_3_std value: -8.1696 - type: nauc_precision_at_3_diff1 value: 32.893699999999995 - type: nauc_precision_at_5_max value: 33.5411 - type: nauc_precision_at_5_std value: -3.8621000000000003 - type: nauc_precision_at_5_diff1 value: 28.4192 - type: nauc_precision_at_10_max value: 33.8177 - type: nauc_precision_at_10_std value: 1.4605 - type: nauc_precision_at_10_diff1 value: 23.8779 - type: nauc_precision_at_20_max value: 33.2362 - type: nauc_precision_at_20_std value: 6.8675 - type: nauc_precision_at_20_diff1 value: 19.12 - type: nauc_precision_at_100_max value: 22.0581 - type: nauc_precision_at_100_std value: 5.6537999999999995 - type: nauc_precision_at_100_diff1 value: 2.677 - type: nauc_precision_at_1000_max value: 6.4192 - type: nauc_precision_at_1000_std value: 5.2604999999999995 - type: nauc_precision_at_1000_diff1 value: -12.5191 - type: nauc_mrr_at_1_max value: 33.7601 - type: nauc_mrr_at_1_std value: -9.8717 - type: nauc_mrr_at_1_diff1 value: 42.2537 - type: nauc_mrr_at_3_max value: 34.590700000000005 - type: nauc_mrr_at_3_std value: -9.3063 - type: nauc_mrr_at_3_diff1 value: 39.157599999999995 - type: nauc_mrr_at_5_max value: 34.262 - type: nauc_mrr_at_5_std value: -8.6629 - 
type: nauc_mrr_at_5_diff1 value: 38.7425 - type: nauc_mrr_at_10_max value: 34.3456 - type: nauc_mrr_at_10_std value: -8.0433 - type: nauc_mrr_at_10_diff1 value: 38.474199999999996 - type: nauc_mrr_at_20_max value: 34.504400000000004 - type: nauc_mrr_at_20_std value: -7.7764 - type: nauc_mrr_at_20_diff1 value: 38.4646 - type: nauc_mrr_at_100_max value: 34.407700000000006 - type: nauc_mrr_at_100_std value: -7.8669 - type: nauc_mrr_at_100_diff1 value: 38.4062 - type: nauc_mrr_at_1000_max value: 34.400999999999996 - type: nauc_mrr_at_1000_std value: -7.8653 - type: nauc_mrr_at_1000_diff1 value: 38.4264 - type: main_score value: 43.503 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval (default) type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: ndcg_at_1 value: 22.637 - type: ndcg_at_3 value: 26.865 - type: ndcg_at_5 value: 29.506 - type: ndcg_at_10 value: 32.024 - type: ndcg_at_20 value: 34.123999999999995 - type: ndcg_at_100 value: 38.013999999999996 - type: ndcg_at_1000 value: 40.681 - type: map_at_1 value: 18.354 - type: map_at_3 value: 23.777 - type: map_at_5 value: 25.380000000000003 - type: map_at_10 value: 26.588 - type: map_at_20 value: 27.227 - type: map_at_100 value: 27.851 - type: map_at_1000 value: 27.971 - type: recall_at_1 value: 18.354 - type: recall_at_3 value: 30.029 - type: recall_at_5 value: 36.716 - type: recall_at_10 value: 44.083 - type: recall_at_20 value: 51.653000000000006 - type: recall_at_100 value: 70.24000000000001 - type: recall_at_1000 value: 88.941 - type: precision_at_1 value: 22.637 - type: precision_at_3 value: 12.852 - type: precision_at_5 value: 9.652 - type: precision_at_10 value: 5.970000000000001 - type: precision_at_20 value: 3.557 - type: precision_at_100 value: 1.035 - type: precision_at_1000 value: 0.13899999999999998 - type: mrr_at_1 value: 22.6368 - type: mrr_at_3 value: 28.296 - type: mrr_at_5 value: 30.198999999999998 - type: mrr_at_10 value: 31.2411 - type: mrr_at_20 value: 31.773600000000002 - type: mrr_at_100 value: 32.230199999999996 - type: mrr_at_1000 value: 32.2949 - type: nauc_ndcg_at_1_max value: 31.0579 - type: nauc_ndcg_at_1_std value: -1.1154000000000002 - type: nauc_ndcg_at_1_diff1 value: 37.0188 - type: nauc_ndcg_at_3_max value: 30.6319 - type: nauc_ndcg_at_3_std value: 1.2079 - type: nauc_ndcg_at_3_diff1 value: 29.7055 - type: nauc_ndcg_at_5_max value: 29.2059 - type: nauc_ndcg_at_5_std value: 3.0105 - type: nauc_ndcg_at_5_diff1 value: 28.0947 - type: nauc_ndcg_at_10_max value: 29.2307 - type: nauc_ndcg_at_10_std value: 3.1515 - type: nauc_ndcg_at_10_diff1 value: 27.2115 - type: nauc_ndcg_at_20_max value: 29.1914 - type: nauc_ndcg_at_20_std value: 3.9833 - type: nauc_ndcg_at_20_diff1 value: 27.287899999999997 - type: nauc_ndcg_at_100_max value: 30.759999999999998 - type: nauc_ndcg_at_100_std value: 5.6163 - type: nauc_ndcg_at_100_diff1 value: 28.1445 - type: nauc_ndcg_at_1000_max value: 30.4012 - type: nauc_ndcg_at_1000_std value: 4.8586 - type: nauc_ndcg_at_1000_diff1 value: 27.7366 - type: nauc_map_at_1_max value: 26.9538 - type: nauc_map_at_1_std value: -0.9815 - type: nauc_map_at_1_diff1 value: 35.1964 - type: nauc_map_at_3_max value: 28.9516 - type: nauc_map_at_3_std value: 0.6373 - type: nauc_map_at_3_diff1 value: 30.476599999999998 - type: nauc_map_at_5_max value: 28.3735 - type: nauc_map_at_5_std value: 1.5893000000000002 - type: nauc_map_at_5_diff1 value: 29.4822 - type: nauc_map_at_10_max value: 28.4489 - type: 
nauc_map_at_10_std value: 1.7179 - type: nauc_map_at_10_diff1 value: 29.0721 - type: nauc_map_at_20_max value: 28.6443 - type: nauc_map_at_20_std value: 1.9567999999999999 - type: nauc_map_at_20_diff1 value: 29.2744 - type: nauc_map_at_100_max value: 28.9144 - type: nauc_map_at_100_std value: 2.2790999999999997 - type: nauc_map_at_100_diff1 value: 29.3889 - type: nauc_map_at_1000_max value: 28.8827 - type: nauc_map_at_1000_std value: 2.2127999999999997 - type: nauc_map_at_1000_diff1 value: 29.367700000000003 - type: nauc_recall_at_1_max value: 26.9538 - type: nauc_recall_at_1_std value: -0.9815 - type: nauc_recall_at_1_diff1 value: 35.1964 - type: nauc_recall_at_3_max value: 29.2823 - type: nauc_recall_at_3_std value: 2.2192 - type: nauc_recall_at_3_diff1 value: 25.174400000000002 - type: nauc_recall_at_5_max value: 26.098300000000002 - type: nauc_recall_at_5_std value: 5.870100000000001 - type: nauc_recall_at_5_diff1 value: 21.5717 - type: nauc_recall_at_10_max value: 26.3965 - type: nauc_recall_at_10_std value: 5.9524 - type: nauc_recall_at_10_diff1 value: 19.2576 - type: nauc_recall_at_20_max value: 25.014799999999997 - type: nauc_recall_at_20_std value: 8.889800000000001 - type: nauc_recall_at_20_diff1 value: 18.2048 - type: nauc_recall_at_100_max value: 32.664100000000005 - type: nauc_recall_at_100_std value: 20.66 - type: nauc_recall_at_100_diff1 value: 20.7167 - type: nauc_recall_at_1000_max value: 32.7425 - type: nauc_recall_at_1000_std value: 31.798 - type: nauc_recall_at_1000_diff1 value: 6.1744 - type: nauc_precision_at_1_max value: 31.0579 - type: nauc_precision_at_1_std value: -1.1154000000000002 - type: nauc_precision_at_1_diff1 value: 37.0188 - type: nauc_precision_at_3_max value: 34.0041 - type: nauc_precision_at_3_std value: 2.759 - type: nauc_precision_at_3_diff1 value: 26.0113 - type: nauc_precision_at_5_max value: 31.591599999999996 - type: nauc_precision_at_5_std value: 7.019499999999999 - type: nauc_precision_at_5_diff1 value: 22.5517 - type: nauc_precision_at_10_max value: 28.9779 - type: nauc_precision_at_10_std value: 6.0112 - type: nauc_precision_at_10_diff1 value: 18.4627 - type: nauc_precision_at_20_max value: 27.2677 - type: nauc_precision_at_20_std value: 7.9853 - type: nauc_precision_at_20_diff1 value: 17.6528 - type: nauc_precision_at_100_max value: 23.8248 - type: nauc_precision_at_100_std value: 9.9215 - type: nauc_precision_at_100_diff1 value: 13.5355 - type: nauc_precision_at_1000_max value: 9.9312 - type: nauc_precision_at_1000_std value: 1.8778 - type: nauc_precision_at_1000_diff1 value: 3.6692 - type: nauc_mrr_at_1_max value: 31.0579 - type: nauc_mrr_at_1_std value: -1.1154000000000002 - type: nauc_mrr_at_1_diff1 value: 37.0188 - type: nauc_mrr_at_3_max value: 32.265100000000004 - type: nauc_mrr_at_3_std value: 0.4738 - type: nauc_mrr_at_3_diff1 value: 31.6965 - type: nauc_mrr_at_5_max value: 31.610100000000003 - type: nauc_mrr_at_5_std value: 1.693 - type: nauc_mrr_at_5_diff1 value: 31.2068 - type: nauc_mrr_at_10_max value: 31.593500000000002 - type: nauc_mrr_at_10_std value: 1.6910999999999998 - type: nauc_mrr_at_10_diff1 value: 30.988300000000002 - type: nauc_mrr_at_20_max value: 31.4229 - type: nauc_mrr_at_20_std value: 1.9178000000000002 - type: nauc_mrr_at_20_diff1 value: 30.911 - type: nauc_mrr_at_100_max value: 31.510500000000004 - type: nauc_mrr_at_100_std value: 1.9404000000000001 - type: nauc_mrr_at_100_diff1 value: 30.928499999999996 - type: nauc_mrr_at_1000_max value: 31.499899999999997 - type: nauc_mrr_at_1000_std value: 
1.9026999999999998 - type: nauc_mrr_at_1000_diff1 value: 30.9234 - type: main_score value: 32.024 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval (default) type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: ndcg_at_1 value: 36.477 - type: ndcg_at_3 value: 41.9 - type: ndcg_at_5 value: 44.352000000000004 - type: ndcg_at_10 value: 47.316 - type: ndcg_at_20 value: 49.262 - type: ndcg_at_100 value: 52.5 - type: ndcg_at_1000 value: 54.433 - type: map_at_1 value: 29.633 - type: map_at_3 value: 37.374 - type: map_at_5 value: 39.327 - type: map_at_10 value: 40.897 - type: map_at_20 value: 41.629 - type: map_at_100 value: 42.221 - type: map_at_1000 value: 42.337 - type: recall_at_1 value: 29.633 - type: recall_at_3 value: 45.141999999999996 - type: recall_at_5 value: 51.578 - type: recall_at_10 value: 60.465999999999994 - type: recall_at_20 value: 67.012 - type: recall_at_100 value: 82.174 - type: recall_at_1000 value: 94.65 - type: precision_at_1 value: 36.477 - type: precision_at_3 value: 20.308 - type: precision_at_5 value: 14.379 - type: precision_at_10 value: 8.816 - type: precision_at_20 value: 5.106 - type: precision_at_100 value: 1.3419999999999999 - type: precision_at_1000 value: 0.169 - type: mrr_at_1 value: 36.477399999999996 - type: mrr_at_3 value: 44.0648 - type: mrr_at_5 value: 45.4604 - type: mrr_at_10 value: 46.6132 - type: mrr_at_20 value: 47.0122 - type: mrr_at_100 value: 47.3432 - type: mrr_at_1000 value: 47.383900000000004 - type: nauc_ndcg_at_1_max value: 44.2532 - type: nauc_ndcg_at_1_std value: 0.27399999999999997 - type: nauc_ndcg_at_1_diff1 value: 56.0608 - type: nauc_ndcg_at_3_max value: 40.7243 - type: nauc_ndcg_at_3_std value: -3.0545 - type: nauc_ndcg_at_3_diff1 value: 48.4101 - type: nauc_ndcg_at_5_max value: 39.556999999999995 - type: nauc_ndcg_at_5_std value: -3.9035 - type: nauc_ndcg_at_5_diff1 value: 47.2832 - type: nauc_ndcg_at_10_max value: 39.6116 - type: nauc_ndcg_at_10_std value: -4.2111 - type: nauc_ndcg_at_10_diff1 value: 47.0266 - type: nauc_ndcg_at_20_max value: 40.1775 - type: nauc_ndcg_at_20_std value: -2.9367 - type: nauc_ndcg_at_20_diff1 value: 47.4448 - type: nauc_ndcg_at_100_max value: 41.9972 - type: nauc_ndcg_at_100_std value: 0.46740000000000004 - type: nauc_ndcg_at_100_diff1 value: 48.4355 - type: nauc_ndcg_at_1000_max value: 42.1182 - type: nauc_ndcg_at_1000_std value: 0.8456 - type: nauc_ndcg_at_1000_diff1 value: 48.1614 - type: nauc_map_at_1_max value: 37.5422 - type: nauc_map_at_1_std value: -4.2909999999999995 - type: nauc_map_at_1_diff1 value: 55.083800000000004 - type: nauc_map_at_3_max value: 39.0107 - type: nauc_map_at_3_std value: -4.3038 - type: nauc_map_at_3_diff1 value: 49.5355 - type: nauc_map_at_5_max value: 38.9933 - type: nauc_map_at_5_std value: -4.3489 - type: nauc_map_at_5_diff1 value: 48.9543 - type: nauc_map_at_10_max value: 39.2673 - type: nauc_map_at_10_std value: -4.1611 - type: nauc_map_at_10_diff1 value: 48.891400000000004 - type: nauc_map_at_20_max value: 39.533699999999996 - type: nauc_map_at_20_std value: -3.7303 - type: nauc_map_at_20_diff1 value: 49.001099999999994 - type: nauc_map_at_100_max value: 39.9274 - type: nauc_map_at_100_std value: -3.0797000000000003 - type: nauc_map_at_100_diff1 value: 49.1862 - type: nauc_map_at_1000_max value: 39.957100000000004 - type: nauc_map_at_1000_std value: -3.0084 - type: nauc_map_at_1000_diff1 value: 49.1595 - type: nauc_recall_at_1_max value: 37.5422 - type: 
nauc_recall_at_1_std value: -4.2909999999999995 - type: nauc_recall_at_1_diff1 value: 55.083800000000004 - type: nauc_recall_at_3_max value: 35.5355 - type: nauc_recall_at_3_std value: -7.140000000000001 - type: nauc_recall_at_3_diff1 value: 42.4278 - type: nauc_recall_at_5_max value: 33.9238 - type: nauc_recall_at_5_std value: -7.9919 - type: nauc_recall_at_5_diff1 value: 39.1808 - type: nauc_recall_at_10_max value: 33.4493 - type: nauc_recall_at_10_std value: -9.1861 - type: nauc_recall_at_10_diff1 value: 36.8475 - type: nauc_recall_at_20_max value: 34.9121 - type: nauc_recall_at_20_std value: -4.8026 - type: nauc_recall_at_20_diff1 value: 37.9247 - type: nauc_recall_at_100_max value: 44.1541 - type: nauc_recall_at_100_std value: 18.1134 - type: nauc_recall_at_100_diff1 value: 41.6633 - type: nauc_recall_at_1000_max value: 56.3385 - type: nauc_recall_at_1000_std value: 53.257299999999994 - type: nauc_recall_at_1000_diff1 value: 36.1232 - type: nauc_precision_at_1_max value: 44.2532 - type: nauc_precision_at_1_std value: 0.27399999999999997 - type: nauc_precision_at_1_diff1 value: 56.0608 - type: nauc_precision_at_3_max value: 41.179 - type: nauc_precision_at_3_std value: 5.588 - type: nauc_precision_at_3_diff1 value: 32.8574 - type: nauc_precision_at_5_max value: 34.808699999999995 - type: nauc_precision_at_5_std value: 6.261 - type: nauc_precision_at_5_diff1 value: 23.993100000000002 - type: nauc_precision_at_10_max value: 30.966500000000003 - type: nauc_precision_at_10_std value: 9.9887 - type: nauc_precision_at_10_diff1 value: 16.8352 - type: nauc_precision_at_20_max value: 26.977600000000002 - type: nauc_precision_at_20_std value: 14.0043 - type: nauc_precision_at_20_diff1 value: 10.9725 - type: nauc_precision_at_100_max value: 20.0541 - type: nauc_precision_at_100_std value: 24.0399 - type: nauc_precision_at_100_diff1 value: -0.46509999999999996 - type: nauc_precision_at_1000_max value: 8.1382 - type: nauc_precision_at_1000_std value: 21.7963 - type: nauc_precision_at_1000_diff1 value: -13.7289 - type: nauc_mrr_at_1_max value: 44.2532 - type: nauc_mrr_at_1_std value: 0.27399999999999997 - type: nauc_mrr_at_1_diff1 value: 56.0608 - type: nauc_mrr_at_3_max value: 43.0277 - type: nauc_mrr_at_3_std value: -0.8843 - type: nauc_mrr_at_3_diff1 value: 51.112899999999996 - type: nauc_mrr_at_5_max value: 42.852000000000004 - type: nauc_mrr_at_5_std value: -0.8572 - type: nauc_mrr_at_5_diff1 value: 50.4937 - type: nauc_mrr_at_10_max value: 43.0093 - type: nauc_mrr_at_10_std value: -0.8631 - type: nauc_mrr_at_10_diff1 value: 50.41909999999999 - type: nauc_mrr_at_20_max value: 43.0484 - type: nauc_mrr_at_20_std value: -0.6054999999999999 - type: nauc_mrr_at_20_diff1 value: 50.527100000000004 - type: nauc_mrr_at_100_max value: 43.175200000000004 - type: nauc_mrr_at_100_std value: -0.3019 - type: nauc_mrr_at_100_diff1 value: 50.5962 - type: nauc_mrr_at_1000_max value: 43.173899999999996 - type: nauc_mrr_at_1000_std value: -0.3115 - type: nauc_mrr_at_1000_diff1 value: 50.6012 - type: main_score value: 47.316 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval (default) type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: ndcg_at_1 value: 33.676 - type: ndcg_at_3 value: 38.7 - type: ndcg_at_5 value: 41.032999999999994 - type: ndcg_at_10 value: 43.580999999999996 - type: ndcg_at_20 value: 45.992 - type: ndcg_at_100 value: 49.192 - type: ndcg_at_1000 value: 51.473 - type: map_at_1 value: 
27.389999999999997 - type: map_at_3 value: 34.660999999999994 - type: map_at_5 value: 36.38 - type: map_at_10 value: 37.768 - type: map_at_20 value: 38.534 - type: map_at_100 value: 39.091 - type: map_at_1000 value: 39.2 - type: recall_at_1 value: 27.389999999999997 - type: recall_at_3 value: 41.876000000000005 - type: recall_at_5 value: 47.961999999999996 - type: recall_at_10 value: 55.445 - type: recall_at_20 value: 64.143 - type: recall_at_100 value: 79.327 - type: recall_at_1000 value: 94.64200000000001 - type: precision_at_1 value: 33.676 - type: precision_at_3 value: 18.455 - type: precision_at_5 value: 13.128 - type: precision_at_10 value: 7.888000000000001 - type: precision_at_20 value: 4.697 - type: precision_at_100 value: 1.234 - type: precision_at_1000 value: 0.161 - type: mrr_at_1 value: 33.6758 - type: mrr_at_3 value: 40.7725 - type: mrr_at_5 value: 42.267900000000004 - type: mrr_at_10 value: 43.1813 - type: mrr_at_20 value: 43.769200000000005 - type: mrr_at_100 value: 44.0965 - type: mrr_at_1000 value: 44.149899999999995 - type: nauc_ndcg_at_1_max value: 47.957699999999996 - type: nauc_ndcg_at_1_std value: 11.211 - type: nauc_ndcg_at_1_diff1 value: 50.975899999999996 - type: nauc_ndcg_at_3_max value: 46.7077 - type: nauc_ndcg_at_3_std value: 11.8166 - type: nauc_ndcg_at_3_diff1 value: 44.183699999999995 - type: nauc_ndcg_at_5_max value: 46.5691 - type: nauc_ndcg_at_5_std value: 12.3224 - type: nauc_ndcg_at_5_diff1 value: 43.2912 - type: nauc_ndcg_at_10_max value: 45.989200000000004 - type: nauc_ndcg_at_10_std value: 13.4501 - type: nauc_ndcg_at_10_diff1 value: 41.3206 - type: nauc_ndcg_at_20_max value: 46.400400000000005 - type: nauc_ndcg_at_20_std value: 15.004000000000001 - type: nauc_ndcg_at_20_diff1 value: 40.8932 - type: nauc_ndcg_at_100_max value: 47.3346 - type: nauc_ndcg_at_100_std value: 16.5132 - type: nauc_ndcg_at_100_diff1 value: 42.126599999999996 - type: nauc_ndcg_at_1000_max value: 47.5217 - type: nauc_ndcg_at_1000_std value: 15.4551 - type: nauc_ndcg_at_1000_diff1 value: 42.5563 - type: nauc_map_at_1_max value: 42.549 - type: nauc_map_at_1_std value: 4.9833 - type: nauc_map_at_1_diff1 value: 52.14339999999999 - type: nauc_map_at_3_max value: 44.8114 - type: nauc_map_at_3_std value: 9.440800000000001 - type: nauc_map_at_3_diff1 value: 46.1197 - type: nauc_map_at_5_max value: 45.3059 - type: nauc_map_at_5_std value: 10.286900000000001 - type: nauc_map_at_5_diff1 value: 45.6263 - type: nauc_map_at_10_max value: 45.3517 - type: nauc_map_at_10_std value: 11.1304 - type: nauc_map_at_10_diff1 value: 44.6502 - type: nauc_map_at_20_max value: 45.5319 - type: nauc_map_at_20_std value: 11.5773 - type: nauc_map_at_20_diff1 value: 44.5681 - type: nauc_map_at_100_max value: 45.8019 - type: nauc_map_at_100_std value: 11.9772 - type: nauc_map_at_100_diff1 value: 44.7825 - type: nauc_map_at_1000_max value: 45.8134 - type: nauc_map_at_1000_std value: 11.9461 - type: nauc_map_at_1000_diff1 value: 44.7905 - type: nauc_recall_at_1_max value: 42.549 - type: nauc_recall_at_1_std value: 4.9833 - type: nauc_recall_at_1_diff1 value: 52.14339999999999 - type: nauc_recall_at_3_max value: 44.0409 - type: nauc_recall_at_3_std value: 11.9146 - type: nauc_recall_at_3_diff1 value: 38.6436 - type: nauc_recall_at_5_max value: 43.3961 - type: nauc_recall_at_5_std value: 12.6675 - type: nauc_recall_at_5_diff1 value: 35.5553 - type: nauc_recall_at_10_max value: 41.4966 - type: nauc_recall_at_10_std value: 16.1644 - type: nauc_recall_at_10_diff1 value: 29.2835 - type: nauc_recall_at_20_max value: 
41.474 - type: nauc_recall_at_20_std value: 22.5684 - type: nauc_recall_at_20_diff1 value: 25.7308 - type: nauc_recall_at_100_max value: 45.1253 - type: nauc_recall_at_100_std value: 36.248799999999996 - type: nauc_recall_at_100_diff1 value: 28.799500000000002 - type: nauc_recall_at_1000_max value: 54.1747 - type: nauc_recall_at_1000_std value: 47.1501 - type: nauc_recall_at_1000_diff1 value: 23.198900000000002 - type: nauc_precision_at_1_max value: 47.957699999999996 - type: nauc_precision_at_1_std value: 11.211 - type: nauc_precision_at_1_diff1 value: 50.975899999999996 - type: nauc_precision_at_3_max value: 46.6181 - type: nauc_precision_at_3_std value: 19.475 - type: nauc_precision_at_3_diff1 value: 30.6784 - type: nauc_precision_at_5_max value: 43.5114 - type: nauc_precision_at_5_std value: 22.1293 - type: nauc_precision_at_5_diff1 value: 24.6525 - type: nauc_precision_at_10_max value: 37.47 - type: nauc_precision_at_10_std value: 23.8068 - type: nauc_precision_at_10_diff1 value: 14.9368 - type: nauc_precision_at_20_max value: 33.4529 - type: nauc_precision_at_20_std value: 25.4979 - type: nauc_precision_at_20_diff1 value: 9.4501 - type: nauc_precision_at_100_max value: 23.7406 - type: nauc_precision_at_100_std value: 22.8583 - type: nauc_precision_at_100_diff1 value: 3.6348 - type: nauc_precision_at_1000_max value: 4.5396 - type: nauc_precision_at_1000_std value: 6.0796 - type: nauc_precision_at_1000_diff1 value: -7.2498000000000005 - type: nauc_mrr_at_1_max value: 47.957699999999996 - type: nauc_mrr_at_1_std value: 11.211 - type: nauc_mrr_at_1_diff1 value: 50.975899999999996 - type: nauc_mrr_at_3_max value: 48.6226 - type: nauc_mrr_at_3_std value: 13.600000000000001 - type: nauc_mrr_at_3_diff1 value: 45.2881 - type: nauc_mrr_at_5_max value: 48.402499999999996 - type: nauc_mrr_at_5_std value: 13.616 - type: nauc_mrr_at_5_diff1 value: 44.7074 - type: nauc_mrr_at_10_max value: 48.0556 - type: nauc_mrr_at_10_std value: 13.7803 - type: nauc_mrr_at_10_diff1 value: 44.0852 - type: nauc_mrr_at_20_max value: 48.173500000000004 - type: nauc_mrr_at_20_std value: 14.1617 - type: nauc_mrr_at_20_diff1 value: 44.0396 - type: nauc_mrr_at_100_max value: 48.1841 - type: nauc_mrr_at_100_std value: 14.1827 - type: nauc_mrr_at_100_diff1 value: 44.210100000000004 - type: nauc_mrr_at_1000_max value: 48.1875 - type: nauc_mrr_at_1000_std value: 14.161000000000001 - type: nauc_mrr_at_1000_diff1 value: 44.222 - type: main_score value: 43.580999999999996 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval (default) type: CQADupstackRetrieval_is_a_combined_dataset config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: ndcg_at_1 value: 32.588499999999996 - type: ndcg_at_3 value: 37.949083333333334 - type: ndcg_at_5 value: 40.258833333333335 - type: ndcg_at_10 value: 42.74341666666667 - type: ndcg_at_20 value: 44.784 - type: ndcg_at_100 value: 47.903416666666665 - type: ndcg_at_1000 value: 50.067416666666674 - type: map_at_1 value: 27.52808333333333 - type: map_at_3 value: 34.321999999999996 - type: map_at_5 value: 35.96091666666666 - type: map_at_10 value: 37.22708333333333 - type: map_at_20 value: 37.914833333333334 - type: map_at_100 value: 38.462166666666675 - type: map_at_1000 value: 38.57725 - type: recall_at_1 value: 27.52808333333333 - type: recall_at_3 value: 41.30075 - type: recall_at_5 value: 47.26408333333334 - type: recall_at_10 value: 54.663833333333336 - type: recall_at_20 value: 62.11658333333333 - type: recall_at_100 value: 77.176 - type: 
recall_at_1000 value: 92.03791666666666 - type: precision_at_1 value: 32.588499999999996 - type: precision_at_3 value: 17.485 - type: precision_at_5 value: 12.427666666666669 - type: precision_at_10 value: 7.493333333333334 - type: precision_at_20 value: 4.413499999999999 - type: precision_at_100 value: 1.18675 - type: precision_at_1000 value: 0.15691666666666665 - type: mrr_at_1 value: 32.58871666666667 - type: mrr_at_3 value: 39.09032499999999 - type: mrr_at_5 value: 40.533125 - type: mrr_at_10 value: 41.51483333333333 - type: mrr_at_20 value: 42.01036666666667 - type: mrr_at_100 value: 42.35724166666667 - type: mrr_at_1000 value: 42.41010833333333 - type: nauc_ndcg_at_1_max value: 41.86760833333334 - type: nauc_ndcg_at_1_std value: -0.022441666666666443 - type: nauc_ndcg_at_1_diff1 value: 48.604266666666675 - type: nauc_ndcg_at_3_max value: 40.649825 - type: nauc_ndcg_at_3_std value: 0.9594416666666666 - type: nauc_ndcg_at_3_diff1 value: 42.754375 - type: nauc_ndcg_at_5_max value: 40.71646666666666 - type: nauc_ndcg_at_5_std value: 1.8118249999999998 - type: nauc_ndcg_at_5_diff1 value: 42.09031666666666 - type: nauc_ndcg_at_10_max value: 40.616033333333334 - type: nauc_ndcg_at_10_std value: 2.621475 - type: nauc_ndcg_at_10_diff1 value: 41.56405833333333 - type: nauc_ndcg_at_20_max value: 41.00335 - type: nauc_ndcg_at_20_std value: 3.5835 - type: nauc_ndcg_at_20_diff1 value: 41.526025 - type: nauc_ndcg_at_100_max value: 41.626575 - type: nauc_ndcg_at_100_std value: 4.921058333333334 - type: nauc_ndcg_at_100_diff1 value: 41.785700000000006 - type: nauc_ndcg_at_1000_max value: 41.623041666666666 - type: nauc_ndcg_at_1000_std value: 4.743416666666667 - type: nauc_ndcg_at_1000_diff1 value: 41.930049999999994 - type: nauc_map_at_1_max value: 37.757374999999996 - type: nauc_map_at_1_std value: -2.7256583333333335 - type: nauc_map_at_1_diff1 value: 49.68454166666667 - type: nauc_map_at_3_max value: 39.41603333333333 - type: nauc_map_at_3_std value: -0.7485333333333334 - type: nauc_map_at_3_diff1 value: 44.64258333333333 - type: nauc_map_at_5_max value: 39.84875833333333 - type: nauc_map_at_5_std value: 0.010733333333333428 - type: nauc_map_at_5_diff1 value: 44.133975 - type: nauc_map_at_10_max value: 40.05009166666666 - type: nauc_map_at_10_std value: 0.6503083333333333 - type: nauc_map_at_10_diff1 value: 43.826724999999996 - type: nauc_map_at_20_max value: 40.287733333333335 - type: nauc_map_at_20_std value: 1.0432333333333332 - type: nauc_map_at_20_diff1 value: 43.784241666666674 - type: nauc_map_at_100_max value: 40.44630833333334 - type: nauc_map_at_100_std value: 1.3809583333333333 - type: nauc_map_at_100_diff1 value: 43.81610833333333 - type: nauc_map_at_1000_max value: 40.45624166666667 - type: nauc_map_at_1000_std value: 1.4088416666666665 - type: nauc_map_at_1000_diff1 value: 43.81260833333333 - type: nauc_recall_at_1_max value: 37.757374999999996 - type: nauc_recall_at_1_std value: -2.7256583333333335 - type: nauc_recall_at_1_diff1 value: 49.68454166666667 - type: nauc_recall_at_3_max value: 37.99286666666667 - type: nauc_recall_at_3_std value: 0.5074666666666666 - type: nauc_recall_at_3_diff1 value: 38.458816666666664 - type: nauc_recall_at_5_max value: 38.23744166666667 - type: nauc_recall_at_5_std value: 2.8538000000000006 - type: nauc_recall_at_5_diff1 value: 36.16175833333334 - type: nauc_recall_at_10_max value: 37.54170833333333 - type: nauc_recall_at_10_std value: 5.354441666666667 - type: nauc_recall_at_10_diff1 value: 33.80731666666667 - type: nauc_recall_at_20_max value: 
38.071758333333335 - type: nauc_recall_at_20_std value: 9.4403 - type: nauc_recall_at_20_diff1 value: 32.409758333333336 - type: nauc_recall_at_100_max value: 41.127158333333334 - type: nauc_recall_at_100_std value: 20.718875000000004 - type: nauc_recall_at_100_diff1 value: 30.971016666666664 - type: nauc_recall_at_1000_max value: 44.978608333333334 - type: nauc_recall_at_1000_std value: 39.36581666666667 - type: nauc_recall_at_1000_diff1 value: 27.076241666666668 - type: nauc_precision_at_1_max value: 41.86760833333334 - type: nauc_precision_at_1_std value: -0.022441666666666443 - type: nauc_precision_at_1_diff1 value: 48.604266666666675 - type: nauc_precision_at_3_max value: 40.53820000000001 - type: nauc_precision_at_3_std value: 6.682866666666667 - type: nauc_precision_at_3_diff1 value: 30.627458333333337 - type: nauc_precision_at_5_max value: 38.085708333333336 - type: nauc_precision_at_5_std value: 10.236816666666666 - type: nauc_precision_at_5_diff1 value: 24.589866666666666 - type: nauc_precision_at_10_max value: 33.795766666666665 - type: nauc_precision_at_10_std value: 13.644358333333335 - type: nauc_precision_at_10_diff1 value: 17.663875 - type: nauc_precision_at_20_max value: 30.67170833333333 - type: nauc_precision_at_20_std value: 16.899591666666666 - type: nauc_precision_at_20_diff1 value: 12.398666666666665 - type: nauc_precision_at_100_max value: 21.46699166666666 - type: nauc_precision_at_100_std value: 19.683266666666665 - type: nauc_precision_at_100_diff1 value: 2.3721666666666668 - type: nauc_precision_at_1000_max value: 6.773875 - type: nauc_precision_at_1000_std value: 13.712933333333336 - type: nauc_precision_at_1000_diff1 value: -9.302758333333333 - type: nauc_mrr_at_1_max value: 41.86760833333334 - type: nauc_mrr_at_1_std value: -0.022441666666666443 - type: nauc_mrr_at_1_diff1 value: 48.604266666666675 - type: nauc_mrr_at_3_max value: 42.065525 - type: nauc_mrr_at_3_std value: 1.6751166666666664 - type: nauc_mrr_at_3_diff1 value: 43.90220833333333 - type: nauc_mrr_at_5_max value: 42.07275833333333 - type: nauc_mrr_at_5_std value: 2.3014749999999995 - type: nauc_mrr_at_5_diff1 value: 43.440275 - type: nauc_mrr_at_10_max value: 41.955425000000005 - type: nauc_mrr_at_10_std value: 2.499491666666667 - type: nauc_mrr_at_10_diff1 value: 43.23685833333333 - type: nauc_mrr_at_20_max value: 41.98479166666666 - type: nauc_mrr_at_20_std value: 2.6983083333333333 - type: nauc_mrr_at_20_diff1 value: 43.24806666666667 - type: nauc_mrr_at_100_max value: 42.01090833333334 - type: nauc_mrr_at_100_std value: 2.7583083333333334 - type: nauc_mrr_at_100_diff1 value: 43.28899166666667 - type: nauc_mrr_at_1000_max value: 42.010841666666664 - type: nauc_mrr_at_1000_std value: 2.750433333333333 - type: nauc_mrr_at_1000_diff1 value: 43.299625 - type: main_score value: 42.74341666666667 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval (default) type: CQADupstackRetrieval_is_a_combined_dataset config: default split: test revision: CQADupstackRetrieval_is_a_combined_dataset metrics: - type: main_score value: 42.743416666666675 - type: ndcg_at_10 value: 42.743416666666675 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval (default) type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: ndcg_at_1 value: 27.607 - type: ndcg_at_3 value: 32.665 - type: ndcg_at_5 value: 34.876000000000005 - type: ndcg_at_10 value: 36.796 - type: ndcg_at_20 value: 38.405 - type: ndcg_at_100 value: 41.612 - type: 
ndcg_at_1000 value: 43.869 - type: map_at_1 value: 24.748 - type: map_at_3 value: 30.192999999999998 - type: map_at_5 value: 31.563999999999997 - type: map_at_10 value: 32.424 - type: map_at_20 value: 32.905 - type: map_at_100 value: 33.385 - type: map_at_1000 value: 33.476 - type: recall_at_1 value: 24.748 - type: recall_at_3 value: 36.14 - type: recall_at_5 value: 41.617 - type: recall_at_10 value: 47.49 - type: recall_at_20 value: 53.413 - type: recall_at_100 value: 69.461 - type: recall_at_1000 value: 86.014 - type: precision_at_1 value: 27.607 - type: precision_at_3 value: 13.957 - type: precision_at_5 value: 9.847 - type: precision_at_10 value: 5.782 - type: precision_at_20 value: 3.3360000000000003 - type: precision_at_100 value: 0.906 - type: precision_at_1000 value: 0.11800000000000001 - type: mrr_at_1 value: 27.6074 - type: mrr_at_3 value: 32.9499 - type: mrr_at_5 value: 34.2229 - type: mrr_at_10 value: 35.0668 - type: mrr_at_20 value: 35.4859 - type: mrr_at_100 value: 35.8948 - type: mrr_at_1000 value: 35.9562 - type: nauc_ndcg_at_1_max value: 49.1944 - type: nauc_ndcg_at_1_std value: 11.7093 - type: nauc_ndcg_at_1_diff1 value: 56.8806 - type: nauc_ndcg_at_3_max value: 46.7361 - type: nauc_ndcg_at_3_std value: 13.4354 - type: nauc_ndcg_at_3_diff1 value: 49.7927 - type: nauc_ndcg_at_5_max value: 47.280899999999995 - type: nauc_ndcg_at_5_std value: 14.5061 - type: nauc_ndcg_at_5_diff1 value: 48.9168 - type: nauc_ndcg_at_10_max value: 47.5137 - type: nauc_ndcg_at_10_std value: 15.4698 - type: nauc_ndcg_at_10_diff1 value: 48.4279 - type: nauc_ndcg_at_20_max value: 47.9904 - type: nauc_ndcg_at_20_std value: 15.7135 - type: nauc_ndcg_at_20_diff1 value: 48.4332 - type: nauc_ndcg_at_100_max value: 48.2942 - type: nauc_ndcg_at_100_std value: 17.502100000000002 - type: nauc_ndcg_at_100_diff1 value: 48.6035 - type: nauc_ndcg_at_1000_max value: 48.0957 - type: nauc_ndcg_at_1000_std value: 17.6368 - type: nauc_ndcg_at_1000_diff1 value: 48.7597 - type: nauc_map_at_1_max value: 45.6445 - type: nauc_map_at_1_std value: 6.9397 - type: nauc_map_at_1_diff1 value: 58.6992 - type: nauc_map_at_3_max value: 45.8449 - type: nauc_map_at_3_std value: 11.036200000000001 - type: nauc_map_at_3_diff1 value: 51.906 - type: nauc_map_at_5_max value: 46.3198 - type: nauc_map_at_5_std value: 11.921 - type: nauc_map_at_5_diff1 value: 51.2763 - type: nauc_map_at_10_max value: 46.5425 - type: nauc_map_at_10_std value: 12.5743 - type: nauc_map_at_10_diff1 value: 50.9536 - type: nauc_map_at_20_max value: 46.726 - type: nauc_map_at_20_std value: 12.6497 - type: nauc_map_at_20_diff1 value: 50.99510000000001 - type: nauc_map_at_100_max value: 46.7746 - type: nauc_map_at_100_std value: 12.881200000000002 - type: nauc_map_at_100_diff1 value: 51.011399999999995 - type: nauc_map_at_1000_max value: 46.785900000000005 - type: nauc_map_at_1000_std value: 12.898000000000001 - type: nauc_map_at_1000_diff1 value: 51.01480000000001 - type: nauc_recall_at_1_max value: 45.6445 - type: nauc_recall_at_1_std value: 6.9397 - type: nauc_recall_at_1_diff1 value: 58.6992 - type: nauc_recall_at_3_max value: 45.0182 - type: nauc_recall_at_3_std value: 14.2648 - type: nauc_recall_at_3_diff1 value: 45.3428 - type: nauc_recall_at_5_max value: 46.2258 - type: nauc_recall_at_5_std value: 17.2103 - type: nauc_recall_at_5_diff1 value: 42.5614 - type: nauc_recall_at_10_max value: 46.251799999999996 - type: nauc_recall_at_10_std value: 19.8669 - type: nauc_recall_at_10_diff1 value: 40.415 - type: nauc_recall_at_20_max value: 46.7318 - type: 
nauc_recall_at_20_std value: 20.3996 - type: nauc_recall_at_20_diff1 value: 39.0112 - type: nauc_recall_at_100_max value: 48.3756 - type: nauc_recall_at_100_std value: 33.558 - type: nauc_recall_at_100_diff1 value: 37.584 - type: nauc_recall_at_1000_max value: 46.1278 - type: nauc_recall_at_1000_std value: 50.2506 - type: nauc_recall_at_1000_diff1 value: 33.7694 - type: nauc_precision_at_1_max value: 49.1944 - type: nauc_precision_at_1_std value: 11.7093 - type: nauc_precision_at_1_diff1 value: 56.8806 - type: nauc_precision_at_3_max value: 49.9406 - type: nauc_precision_at_3_std value: 22.883200000000002 - type: nauc_precision_at_3_diff1 value: 40.5974 - type: nauc_precision_at_5_max value: 48.4187 - type: nauc_precision_at_5_std value: 25.9129 - type: nauc_precision_at_5_diff1 value: 34.863 - type: nauc_precision_at_10_max value: 46.734700000000004 - type: nauc_precision_at_10_std value: 28.5765 - type: nauc_precision_at_10_diff1 value: 30.071599999999997 - type: nauc_precision_at_20_max value: 45.2343 - type: nauc_precision_at_20_std value: 27.4324 - type: nauc_precision_at_20_diff1 value: 26.888299999999997 - type: nauc_precision_at_100_max value: 33.7511 - type: nauc_precision_at_100_std value: 30.084300000000002 - type: nauc_precision_at_100_diff1 value: 14.877099999999999 - type: nauc_precision_at_1000_max value: 15.059000000000001 - type: nauc_precision_at_1000_std value: 21.4471 - type: nauc_precision_at_1000_diff1 value: -1.2862 - type: nauc_mrr_at_1_max value: 49.1944 - type: nauc_mrr_at_1_std value: 11.7093 - type: nauc_mrr_at_1_diff1 value: 56.8806 - type: nauc_mrr_at_3_max value: 48.8173 - type: nauc_mrr_at_3_std value: 14.7023 - type: nauc_mrr_at_3_diff1 value: 50.9845 - type: nauc_mrr_at_5_max value: 49.0933 - type: nauc_mrr_at_5_std value: 15.5443 - type: nauc_mrr_at_5_diff1 value: 50.403299999999994 - type: nauc_mrr_at_10_max value: 49.058 - type: nauc_mrr_at_10_std value: 15.6592 - type: nauc_mrr_at_10_diff1 value: 50.3304 - type: nauc_mrr_at_20_max value: 49.104 - type: nauc_mrr_at_20_std value: 15.7446 - type: nauc_mrr_at_20_diff1 value: 50.2689 - type: nauc_mrr_at_100_max value: 49.071999999999996 - type: nauc_mrr_at_100_std value: 15.8584 - type: nauc_mrr_at_100_diff1 value: 50.3045 - type: nauc_mrr_at_1000_max value: 49.061 - type: nauc_mrr_at_1000_std value: 15.856700000000002 - type: nauc_mrr_at_1000_diff1 value: 50.3081 - type: main_score value: 36.796 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval (default) type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: ndcg_at_1 value: 23.159 - type: ndcg_at_3 value: 27.401999999999997 - type: ndcg_at_5 value: 29.354000000000003 - type: ndcg_at_10 value: 31.775 - type: ndcg_at_20 value: 33.743 - type: ndcg_at_100 value: 37.125 - type: ndcg_at_1000 value: 39.956 - type: map_at_1 value: 18.997 - type: map_at_3 value: 24.351 - type: map_at_5 value: 25.724999999999998 - type: map_at_10 value: 26.873 - type: map_at_20 value: 27.479 - type: map_at_100 value: 28.008 - type: map_at_1000 value: 28.133999999999997 - type: recall_at_1 value: 18.997 - type: recall_at_3 value: 30.14 - type: recall_at_5 value: 35.225 - type: recall_at_10 value: 42.447 - type: recall_at_20 value: 49.769000000000005 - type: recall_at_100 value: 66.39500000000001 - type: recall_at_1000 value: 86.434 - type: precision_at_1 value: 23.159 - type: precision_at_3 value: 12.995999999999999 - type: precision_at_5 value: 9.381 - type: precision_at_10 value: 5.778 - type: 
precision_at_20 value: 3.467 - type: precision_at_100 value: 0.9900000000000001 - type: precision_at_1000 value: 0.14200000000000002 - type: mrr_at_1 value: 23.159 - type: mrr_at_3 value: 28.676299999999998 - type: mrr_at_5 value: 29.9082 - type: mrr_at_10 value: 30.9286 - type: mrr_at_20 value: 31.4303 - type: mrr_at_100 value: 31.845000000000002 - type: mrr_at_1000 value: 31.9176 - type: nauc_ndcg_at_1_max value: 32.959500000000006 - type: nauc_ndcg_at_1_std value: -2.0082 - type: nauc_ndcg_at_1_diff1 value: 41.801500000000004 - type: nauc_ndcg_at_3_max value: 32.8362 - type: nauc_ndcg_at_3_std value: -0.9611 - type: nauc_ndcg_at_3_diff1 value: 36.248200000000004 - type: nauc_ndcg_at_5_max value: 32.650800000000004 - type: nauc_ndcg_at_5_std value: 0.13879999999999998 - type: nauc_ndcg_at_5_diff1 value: 35.2211 - type: nauc_ndcg_at_10_max value: 32.6256 - type: nauc_ndcg_at_10_std value: 1.0654000000000001 - type: nauc_ndcg_at_10_diff1 value: 34.6558 - type: nauc_ndcg_at_20_max value: 33.0706 - type: nauc_ndcg_at_20_std value: 2.2485 - type: nauc_ndcg_at_20_diff1 value: 34.5314 - type: nauc_ndcg_at_100_max value: 33.3131 - type: nauc_ndcg_at_100_std value: 3.4467 - type: nauc_ndcg_at_100_diff1 value: 34.4791 - type: nauc_ndcg_at_1000_max value: 33.644400000000005 - type: nauc_ndcg_at_1000_std value: 3.6159999999999997 - type: nauc_ndcg_at_1000_diff1 value: 34.9717 - type: nauc_map_at_1_max value: 30.2696 - type: nauc_map_at_1_std value: -3.3264 - type: nauc_map_at_1_diff1 value: 42.0066 - type: nauc_map_at_3_max value: 31.455899999999996 - type: nauc_map_at_3_std value: -1.8429999999999997 - type: nauc_map_at_3_diff1 value: 37.4893 - type: nauc_map_at_5_max value: 31.7755 - type: nauc_map_at_5_std value: -1.1461999999999999 - type: nauc_map_at_5_diff1 value: 36.8624 - type: nauc_map_at_10_max value: 31.9842 - type: nauc_map_at_10_std value: -0.6542 - type: nauc_map_at_10_diff1 value: 36.5911 - type: nauc_map_at_20_max value: 32.1745 - type: nauc_map_at_20_std value: -0.2191 - type: nauc_map_at_20_diff1 value: 36.552800000000005 - type: nauc_map_at_100_max value: 32.3001 - type: nauc_map_at_100_std value: 0.012199999999999999 - type: nauc_map_at_100_diff1 value: 36.5376 - type: nauc_map_at_1000_max value: 32.3571 - type: nauc_map_at_1000_std value: 0.0557 - type: nauc_map_at_1000_diff1 value: 36.5535 - type: nauc_recall_at_1_max value: 30.2696 - type: nauc_recall_at_1_std value: -3.3264 - type: nauc_recall_at_1_diff1 value: 42.0066 - type: nauc_recall_at_3_max value: 30.413600000000002 - type: nauc_recall_at_3_std value: -0.44530000000000003 - type: nauc_recall_at_3_diff1 value: 32.3805 - type: nauc_recall_at_5_max value: 30.075499999999998 - type: nauc_recall_at_5_std value: 1.8853000000000002 - type: nauc_recall_at_5_diff1 value: 29.8885 - type: nauc_recall_at_10_max value: 29.7039 - type: nauc_recall_at_10_std value: 4.1936 - type: nauc_recall_at_10_diff1 value: 27.9912 - type: nauc_recall_at_20_max value: 30.538700000000002 - type: nauc_recall_at_20_std value: 7.8352 - type: nauc_recall_at_20_diff1 value: 26.842 - type: nauc_recall_at_100_max value: 30.8116 - type: nauc_recall_at_100_std value: 15.1426 - type: nauc_recall_at_100_diff1 value: 23.9166 - type: nauc_recall_at_1000_max value: 31.9647 - type: nauc_recall_at_1000_std value: 26.5754 - type: nauc_recall_at_1000_diff1 value: 22.608 - type: nauc_precision_at_1_max value: 32.959500000000006 - type: nauc_precision_at_1_std value: -2.0082 - type: nauc_precision_at_1_diff1 value: 41.801500000000004 - type: nauc_precision_at_3_max 
value: 34.8709 - type: nauc_precision_at_3_std value: 1.5288 - type: nauc_precision_at_3_diff1 value: 30.6782 - type: nauc_precision_at_5_max value: 34.163700000000006 - type: nauc_precision_at_5_std value: 4.3446 - type: nauc_precision_at_5_diff1 value: 26.2964 - type: nauc_precision_at_10_max value: 33.1747 - type: nauc_precision_at_10_std value: 7.2109000000000005 - type: nauc_precision_at_10_diff1 value: 22.6126 - type: nauc_precision_at_20_max value: 32.8185 - type: nauc_precision_at_20_std value: 11.296100000000001 - type: nauc_precision_at_20_diff1 value: 19.4086 - type: nauc_precision_at_100_max value: 30.4363 - type: nauc_precision_at_100_std value: 14.23 - type: nauc_precision_at_100_diff1 value: 13.1689 - type: nauc_precision_at_1000_max value: 24.6263 - type: nauc_precision_at_1000_std value: 11.190999999999999 - type: nauc_precision_at_1000_diff1 value: 4.5375 - type: nauc_mrr_at_1_max value: 32.959500000000006 - type: nauc_mrr_at_1_std value: -2.0082 - type: nauc_mrr_at_1_diff1 value: 41.801500000000004 - type: nauc_mrr_at_3_max value: 33.949400000000004 - type: nauc_mrr_at_3_std value: -0.5342 - type: nauc_mrr_at_3_diff1 value: 37.3148 - type: nauc_mrr_at_5_max value: 33.7685 - type: nauc_mrr_at_5_std value: 0.2542 - type: nauc_mrr_at_5_diff1 value: 36.5632 - type: nauc_mrr_at_10_max value: 33.849000000000004 - type: nauc_mrr_at_10_std value: 0.6677 - type: nauc_mrr_at_10_diff1 value: 36.4741 - type: nauc_mrr_at_20_max value: 33.9586 - type: nauc_mrr_at_20_std value: 0.897 - type: nauc_mrr_at_20_diff1 value: 36.478899999999996 - type: nauc_mrr_at_100_max value: 33.9441 - type: nauc_mrr_at_100_std value: 0.9808000000000001 - type: nauc_mrr_at_100_diff1 value: 36.5049 - type: nauc_mrr_at_1000_max value: 33.9546 - type: nauc_mrr_at_1000_std value: 0.9831 - type: nauc_mrr_at_1000_diff1 value: 36.5259 - type: main_score value: 31.775 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval (default) type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: ndcg_at_1 value: 34.981 - type: ndcg_at_3 value: 40.107 - type: ndcg_at_5 value: 42.842999999999996 - type: ndcg_at_10 value: 45.275 - type: ndcg_at_20 value: 47.455999999999996 - type: ndcg_at_100 value: 50.321000000000005 - type: ndcg_at_1000 value: 52.406 - type: map_at_1 value: 29.504 - type: map_at_3 value: 36.622 - type: map_at_5 value: 38.541 - type: map_at_10 value: 39.675 - type: map_at_20 value: 40.409 - type: map_at_100 value: 40.914 - type: map_at_1000 value: 41.012 - type: recall_at_1 value: 29.504 - type: recall_at_3 value: 43.807 - type: recall_at_5 value: 50.77700000000001 - type: recall_at_10 value: 57.898 - type: recall_at_20 value: 65.59899999999999 - type: recall_at_100 value: 78.974 - type: recall_at_1000 value: 93.33399999999999 - type: precision_at_1 value: 34.981 - type: precision_at_3 value: 18.315 - type: precision_at_5 value: 13.097 - type: precision_at_10 value: 7.631 - type: precision_at_20 value: 4.431 - type: precision_at_100 value: 1.13 - type: precision_at_1000 value: 0.14100000000000001 - type: mrr_at_1 value: 34.9813 - type: mrr_at_3 value: 41.3557 - type: mrr_at_5 value: 42.9602 - type: mrr_at_10 value: 43.9816 - type: mrr_at_20 value: 44.5 - type: mrr_at_100 value: 44.8076 - type: mrr_at_1000 value: 44.865 - type: nauc_ndcg_at_1_max value: 48.6102 - type: nauc_ndcg_at_1_std value: -5.6691 - type: nauc_ndcg_at_1_diff1 value: 56.008599999999994 - type: nauc_ndcg_at_3_max value: 46.388400000000004 - type: 
nauc_ndcg_at_3_std value: -4.877800000000001 - type: nauc_ndcg_at_3_diff1 value: 49.1768 - type: nauc_ndcg_at_5_max value: 46.3438 - type: nauc_ndcg_at_5_std value: -4.1069 - type: nauc_ndcg_at_5_diff1 value: 48.209999999999994 - type: nauc_ndcg_at_10_max value: 46.147 - type: nauc_ndcg_at_10_std value: -3.7115 - type: nauc_ndcg_at_10_diff1 value: 47.9846 - type: nauc_ndcg_at_20_max value: 46.2731 - type: nauc_ndcg_at_20_std value: -3.5068 - type: nauc_ndcg_at_20_diff1 value: 48.1901 - type: nauc_ndcg_at_100_max value: 46.886 - type: nauc_ndcg_at_100_std value: -1.8507 - type: nauc_ndcg_at_100_diff1 value: 49.058 - type: nauc_ndcg_at_1000_max value: 46.5984 - type: nauc_ndcg_at_1000_std value: -2.1614999999999998 - type: nauc_ndcg_at_1000_diff1 value: 49.1318 - type: nauc_map_at_1_max value: 45.5569 - type: nauc_map_at_1_std value: -7.604900000000001 - type: nauc_map_at_1_diff1 value: 56.3936 - type: nauc_map_at_3_max value: 46.0028 - type: nauc_map_at_3_std value: -6.334 - type: nauc_map_at_3_diff1 value: 51.3472 - type: nauc_map_at_5_max value: 46.2903 - type: nauc_map_at_5_std value: -5.475300000000001 - type: nauc_map_at_5_diff1 value: 50.5945 - type: nauc_map_at_10_max value: 46.3277 - type: nauc_map_at_10_std value: -5.1829 - type: nauc_map_at_10_diff1 value: 50.4714 - type: nauc_map_at_20_max value: 46.5326 - type: nauc_map_at_20_std value: -5.0456 - type: nauc_map_at_20_diff1 value: 50.5729 - type: nauc_map_at_100_max value: 46.6537 - type: nauc_map_at_100_std value: -4.7367 - type: nauc_map_at_100_diff1 value: 50.711 - type: nauc_map_at_1000_max value: 46.6406 - type: nauc_map_at_1000_std value: -4.7269 - type: nauc_map_at_1000_diff1 value: 50.6985 - type: nauc_recall_at_1_max value: 45.5569 - type: nauc_recall_at_1_std value: -7.604900000000001 - type: nauc_recall_at_1_diff1 value: 56.3936 - type: nauc_recall_at_3_max value: 43.1624 - type: nauc_recall_at_3_std value: -5.0664 - type: nauc_recall_at_3_diff1 value: 44.016 - type: nauc_recall_at_5_max value: 42.893 - type: nauc_recall_at_5_std value: -2.0581 - type: nauc_recall_at_5_diff1 value: 40.6813 - type: nauc_recall_at_10_max value: 41.3464 - type: nauc_recall_at_10_std value: -0.9026 - type: nauc_recall_at_10_diff1 value: 38.8716 - type: nauc_recall_at_20_max value: 40.7766 - type: nauc_recall_at_20_std value: -0.4664 - type: nauc_recall_at_20_diff1 value: 38.6801 - type: nauc_recall_at_100_max value: 43.856 - type: nauc_recall_at_100_std value: 12.148200000000001 - type: nauc_recall_at_100_diff1 value: 43.189899999999994 - type: nauc_recall_at_1000_max value: 36.6555 - type: nauc_recall_at_1000_std value: 25.7409 - type: nauc_recall_at_1000_diff1 value: 44.9133 - type: nauc_precision_at_1_max value: 48.6102 - type: nauc_precision_at_1_std value: -5.6691 - type: nauc_precision_at_1_diff1 value: 56.008599999999994 - type: nauc_precision_at_3_max value: 43.2148 - type: nauc_precision_at_3_std value: 0.0292 - type: nauc_precision_at_3_diff1 value: 35.75 - type: nauc_precision_at_5_max value: 39.8562 - type: nauc_precision_at_5_std value: 4.105 - type: nauc_precision_at_5_diff1 value: 28.4213 - type: nauc_precision_at_10_max value: 34.901199999999996 - type: nauc_precision_at_10_std value: 6.4718 - type: nauc_precision_at_10_diff1 value: 22.785 - type: nauc_precision_at_20_max value: 29.151 - type: nauc_precision_at_20_std value: 8.213 - type: nauc_precision_at_20_diff1 value: 16.6992 - type: nauc_precision_at_100_max value: 17.1377 - type: nauc_precision_at_100_std value: 16.1652 - type: nauc_precision_at_100_diff1 value: 
4.4657 - type: nauc_precision_at_1000_max value: -2.6889 - type: nauc_precision_at_1000_std value: 11.010499999999999 - type: nauc_precision_at_1000_diff1 value: -11.0026 - type: nauc_mrr_at_1_max value: 48.6102 - type: nauc_mrr_at_1_std value: -5.6691 - type: nauc_mrr_at_1_diff1 value: 56.008599999999994 - type: nauc_mrr_at_3_max value: 47.6571 - type: nauc_mrr_at_3_std value: -4.1072999999999995 - type: nauc_mrr_at_3_diff1 value: 50.18470000000001 - type: nauc_mrr_at_5_max value: 47.6268 - type: nauc_mrr_at_5_std value: -3.6222 - type: nauc_mrr_at_5_diff1 value: 49.5854 - type: nauc_mrr_at_10_max value: 47.454499999999996 - type: nauc_mrr_at_10_std value: -3.4977 - type: nauc_mrr_at_10_diff1 value: 49.5833 - type: nauc_mrr_at_20_max value: 47.3316 - type: nauc_mrr_at_20_std value: -3.5721000000000003 - type: nauc_mrr_at_20_diff1 value: 49.6713 - type: nauc_mrr_at_100_max value: 47.387299999999996 - type: nauc_mrr_at_100_std value: -3.4835 - type: nauc_mrr_at_100_diff1 value: 49.8135 - type: nauc_mrr_at_1000_max value: 47.4002 - type: nauc_mrr_at_1000_std value: -3.4842999999999997 - type: nauc_mrr_at_1000_diff1 value: 49.8286 - type: main_score value: 45.275 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval (default) type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: ndcg_at_1 value: 32.806000000000004 - type: ndcg_at_3 value: 38.775999999999996 - type: ndcg_at_5 value: 40.614 - type: ndcg_at_10 value: 42.957 - type: ndcg_at_20 value: 45.202999999999996 - type: ndcg_at_100 value: 48.941 - type: ndcg_at_1000 value: 51.105000000000004 - type: map_at_1 value: 27.236 - type: map_at_3 value: 34.204 - type: map_at_5 value: 35.66 - type: map_at_10 value: 36.986000000000004 - type: map_at_20 value: 37.827 - type: map_at_100 value: 38.602 - type: map_at_1000 value: 38.818000000000005 - type: recall_at_1 value: 27.236 - type: recall_at_3 value: 41.596 - type: recall_at_5 value: 46.947 - type: recall_at_10 value: 54.129000000000005 - type: recall_at_20 value: 62.641000000000005 - type: recall_at_100 value: 80.971 - type: recall_at_1000 value: 93.98100000000001 - type: precision_at_1 value: 32.806000000000004 - type: precision_at_3 value: 18.445 - type: precision_at_5 value: 13.083 - type: precision_at_10 value: 8.142000000000001 - type: precision_at_20 value: 5.119 - type: precision_at_100 value: 1.599 - type: precision_at_1000 value: 0.244 - type: mrr_at_1 value: 32.8063 - type: mrr_at_3 value: 39.5257 - type: mrr_at_5 value: 40.8399 - type: mrr_at_10 value: 41.8107 - type: mrr_at_20 value: 42.4012 - type: mrr_at_100 value: 42.7919 - type: mrr_at_1000 value: 42.8261 - type: nauc_ndcg_at_1_max value: 49.2838 - type: nauc_ndcg_at_1_std value: 8.713799999999999 - type: nauc_ndcg_at_1_diff1 value: 48.2777 - type: nauc_ndcg_at_3_max value: 44.4031 - type: nauc_ndcg_at_3_std value: 11.4725 - type: nauc_ndcg_at_3_diff1 value: 41.5639 - type: nauc_ndcg_at_5_max value: 44.452999999999996 - type: nauc_ndcg_at_5_std value: 11.9373 - type: nauc_ndcg_at_5_diff1 value: 41.977199999999996 - type: nauc_ndcg_at_10_max value: 44.8695 - type: nauc_ndcg_at_10_std value: 13.6193 - type: nauc_ndcg_at_10_diff1 value: 41.665 - type: nauc_ndcg_at_20_max value: 45.691900000000004 - type: nauc_ndcg_at_20_std value: 14.0959 - type: nauc_ndcg_at_20_diff1 value: 42.2414 - type: nauc_ndcg_at_100_max value: 45.7442 - type: nauc_ndcg_at_100_std value: 15.218699999999998 - type: nauc_ndcg_at_100_diff1 value: 41.7288 - type: 
nauc_ndcg_at_1000_max value: 46.788000000000004 - type: nauc_ndcg_at_1000_std value: 15.409900000000002 - type: nauc_ndcg_at_1000_diff1 value: 41.9824 - type: nauc_map_at_1_max value: 48.0334 - type: nauc_map_at_1_std value: 8.0125 - type: nauc_map_at_1_diff1 value: 53.4579 - type: nauc_map_at_3_max value: 45.1289 - type: nauc_map_at_3_std value: 10.013 - type: nauc_map_at_3_diff1 value: 45.51 - type: nauc_map_at_5_max value: 45.3494 - type: nauc_map_at_5_std value: 10.0348 - type: nauc_map_at_5_diff1 value: 45.3972 - type: nauc_map_at_10_max value: 45.8378 - type: nauc_map_at_10_std value: 11.3299 - type: nauc_map_at_10_diff1 value: 44.8933 - type: nauc_map_at_20_max value: 46.156000000000006 - type: nauc_map_at_20_std value: 11.8154 - type: nauc_map_at_20_diff1 value: 44.6615 - type: nauc_map_at_100_max value: 46.1188 - type: nauc_map_at_100_std value: 12.3635 - type: nauc_map_at_100_diff1 value: 44.5946 - type: nauc_map_at_1000_max value: 46.1113 - type: nauc_map_at_1000_std value: 12.526599999999998 - type: nauc_map_at_1000_diff1 value: 44.595400000000005 - type: nauc_recall_at_1_max value: 48.0334 - type: nauc_recall_at_1_std value: 8.0125 - type: nauc_recall_at_1_diff1 value: 53.4579 - type: nauc_recall_at_3_max value: 39.3688 - type: nauc_recall_at_3_std value: 10.3834 - type: nauc_recall_at_3_diff1 value: 37.8084 - type: nauc_recall_at_5_max value: 39.3184 - type: nauc_recall_at_5_std value: 10.509400000000001 - type: nauc_recall_at_5_diff1 value: 36.7191 - type: nauc_recall_at_10_max value: 38.785599999999995 - type: nauc_recall_at_10_std value: 15.781300000000002 - type: nauc_recall_at_10_diff1 value: 34.7564 - type: nauc_recall_at_20_max value: 39.6075 - type: nauc_recall_at_20_std value: 18.0278 - type: nauc_recall_at_20_diff1 value: 35.483399999999996 - type: nauc_recall_at_100_max value: 36.1361 - type: nauc_recall_at_100_std value: 29.1037 - type: nauc_recall_at_100_diff1 value: 26.9486 - type: nauc_recall_at_1000_max value: 62.4461 - type: nauc_recall_at_1000_std value: 57.465599999999995 - type: nauc_recall_at_1000_diff1 value: 29.5554 - type: nauc_precision_at_1_max value: 49.2838 - type: nauc_precision_at_1_std value: 8.713799999999999 - type: nauc_precision_at_1_diff1 value: 48.2777 - type: nauc_precision_at_3_max value: 36.4572 - type: nauc_precision_at_3_std value: 14.3924 - type: nauc_precision_at_3_diff1 value: 22.9406 - type: nauc_precision_at_5_max value: 32.5803 - type: nauc_precision_at_5_std value: 16.4452 - type: nauc_precision_at_5_diff1 value: 18.2745 - type: nauc_precision_at_10_max value: 27.3789 - type: nauc_precision_at_10_std value: 21.0131 - type: nauc_precision_at_10_diff1 value: 6.947399999999999 - type: nauc_precision_at_20_max value: 22.8404 - type: nauc_precision_at_20_std value: 24.6328 - type: nauc_precision_at_20_diff1 value: 0.1601 - type: nauc_precision_at_100_max value: 2.6098 - type: nauc_precision_at_100_std value: 22.3326 - type: nauc_precision_at_100_diff1 value: -10.1755 - type: nauc_precision_at_1000_max value: -6.730899999999999 - type: nauc_precision_at_1000_std value: 18.262900000000002 - type: nauc_precision_at_1000_diff1 value: -16.3364 - type: nauc_mrr_at_1_max value: 49.2838 - type: nauc_mrr_at_1_std value: 8.713799999999999 - type: nauc_mrr_at_1_diff1 value: 48.2777 - type: nauc_mrr_at_3_max value: 45.8613 - type: nauc_mrr_at_3_std value: 10.4584 - type: nauc_mrr_at_3_diff1 value: 42.2388 - type: nauc_mrr_at_5_max value: 46.1544 - type: nauc_mrr_at_5_std value: 11.1434 - type: nauc_mrr_at_5_diff1 value: 42.2252 - type: 
nauc_mrr_at_10_max value: 46.2703 - type: nauc_mrr_at_10_std value: 11.7714 - type: nauc_mrr_at_10_diff1 value: 42.0821 - type: nauc_mrr_at_20_max value: 46.4586 - type: nauc_mrr_at_20_std value: 11.9329 - type: nauc_mrr_at_20_diff1 value: 42.3199 - type: nauc_mrr_at_100_max value: 46.4309 - type: nauc_mrr_at_100_std value: 11.9458 - type: nauc_mrr_at_100_diff1 value: 42.2902 - type: nauc_mrr_at_1000_max value: 46.4392 - type: nauc_mrr_at_1000_std value: 11.9269 - type: nauc_mrr_at_1000_diff1 value: 42.3078 - type: main_score value: 42.957 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval (default) type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: ndcg_at_1 value: 25.692999999999998 - type: ndcg_at_3 value: 31.375999999999998 - type: ndcg_at_5 value: 33.617999999999995 - type: ndcg_at_10 value: 36.409000000000006 - type: ndcg_at_20 value: 38.5 - type: ndcg_at_100 value: 41.614000000000004 - type: ndcg_at_1000 value: 44.119 - type: map_at_1 value: 23.666 - type: map_at_3 value: 29.072 - type: map_at_5 value: 30.453999999999997 - type: map_at_10 value: 31.673000000000002 - type: map_at_20 value: 32.256 - type: map_at_100 value: 32.721000000000004 - type: map_at_1000 value: 32.82 - type: recall_at_1 value: 23.666 - type: recall_at_3 value: 35.693000000000005 - type: recall_at_5 value: 40.937 - type: recall_at_10 value: 48.979 - type: recall_at_20 value: 57.028999999999996 - type: recall_at_100 value: 72.80799999999999 - type: recall_at_1000 value: 91.546 - type: precision_at_1 value: 25.692999999999998 - type: precision_at_3 value: 13.123999999999999 - type: precision_at_5 value: 9.279 - type: precision_at_10 value: 5.712 - type: precision_at_20 value: 3.3360000000000003 - type: precision_at_100 value: 0.8869999999999999 - type: precision_at_1000 value: 0.122 - type: mrr_at_1 value: 25.6932 - type: mrr_at_3 value: 31.2693 - type: mrr_at_5 value: 32.4522 - type: mrr_at_10 value: 33.6496 - type: mrr_at_20 value: 34.208 - type: mrr_at_100 value: 34.6132 - type: mrr_at_1000 value: 34.6794 - type: nauc_ndcg_at_1_max value: 30.436400000000003 - type: nauc_ndcg_at_1_std value: -5.177099999999999 - type: nauc_ndcg_at_1_diff1 value: 38.9465 - type: nauc_ndcg_at_3_max value: 27.759600000000002 - type: nauc_ndcg_at_3_std value: -3.7716 - type: nauc_ndcg_at_3_diff1 value: 32.0374 - type: nauc_ndcg_at_5_max value: 29.284399999999998 - type: nauc_ndcg_at_5_std value: -2.1555999999999997 - type: nauc_ndcg_at_5_diff1 value: 31.2735 - type: nauc_ndcg_at_10_max value: 27.4811 - type: nauc_ndcg_at_10_std value: -2.3712 - type: nauc_ndcg_at_10_diff1 value: 30.5165 - type: nauc_ndcg_at_20_max value: 28.385899999999996 - type: nauc_ndcg_at_20_std value: -0.7358 - type: nauc_ndcg_at_20_diff1 value: 30.5901 - type: nauc_ndcg_at_100_max value: 29.6634 - type: nauc_ndcg_at_100_std value: 0.6082 - type: nauc_ndcg_at_100_diff1 value: 30.455 - type: nauc_ndcg_at_1000_max value: 29.316 - type: nauc_ndcg_at_1000_std value: 0.8039 - type: nauc_ndcg_at_1000_diff1 value: 30.406699999999997 - type: nauc_map_at_1_max value: 28.618900000000004 - type: nauc_map_at_1_std value: -5.8273 - type: nauc_map_at_1_diff1 value: 39.6434 - type: nauc_map_at_3_max value: 27.3257 - type: nauc_map_at_3_std value: -4.8353 - type: nauc_map_at_3_diff1 value: 33.9743 - type: nauc_map_at_5_max value: 28.5433 - type: nauc_map_at_5_std value: -3.7222 - type: nauc_map_at_5_diff1 value: 33.360099999999996 - type: nauc_map_at_10_max value: 
27.972399999999997 - type: nauc_map_at_10_std value: -3.565 - type: nauc_map_at_10_diff1 value: 32.9863 - type: nauc_map_at_20_max value: 28.2615 - type: nauc_map_at_20_std value: -3.1113 - type: nauc_map_at_20_diff1 value: 32.9793 - type: nauc_map_at_100_max value: 28.540300000000002 - type: nauc_map_at_100_std value: -2.7937 - type: nauc_map_at_100_diff1 value: 32.9581 - type: nauc_map_at_1000_max value: 28.5349 - type: nauc_map_at_1000_std value: -2.7701 - type: nauc_map_at_1000_diff1 value: 32.939299999999996 - type: nauc_recall_at_1_max value: 28.618900000000004 - type: nauc_recall_at_1_std value: -5.8273 - type: nauc_recall_at_1_diff1 value: 39.6434 - type: nauc_recall_at_3_max value: 25.120199999999997 - type: nauc_recall_at_3_std value: -3.4718 - type: nauc_recall_at_3_diff1 value: 27.233200000000004 - type: nauc_recall_at_5_max value: 28.6985 - type: nauc_recall_at_5_std value: 0.1915 - type: nauc_recall_at_5_diff1 value: 25.533299999999997 - type: nauc_recall_at_10_max value: 23.3717 - type: nauc_recall_at_10_std value: -0.9587999999999999 - type: nauc_recall_at_10_diff1 value: 23.8178 - type: nauc_recall_at_20_max value: 25.923800000000004 - type: nauc_recall_at_20_std value: 5.4661 - type: nauc_recall_at_20_diff1 value: 23.4099 - type: nauc_recall_at_100_max value: 32.182500000000005 - type: nauc_recall_at_100_std value: 14.696200000000001 - type: nauc_recall_at_100_diff1 value: 20.6716 - type: nauc_recall_at_1000_max value: 31.512400000000003 - type: nauc_recall_at_1000_std value: 42.5301 - type: nauc_recall_at_1000_diff1 value: 10.7694 - type: nauc_precision_at_1_max value: 30.436400000000003 - type: nauc_precision_at_1_std value: -5.177099999999999 - type: nauc_precision_at_1_diff1 value: 38.9465 - type: nauc_precision_at_3_max value: 29.1341 - type: nauc_precision_at_3_std value: -0.1582 - type: nauc_precision_at_3_diff1 value: 25.872600000000002 - type: nauc_precision_at_5_max value: 32.7748 - type: nauc_precision_at_5_std value: 4.798100000000001 - type: nauc_precision_at_5_diff1 value: 21.712400000000002 - type: nauc_precision_at_10_max value: 27.396700000000003 - type: nauc_precision_at_10_std value: 6.6187 - type: nauc_precision_at_10_diff1 value: 16.292499999999997 - type: nauc_precision_at_20_max value: 29.6999 - type: nauc_precision_at_20_std value: 12.6113 - type: nauc_precision_at_20_diff1 value: 14.616399999999999 - type: nauc_precision_at_100_max value: 29.297099999999997 - type: nauc_precision_at_100_std value: 20.9722 - type: nauc_precision_at_100_diff1 value: 1.6410999999999998 - type: nauc_precision_at_1000_max value: 2.7286 - type: nauc_precision_at_1000_std value: 14.837200000000001 - type: nauc_precision_at_1000_diff1 value: -21.584500000000002 - type: nauc_mrr_at_1_max value: 30.436400000000003 - type: nauc_mrr_at_1_std value: -5.177099999999999 - type: nauc_mrr_at_1_diff1 value: 38.9465 - type: nauc_mrr_at_3_max value: 29.766199999999998 - type: nauc_mrr_at_3_std value: -3.0375 - type: nauc_mrr_at_3_diff1 value: 33.568599999999996 - type: nauc_mrr_at_5_max value: 30.4582 - type: nauc_mrr_at_5_std value: -2.0233 - type: nauc_mrr_at_5_diff1 value: 33.1478 - type: nauc_mrr_at_10_max value: 29.3877 - type: nauc_mrr_at_10_std value: -2.3752 - type: nauc_mrr_at_10_diff1 value: 32.5597 - type: nauc_mrr_at_20_max value: 29.631400000000003 - type: nauc_mrr_at_20_std value: -1.9325999999999999 - type: nauc_mrr_at_20_diff1 value: 32.6145 - type: nauc_mrr_at_100_max value: 29.7106 - type: nauc_mrr_at_100_std value: -1.8483 - type: nauc_mrr_at_100_diff1 value: 
32.624900000000004 - type: nauc_mrr_at_1000_max value: 29.7099 - type: nauc_mrr_at_1000_std value: -1.8341 - type: nauc_mrr_at_1000_diff1 value: 32.6251 - type: main_score value: 36.409000000000006 - task: type: Retrieval dataset: name: MTEB ClimateFEVER (default) type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: ndcg_at_1 value: 26.971 - type: ndcg_at_3 value: 24.196 - type: ndcg_at_5 value: 25.811 - type: ndcg_at_10 value: 29.494 - type: ndcg_at_20 value: 32.013999999999996 - type: ndcg_at_100 value: 35.989 - type: ndcg_at_1000 value: 39.326 - type: map_at_1 value: 12.107 - type: map_at_3 value: 17.538 - type: map_at_5 value: 19.124 - type: map_at_10 value: 20.896 - type: map_at_20 value: 21.798000000000002 - type: map_at_100 value: 22.567 - type: map_at_1000 value: 22.746 - type: recall_at_1 value: 12.107 - type: recall_at_3 value: 22.425 - type: recall_at_5 value: 27.394000000000002 - type: recall_at_10 value: 35.57 - type: recall_at_20 value: 42.565 - type: recall_at_100 value: 57.708000000000006 - type: recall_at_1000 value: 76.673 - type: precision_at_1 value: 26.971 - type: precision_at_3 value: 18.111 - type: precision_at_5 value: 13.694 - type: precision_at_10 value: 9.303 - type: precision_at_20 value: 5.769 - type: precision_at_100 value: 1.6320000000000001 - type: precision_at_1000 value: 0.22499999999999998 - type: mrr_at_1 value: 26.970699999999997 - type: mrr_at_3 value: 36.0478 - type: mrr_at_5 value: 37.9598 - type: mrr_at_10 value: 39.4286 - type: mrr_at_20 value: 39.9242 - type: mrr_at_100 value: 40.232600000000005 - type: mrr_at_1000 value: 40.2711 - type: nauc_ndcg_at_1_max value: 30.1498 - type: nauc_ndcg_at_1_std value: 9.795 - type: nauc_ndcg_at_1_diff1 value: 28.3202 - type: nauc_ndcg_at_3_max value: 36.1507 - type: nauc_ndcg_at_3_std value: 16.6918 - type: nauc_ndcg_at_3_diff1 value: 25.9179 - type: nauc_ndcg_at_5_max value: 38.4314 - type: nauc_ndcg_at_5_std value: 19.1236 - type: nauc_ndcg_at_5_diff1 value: 25.7315 - type: nauc_ndcg_at_10_max value: 39.734 - type: nauc_ndcg_at_10_std value: 22.795199999999998 - type: nauc_ndcg_at_10_diff1 value: 24.5446 - type: nauc_ndcg_at_20_max value: 40.0306 - type: nauc_ndcg_at_20_std value: 25.0242 - type: nauc_ndcg_at_20_diff1 value: 23.7608 - type: nauc_ndcg_at_100_max value: 39.881 - type: nauc_ndcg_at_100_std value: 26.8935 - type: nauc_ndcg_at_100_diff1 value: 23.366600000000002 - type: nauc_ndcg_at_1000_max value: 39.6299 - type: nauc_ndcg_at_1000_std value: 27.556000000000004 - type: nauc_ndcg_at_1000_diff1 value: 23.4406 - type: nauc_map_at_1_max value: 36.033500000000004 - type: nauc_map_at_1_std value: 9.3902 - type: nauc_map_at_1_diff1 value: 33.3389 - type: nauc_map_at_3_max value: 38.2772 - type: nauc_map_at_3_std value: 14.862 - type: nauc_map_at_3_diff1 value: 29.121799999999997 - type: nauc_map_at_5_max value: 38.8901 - type: nauc_map_at_5_std value: 16.4551 - type: nauc_map_at_5_diff1 value: 28.258499999999998 - type: nauc_map_at_10_max value: 39.689099999999996 - type: nauc_map_at_10_std value: 19.0082 - type: nauc_map_at_10_diff1 value: 27.5292 - type: nauc_map_at_20_max value: 39.8114 - type: nauc_map_at_20_std value: 20.099700000000002 - type: nauc_map_at_20_diff1 value: 27.1249 - type: nauc_map_at_100_max value: 39.7759 - type: nauc_map_at_100_std value: 20.671400000000002 - type: nauc_map_at_100_diff1 value: 26.9515 - type: nauc_map_at_1000_max value: 39.7635 - type: nauc_map_at_1000_std value: 20.7381 - type: 
nauc_map_at_1000_diff1 value: 26.9318 - type: nauc_recall_at_1_max value: 36.033500000000004 - type: nauc_recall_at_1_std value: 9.3902 - type: nauc_recall_at_1_diff1 value: 33.3389 - type: nauc_recall_at_3_max value: 37.040099999999995 - type: nauc_recall_at_3_std value: 18.421000000000003 - type: nauc_recall_at_3_diff1 value: 23.591 - type: nauc_recall_at_5_max value: 38.2483 - type: nauc_recall_at_5_std value: 21.9791 - type: nauc_recall_at_5_diff1 value: 20.9432 - type: nauc_recall_at_10_max value: 38.684400000000004 - type: nauc_recall_at_10_std value: 27.528000000000002 - type: nauc_recall_at_10_diff1 value: 17.874599999999997 - type: nauc_recall_at_20_max value: 37.7408 - type: nauc_recall_at_20_std value: 31.178800000000003 - type: nauc_recall_at_20_diff1 value: 15.3021 - type: nauc_recall_at_100_max value: 35.0668 - type: nauc_recall_at_100_std value: 35.8934 - type: nauc_recall_at_100_diff1 value: 12.0978 - type: nauc_recall_at_1000_max value: 33.2113 - type: nauc_recall_at_1000_std value: 44.3165 - type: nauc_recall_at_1000_diff1 value: 9.6011 - type: nauc_precision_at_1_max value: 30.1498 - type: nauc_precision_at_1_std value: 9.795 - type: nauc_precision_at_1_diff1 value: 28.3202 - type: nauc_precision_at_3_max value: 32.1047 - type: nauc_precision_at_3_std value: 20.7027 - type: nauc_precision_at_3_diff1 value: 18.3366 - type: nauc_precision_at_5_max value: 32.9484 - type: nauc_precision_at_5_std value: 24.439700000000002 - type: nauc_precision_at_5_diff1 value: 16.3709 - type: nauc_precision_at_10_max value: 30.626900000000003 - type: nauc_precision_at_10_std value: 30.3335 - type: nauc_precision_at_10_diff1 value: 10.4378 - type: nauc_precision_at_20_max value: 26.875100000000003 - type: nauc_precision_at_20_std value: 33.1578 - type: nauc_precision_at_20_diff1 value: 6.3161 - type: nauc_precision_at_100_max value: 18.5691 - type: nauc_precision_at_100_std value: 32.4294 - type: nauc_precision_at_100_diff1 value: 1.9001000000000001 - type: nauc_precision_at_1000_max value: 5.2522 - type: nauc_precision_at_1000_std value: 26.337899999999998 - type: nauc_precision_at_1000_diff1 value: -4.2309 - type: nauc_mrr_at_1_max value: 30.1498 - type: nauc_mrr_at_1_std value: 9.795 - type: nauc_mrr_at_1_diff1 value: 28.3202 - type: nauc_mrr_at_3_max value: 32.2466 - type: nauc_mrr_at_3_std value: 15.6475 - type: nauc_mrr_at_3_diff1 value: 24.160899999999998 - type: nauc_mrr_at_5_max value: 33.1837 - type: nauc_mrr_at_5_std value: 16.8917 - type: nauc_mrr_at_5_diff1 value: 24.072499999999998 - type: nauc_mrr_at_10_max value: 33.576 - type: nauc_mrr_at_10_std value: 17.4501 - type: nauc_mrr_at_10_diff1 value: 23.9826 - type: nauc_mrr_at_20_max value: 33.5003 - type: nauc_mrr_at_20_std value: 17.5104 - type: nauc_mrr_at_20_diff1 value: 23.9237 - type: nauc_mrr_at_100_max value: 33.455200000000005 - type: nauc_mrr_at_100_std value: 17.5181 - type: nauc_mrr_at_100_diff1 value: 23.9598 - type: nauc_mrr_at_1000_max value: 33.4473 - type: nauc_mrr_at_1000_std value: 17.4969 - type: nauc_mrr_at_1000_diff1 value: 23.974899999999998 - type: main_score value: 29.494 - task: type: Retrieval dataset: name: MTEB CodeFeedbackMT (default) type: CoIR-Retrieval/codefeedback-mt config: default split: test revision: b0f12fa0c0dd67f59c95a5c33d02aeeb4c398c5f metrics: - type: ndcg_at_1 value: 21.044 - type: ndcg_at_3 value: 27.134999999999998 - type: ndcg_at_5 value: 29.205 - type: ndcg_at_10 value: 31.391999999999996 - type: ndcg_at_20 value: 33.031 - type: ndcg_at_100 value: 35.852000000000004 - type: 
ndcg_at_1000 value: 38.076 - type: map_at_1 value: 21.044 - type: map_at_3 value: 25.637 - type: map_at_5 value: 26.779999999999998 - type: map_at_10 value: 27.683000000000003 - type: map_at_20 value: 28.133999999999997 - type: map_at_100 value: 28.510999999999996 - type: map_at_1000 value: 28.588 - type: recall_at_1 value: 21.044 - type: recall_at_3 value: 31.468 - type: recall_at_5 value: 36.522 - type: recall_at_10 value: 43.278 - type: recall_at_20 value: 49.748 - type: recall_at_100 value: 65.16499999999999 - type: recall_at_1000 value: 83.031 - type: precision_at_1 value: 21.044 - type: precision_at_3 value: 10.488999999999999 - type: precision_at_5 value: 7.303999999999999 - type: precision_at_10 value: 4.328 - type: precision_at_20 value: 2.487 - type: precision_at_100 value: 0.652 - type: precision_at_1000 value: 0.083 - type: mrr_at_1 value: 21.043899999999997 - type: mrr_at_3 value: 25.6371 - type: mrr_at_5 value: 26.7796 - type: mrr_at_10 value: 27.6831 - type: mrr_at_20 value: 28.1344 - type: mrr_at_100 value: 28.510999999999996 - type: mrr_at_1000 value: 28.588400000000004 - type: nauc_ndcg_at_1_max value: 11.8658 - type: nauc_ndcg_at_1_std value: -18.4852 - type: nauc_ndcg_at_1_diff1 value: 47.3429 - type: nauc_ndcg_at_3_max value: 11.608400000000001 - type: nauc_ndcg_at_3_std value: -19.0804 - type: nauc_ndcg_at_3_diff1 value: 41.7031 - type: nauc_ndcg_at_5_max value: 11.289299999999999 - type: nauc_ndcg_at_5_std value: -19.3124 - type: nauc_ndcg_at_5_diff1 value: 40.5381 - type: nauc_ndcg_at_10_max value: 11.6701 - type: nauc_ndcg_at_10_std value: -18.7838 - type: nauc_ndcg_at_10_diff1 value: 39.8088 - type: nauc_ndcg_at_20_max value: 11.942400000000001 - type: nauc_ndcg_at_20_std value: -18.123900000000003 - type: nauc_ndcg_at_20_diff1 value: 38.967800000000004 - type: nauc_ndcg_at_100_max value: 13.114999999999998 - type: nauc_ndcg_at_100_std value: -16.1964 - type: nauc_ndcg_at_100_diff1 value: 39.0077 - type: nauc_ndcg_at_1000_max value: 13.5244 - type: nauc_ndcg_at_1000_std value: -15.2702 - type: nauc_ndcg_at_1000_diff1 value: 39.1235 - type: nauc_map_at_1_max value: 11.8658 - type: nauc_map_at_1_std value: -18.4852 - type: nauc_map_at_1_diff1 value: 47.3429 - type: nauc_map_at_3_max value: 11.6937 - type: nauc_map_at_3_std value: -18.9625 - type: nauc_map_at_3_diff1 value: 42.993900000000004 - type: nauc_map_at_5_max value: 11.5064 - type: nauc_map_at_5_std value: -19.0958 - type: nauc_map_at_5_diff1 value: 42.3108 - type: nauc_map_at_10_max value: 11.6615 - type: nauc_map_at_10_std value: -18.885199999999998 - type: nauc_map_at_10_diff1 value: 41.993399999999994 - type: nauc_map_at_20_max value: 11.7419 - type: nauc_map_at_20_std value: -18.7005 - type: nauc_map_at_20_diff1 value: 41.7643 - type: nauc_map_at_100_max value: 11.902600000000001 - type: nauc_map_at_100_std value: -18.4376 - type: nauc_map_at_100_diff1 value: 41.7771 - type: nauc_map_at_1000_max value: 11.9208 - type: nauc_map_at_1000_std value: -18.395500000000002 - type: nauc_map_at_1000_diff1 value: 41.7802 - type: nauc_recall_at_1_max value: 11.8658 - type: nauc_recall_at_1_std value: -18.4852 - type: nauc_recall_at_1_diff1 value: 47.3429 - type: nauc_recall_at_3_max value: 11.3724 - type: nauc_recall_at_3_std value: -19.3869 - type: nauc_recall_at_3_diff1 value: 38.2763 - type: nauc_recall_at_5_max value: 10.678600000000001 - type: nauc_recall_at_5_std value: -19.8995 - type: nauc_recall_at_5_diff1 value: 35.781400000000005 - type: nauc_recall_at_10_max value: 11.7997 - type: nauc_recall_at_10_std 
value: -18.3219 - type: nauc_recall_at_10_diff1 value: 33.7507 - type: nauc_recall_at_20_max value: 12.7832 - type: nauc_recall_at_20_std value: -15.8611 - type: nauc_recall_at_20_diff1 value: 30.4676 - type: nauc_recall_at_100_max value: 20.0012 - type: nauc_recall_at_100_std value: -3.8268000000000004 - type: nauc_recall_at_100_diff1 value: 28.8928 - type: nauc_recall_at_1000_max value: 30.812099999999997 - type: nauc_recall_at_1000_std value: 18.1771 - type: nauc_recall_at_1000_diff1 value: 23.3851 - type: nauc_precision_at_1_max value: 11.8658 - type: nauc_precision_at_1_std value: -18.4852 - type: nauc_precision_at_1_diff1 value: 47.3429 - type: nauc_precision_at_3_max value: 11.3724 - type: nauc_precision_at_3_std value: -19.3869 - type: nauc_precision_at_3_diff1 value: 38.2763 - type: nauc_precision_at_5_max value: 10.678600000000001 - type: nauc_precision_at_5_std value: -19.8995 - type: nauc_precision_at_5_diff1 value: 35.781400000000005 - type: nauc_precision_at_10_max value: 11.7997 - type: nauc_precision_at_10_std value: -18.3219 - type: nauc_precision_at_10_diff1 value: 33.7507 - type: nauc_precision_at_20_max value: 12.7832 - type: nauc_precision_at_20_std value: -15.8611 - type: nauc_precision_at_20_diff1 value: 30.4676 - type: nauc_precision_at_100_max value: 20.0012 - type: nauc_precision_at_100_std value: -3.8268000000000004 - type: nauc_precision_at_100_diff1 value: 28.8928 - type: nauc_precision_at_1000_max value: 30.812099999999997 - type: nauc_precision_at_1000_std value: 18.1771 - type: nauc_precision_at_1000_diff1 value: 23.3851 - type: nauc_mrr_at_1_max value: 11.8658 - type: nauc_mrr_at_1_std value: -18.4852 - type: nauc_mrr_at_1_diff1 value: 47.3429 - type: nauc_mrr_at_3_max value: 11.6937 - type: nauc_mrr_at_3_std value: -18.9625 - type: nauc_mrr_at_3_diff1 value: 42.993900000000004 - type: nauc_mrr_at_5_max value: 11.5064 - type: nauc_mrr_at_5_std value: -19.0958 - type: nauc_mrr_at_5_diff1 value: 42.3108 - type: nauc_mrr_at_10_max value: 11.6615 - type: nauc_mrr_at_10_std value: -18.885199999999998 - type: nauc_mrr_at_10_diff1 value: 41.993399999999994 - type: nauc_mrr_at_20_max value: 11.7419 - type: nauc_mrr_at_20_std value: -18.7005 - type: nauc_mrr_at_20_diff1 value: 41.7643 - type: nauc_mrr_at_100_max value: 11.902600000000001 - type: nauc_mrr_at_100_std value: -18.4376 - type: nauc_mrr_at_100_diff1 value: 41.7771 - type: nauc_mrr_at_1000_max value: 11.9208 - type: nauc_mrr_at_1000_std value: -18.395500000000002 - type: nauc_mrr_at_1000_diff1 value: 41.7802 - type: main_score value: 31.391999999999996 - task: type: Retrieval dataset: name: MTEB CodeFeedbackST (default) type: CoIR-Retrieval/codefeedback-st config: default split: test revision: d213819e87aab9010628da8b73ab4eb337c89340 metrics: - type: ndcg_at_1 value: 51.227000000000004 - type: ndcg_at_3 value: 62.971999999999994 - type: ndcg_at_5 value: 65.649 - type: ndcg_at_10 value: 67.72200000000001 - type: ndcg_at_20 value: 68.919 - type: ndcg_at_100 value: 70.15299999999999 - type: ndcg_at_1000 value: 70.658 - type: map_at_1 value: 51.227000000000004 - type: map_at_3 value: 60.114000000000004 - type: map_at_5 value: 61.607 - type: map_at_10 value: 62.475 - type: map_at_20 value: 62.806 - type: map_at_100 value: 62.979 - type: map_at_1000 value: 62.999 - type: recall_at_1 value: 51.227000000000004 - type: recall_at_3 value: 71.232 - type: recall_at_5 value: 77.69800000000001 - type: recall_at_10 value: 84.041 - type: recall_at_20 value: 88.756 - type: recall_at_100 value: 95.371 - type: recall_at_1000 
value: 99.278 - type: precision_at_1 value: 51.227000000000004 - type: precision_at_3 value: 23.744 - type: precision_at_5 value: 15.540000000000001 - type: precision_at_10 value: 8.404 - type: precision_at_20 value: 4.438000000000001 - type: precision_at_100 value: 0.954 - type: precision_at_1000 value: 0.099 - type: mrr_at_1 value: 51.0062 - type: mrr_at_3 value: 60.0023 - type: mrr_at_5 value: 61.492999999999995 - type: mrr_at_10 value: 62.362899999999996 - type: mrr_at_20 value: 62.693200000000004 - type: mrr_at_100 value: 62.8664 - type: mrr_at_1000 value: 62.8866 - type: nauc_ndcg_at_1_max value: 5.5119 - type: nauc_ndcg_at_1_std value: -27.434599999999996 - type: nauc_ndcg_at_1_diff1 value: 67.3476 - type: nauc_ndcg_at_3_max value: 11.8474 - type: nauc_ndcg_at_3_std value: -30.5305 - type: nauc_ndcg_at_3_diff1 value: 61.4515 - type: nauc_ndcg_at_5_max value: 12.692700000000002 - type: nauc_ndcg_at_5_std value: -30.938 - type: nauc_ndcg_at_5_diff1 value: 61.0505 - type: nauc_ndcg_at_10_max value: 12.354800000000001 - type: nauc_ndcg_at_10_std value: -30.6409 - type: nauc_ndcg_at_10_diff1 value: 61.205600000000004 - type: nauc_ndcg_at_20_max value: 11.9146 - type: nauc_ndcg_at_20_std value: -30.247 - type: nauc_ndcg_at_20_diff1 value: 61.5428 - type: nauc_ndcg_at_100_max value: 11.5407 - type: nauc_ndcg_at_100_std value: -29.561700000000002 - type: nauc_ndcg_at_100_diff1 value: 62.06270000000001 - type: nauc_ndcg_at_1000_max value: 11.2459 - type: nauc_ndcg_at_1000_std value: -29.5751 - type: nauc_ndcg_at_1000_diff1 value: 62.28 - type: nauc_map_at_1_max value: 5.5119 - type: nauc_map_at_1_std value: -27.434599999999996 - type: nauc_map_at_1_diff1 value: 67.3476 - type: nauc_map_at_3_max value: 10.1298 - type: nauc_map_at_3_std value: -29.674899999999997 - type: nauc_map_at_3_diff1 value: 62.982000000000006 - type: nauc_map_at_5_max value: 10.5075 - type: nauc_map_at_5_std value: -29.858600000000003 - type: nauc_map_at_5_diff1 value: 62.829299999999996 - type: nauc_map_at_10_max value: 10.3459 - type: nauc_map_at_10_std value: -29.7338 - type: nauc_map_at_10_diff1 value: 62.917699999999996 - type: nauc_map_at_20_max value: 10.2198 - type: nauc_map_at_20_std value: -29.6284 - type: nauc_map_at_20_diff1 value: 63.01409999999999 - type: nauc_map_at_100_max value: 10.1683 - type: nauc_map_at_100_std value: -29.5448 - type: nauc_map_at_100_diff1 value: 63.0794 - type: nauc_map_at_1000_max value: 10.1602 - type: nauc_map_at_1000_std value: -29.5412 - type: nauc_map_at_1000_diff1 value: 63.0874 - type: nauc_recall_at_1_max value: 5.5119 - type: nauc_recall_at_1_std value: -27.434599999999996 - type: nauc_recall_at_1_diff1 value: 67.3476 - type: nauc_recall_at_3_max value: 17.8724 - type: nauc_recall_at_3_std value: -33.5404 - type: nauc_recall_at_3_diff1 value: 56.1172 - type: nauc_recall_at_5_max value: 21.945700000000002 - type: nauc_recall_at_5_std value: -35.5124 - type: nauc_recall_at_5_diff1 value: 53.6154 - type: nauc_recall_at_10_max value: 23.1968 - type: nauc_recall_at_10_std value: -35.4292 - type: nauc_recall_at_10_diff1 value: 51.998900000000006 - type: nauc_recall_at_20_max value: 23.4056 - type: nauc_recall_at_20_std value: -33.825300000000006 - type: nauc_recall_at_20_diff1 value: 51.544900000000005 - type: nauc_recall_at_100_max value: 29.2331 - type: nauc_recall_at_100_std value: -20.444499999999998 - type: nauc_recall_at_100_diff1 value: 51.8606 - type: nauc_recall_at_1000_max value: 47.943000000000005 - type: nauc_recall_at_1000_std value: 16.1139 - type: 
nauc_recall_at_1000_diff1 value: 49.2407 - type: nauc_precision_at_1_max value: 5.5119 - type: nauc_precision_at_1_std value: -27.434599999999996 - type: nauc_precision_at_1_diff1 value: 67.3476 - type: nauc_precision_at_3_max value: 17.8724 - type: nauc_precision_at_3_std value: -33.5404 - type: nauc_precision_at_3_diff1 value: 56.1172 - type: nauc_precision_at_5_max value: 21.945700000000002 - type: nauc_precision_at_5_std value: -35.5124 - type: nauc_precision_at_5_diff1 value: 53.6154 - type: nauc_precision_at_10_max value: 23.1968 - type: nauc_precision_at_10_std value: -35.4292 - type: nauc_precision_at_10_diff1 value: 51.998900000000006 - type: nauc_precision_at_20_max value: 23.4056 - type: nauc_precision_at_20_std value: -33.825300000000006 - type: nauc_precision_at_20_diff1 value: 51.544900000000005 - type: nauc_precision_at_100_max value: 29.2331 - type: nauc_precision_at_100_std value: -20.444499999999998 - type: nauc_precision_at_100_diff1 value: 51.8606 - type: nauc_precision_at_1000_max value: 47.943000000000005 - type: nauc_precision_at_1000_std value: 16.1139 - type: nauc_precision_at_1000_diff1 value: 49.2407 - type: nauc_mrr_at_1_max value: 4.9502 - type: nauc_mrr_at_1_std value: -27.426099999999998 - type: nauc_mrr_at_1_diff1 value: 67.8214 - type: nauc_mrr_at_3_max value: 9.7423 - type: nauc_mrr_at_3_std value: -29.674699999999998 - type: nauc_mrr_at_3_diff1 value: 63.24340000000001 - type: nauc_mrr_at_5_max value: 10.1129 - type: nauc_mrr_at_5_std value: -29.871100000000002 - type: nauc_mrr_at_5_diff1 value: 63.1148 - type: nauc_mrr_at_10_max value: 9.9493 - type: nauc_mrr_at_10_std value: -29.7413 - type: nauc_mrr_at_10_diff1 value: 63.2057 - type: nauc_mrr_at_20_max value: 9.8157 - type: nauc_mrr_at_20_std value: -29.644 - type: nauc_mrr_at_20_diff1 value: 63.307100000000005 - type: nauc_mrr_at_100_max value: 9.7639 - type: nauc_mrr_at_100_std value: -29.5582 - type: nauc_mrr_at_100_diff1 value: 63.3738 - type: nauc_mrr_at_1000_max value: 9.7555 - type: nauc_mrr_at_1000_std value: -29.554599999999997 - type: nauc_mrr_at_1000_diff1 value: 63.382000000000005 - type: main_score value: 67.72200000000001 - task: type: Retrieval dataset: name: MTEB CodeSearchNetCCRetrieval (python) type: CoIR-Retrieval/CodeSearchNet-ccr config: python split: test revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8 metrics: - type: ndcg_at_1 value: 32.417 - type: ndcg_at_3 value: 40.904 - type: ndcg_at_5 value: 43.321 - type: ndcg_at_10 value: 45.532000000000004 - type: ndcg_at_20 value: 47.071000000000005 - type: ndcg_at_100 value: 49.297999999999995 - type: ndcg_at_1000 value: 50.859 - type: map_at_1 value: 32.417 - type: map_at_3 value: 38.829 - type: map_at_5 value: 40.166000000000004 - type: map_at_10 value: 41.087 - type: map_at_20 value: 41.510999999999996 - type: map_at_100 value: 41.815000000000005 - type: map_at_1000 value: 41.869 - type: recall_at_1 value: 32.417 - type: recall_at_3 value: 46.903 - type: recall_at_5 value: 52.788999999999994 - type: recall_at_10 value: 59.57900000000001 - type: recall_at_20 value: 65.652 - type: recall_at_100 value: 77.718 - type: recall_at_1000 value: 90.294 - type: precision_at_1 value: 32.417 - type: precision_at_3 value: 15.634 - type: precision_at_5 value: 10.558 - type: precision_at_10 value: 5.958 - type: precision_at_20 value: 3.283 - type: precision_at_100 value: 0.777 - type: precision_at_1000 value: 0.09 - type: mrr_at_1 value: 32.4239 - type: mrr_at_3 value: 38.8323 - type: mrr_at_5 value: 40.1696 - type: mrr_at_10 value: 41.0908 - 
type: mrr_at_20 value: 41.5149 - type: mrr_at_100 value: 41.8188 - type: mrr_at_1000 value: 41.8726 - type: nauc_ndcg_at_1_max value: 32.4803 - type: nauc_ndcg_at_1_std value: -1.1774 - type: nauc_ndcg_at_1_diff1 value: 54.68730000000001 - type: nauc_ndcg_at_3_max value: 33.5662 - type: nauc_ndcg_at_3_std value: 0.361 - type: nauc_ndcg_at_3_diff1 value: 49.522 - type: nauc_ndcg_at_5_max value: 33.0861 - type: nauc_ndcg_at_5_std value: 0.5551999999999999 - type: nauc_ndcg_at_5_diff1 value: 48.9052 - type: nauc_ndcg_at_10_max value: 33.0427 - type: nauc_ndcg_at_10_std value: 1.466 - type: nauc_ndcg_at_10_diff1 value: 48.3256 - type: nauc_ndcg_at_20_max value: 33.059 - type: nauc_ndcg_at_20_std value: 2.2277 - type: nauc_ndcg_at_20_diff1 value: 48.2916 - type: nauc_ndcg_at_100_max value: 33.0797 - type: nauc_ndcg_at_100_std value: 2.9991 - type: nauc_ndcg_at_100_diff1 value: 48.266999999999996 - type: nauc_ndcg_at_1000_max value: 33.1052 - type: nauc_ndcg_at_1000_std value: 2.8583000000000003 - type: nauc_ndcg_at_1000_diff1 value: 48.5209 - type: nauc_map_at_1_max value: 32.4803 - type: nauc_map_at_1_std value: -1.1774 - type: nauc_map_at_1_diff1 value: 54.68730000000001 - type: nauc_map_at_3_max value: 33.3014 - type: nauc_map_at_3_std value: -0.06409999999999999 - type: nauc_map_at_3_diff1 value: 50.6726 - type: nauc_map_at_5_max value: 33.0327 - type: nauc_map_at_5_std value: 0.0325 - type: nauc_map_at_5_diff1 value: 50.3363 - type: nauc_map_at_10_max value: 33.0181 - type: nauc_map_at_10_std value: 0.3939 - type: nauc_map_at_10_diff1 value: 50.1109 - type: nauc_map_at_20_max value: 33.0183 - type: nauc_map_at_20_std value: 0.5951 - type: nauc_map_at_20_diff1 value: 50.108 - type: nauc_map_at_100_max value: 33.022 - type: nauc_map_at_100_std value: 0.6973 - type: nauc_map_at_100_diff1 value: 50.10790000000001 - type: nauc_map_at_1000_max value: 33.022 - type: nauc_map_at_1000_std value: 0.6931999999999999 - type: nauc_map_at_1000_diff1 value: 50.1174 - type: nauc_recall_at_1_max value: 32.4803 - type: nauc_recall_at_1_std value: -1.1774 - type: nauc_recall_at_1_diff1 value: 54.68730000000001 - type: nauc_recall_at_3_max value: 34.3301 - type: nauc_recall_at_3_std value: 1.6075 - type: nauc_recall_at_3_diff1 value: 46.2477 - type: nauc_recall_at_5_max value: 33.177299999999995 - type: nauc_recall_at_5_std value: 2.1687000000000003 - type: nauc_recall_at_5_diff1 value: 44.61 - type: nauc_recall_at_10_max value: 33.020500000000006 - type: nauc_recall_at_10_std value: 5.3331 - type: nauc_recall_at_10_diff1 value: 42.3796 - type: nauc_recall_at_20_max value: 33.1279 - type: nauc_recall_at_20_std value: 9.2437 - type: nauc_recall_at_20_diff1 value: 41.584199999999996 - type: nauc_recall_at_100_max value: 33.2882 - type: nauc_recall_at_100_std value: 18.1866 - type: nauc_recall_at_100_diff1 value: 38.9221 - type: nauc_recall_at_1000_max value: 34.2607 - type: nauc_recall_at_1000_std value: 30.5699 - type: nauc_recall_at_1000_diff1 value: 35.204800000000006 - type: nauc_precision_at_1_max value: 32.4803 - type: nauc_precision_at_1_std value: -1.1774 - type: nauc_precision_at_1_diff1 value: 54.68730000000001 - type: nauc_precision_at_3_max value: 34.3301 - type: nauc_precision_at_3_std value: 1.6075 - type: nauc_precision_at_3_diff1 value: 46.2477 - type: nauc_precision_at_5_max value: 33.177299999999995 - type: nauc_precision_at_5_std value: 2.1687000000000003 - type: nauc_precision_at_5_diff1 value: 44.61 - type: nauc_precision_at_10_max value: 33.020500000000006 - type: nauc_precision_at_10_std 
value: 5.3331 - type: nauc_precision_at_10_diff1 value: 42.3796 - type: nauc_precision_at_20_max value: 33.1279 - type: nauc_precision_at_20_std value: 9.2437 - type: nauc_precision_at_20_diff1 value: 41.584199999999996 - type: nauc_precision_at_100_max value: 33.2882 - type: nauc_precision_at_100_std value: 18.1866 - type: nauc_precision_at_100_diff1 value: 38.9221 - type: nauc_precision_at_1000_max value: 34.2607 - type: nauc_precision_at_1000_std value: 30.5699 - type: nauc_precision_at_1000_diff1 value: 35.204800000000006 - type: nauc_mrr_at_1_max value: 32.5013 - type: nauc_mrr_at_1_std value: -1.1843 - type: nauc_mrr_at_1_diff1 value: 54.6663 - type: nauc_mrr_at_3_max value: 33.315 - type: nauc_mrr_at_3_std value: -0.06849999999999999 - type: nauc_mrr_at_3_diff1 value: 50.66460000000001 - type: nauc_mrr_at_5_max value: 33.0452 - type: nauc_mrr_at_5_std value: 0.0305 - type: nauc_mrr_at_5_diff1 value: 50.326499999999996 - type: nauc_mrr_at_10_max value: 33.0308 - type: nauc_mrr_at_10_std value: 0.39189999999999997 - type: nauc_mrr_at_10_diff1 value: 50.101 - type: nauc_mrr_at_20_max value: 33.031 - type: nauc_mrr_at_20_std value: 0.5930000000000001 - type: nauc_mrr_at_20_diff1 value: 50.0981 - type: nauc_mrr_at_100_max value: 33.0348 - type: nauc_mrr_at_100_std value: 0.6952 - type: nauc_mrr_at_100_diff1 value: 50.097899999999996 - type: nauc_mrr_at_1000_max value: 33.0348 - type: nauc_mrr_at_1000_std value: 0.6910999999999999 - type: nauc_mrr_at_1000_diff1 value: 50.1074 - type: main_score value: 45.532000000000004 - task: type: Retrieval dataset: name: MTEB CodeSearchNetCCRetrieval (javascript) type: CoIR-Retrieval/CodeSearchNet-ccr config: javascript split: test revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8 metrics: - type: ndcg_at_1 value: 33.364 - type: ndcg_at_3 value: 41.943999999999996 - type: ndcg_at_5 value: 44.167 - type: ndcg_at_10 value: 46.024 - type: ndcg_at_20 value: 47.508 - type: ndcg_at_100 value: 49.668 - type: ndcg_at_1000 value: 51.336999999999996 - type: map_at_1 value: 33.364 - type: map_at_3 value: 39.846 - type: map_at_5 value: 41.083999999999996 - type: map_at_10 value: 41.85 - type: map_at_20 value: 42.254000000000005 - type: map_at_100 value: 42.547000000000004 - type: map_at_1000 value: 42.601 - type: recall_at_1 value: 33.364 - type: recall_at_3 value: 48.010000000000005 - type: recall_at_5 value: 53.388000000000005 - type: recall_at_10 value: 59.131 - type: recall_at_20 value: 65.026 - type: recall_at_100 value: 76.755 - type: recall_at_1000 value: 90.398 - type: precision_at_1 value: 33.364 - type: precision_at_3 value: 16.003 - type: precision_at_5 value: 10.678 - type: precision_at_10 value: 5.913 - type: precision_at_20 value: 3.251 - type: precision_at_100 value: 0.768 - type: precision_at_1000 value: 0.09 - type: mrr_at_1 value: 33.272600000000004 - type: mrr_at_3 value: 39.7954 - type: mrr_at_5 value: 41.0412 - type: mrr_at_10 value: 41.8073 - type: mrr_at_20 value: 42.2109 - type: mrr_at_100 value: 42.5037 - type: mrr_at_1000 value: 42.5577 - type: nauc_ndcg_at_1_max value: 26.6036 - type: nauc_ndcg_at_1_std value: -8.3972 - type: nauc_ndcg_at_1_diff1 value: 52.43560000000001 - type: nauc_ndcg_at_3_max value: 28.5119 - type: nauc_ndcg_at_3_std value: -5.6812000000000005 - type: nauc_ndcg_at_3_diff1 value: 47.1671 - type: nauc_ndcg_at_5_max value: 28.1875 - type: nauc_ndcg_at_5_std value: -5.6434999999999995 - type: nauc_ndcg_at_5_diff1 value: 46.1849 - type: nauc_ndcg_at_10_max value: 27.5534 - type: nauc_ndcg_at_10_std value: 
-5.6785000000000005 - type: nauc_ndcg_at_10_diff1 value: 45.6927 - type: nauc_ndcg_at_20_max value: 27.4338 - type: nauc_ndcg_at_20_std value: -5.5037 - type: nauc_ndcg_at_20_diff1 value: 45.872800000000005 - type: nauc_ndcg_at_100_max value: 27.386100000000003 - type: nauc_ndcg_at_100_std value: -5.2795000000000005 - type: nauc_ndcg_at_100_diff1 value: 46.1008 - type: nauc_ndcg_at_1000_max value: 27.5195 - type: nauc_ndcg_at_1000_std value: -5.0668999999999995 - type: nauc_ndcg_at_1000_diff1 value: 46.381499999999996 - type: nauc_map_at_1_max value: 26.6036 - type: nauc_map_at_1_std value: -8.3972 - type: nauc_map_at_1_diff1 value: 52.43560000000001 - type: nauc_map_at_3_max value: 28.098699999999997 - type: nauc_map_at_3_std value: -6.357500000000001 - type: nauc_map_at_3_diff1 value: 48.4799 - type: nauc_map_at_5_max value: 27.938000000000002 - type: nauc_map_at_5_std value: -6.3283000000000005 - type: nauc_map_at_5_diff1 value: 47.955799999999996 - type: nauc_map_at_10_max value: 27.6989 - type: nauc_map_at_10_std value: -6.3546000000000005 - type: nauc_map_at_10_diff1 value: 47.7813 - type: nauc_map_at_20_max value: 27.637099999999997 - type: nauc_map_at_20_std value: -6.3278 - type: nauc_map_at_20_diff1 value: 47.8258 - type: nauc_map_at_100_max value: 27.6654 - type: nauc_map_at_100_std value: -6.284199999999999 - type: nauc_map_at_100_diff1 value: 47.8675 - type: nauc_map_at_1000_max value: 27.668599999999998 - type: nauc_map_at_1000_std value: -6.2727 - type: nauc_map_at_1000_diff1 value: 47.8793 - type: nauc_recall_at_1_max value: 26.6036 - type: nauc_recall_at_1_std value: -8.3972 - type: nauc_recall_at_1_diff1 value: 52.43560000000001 - type: nauc_recall_at_3_max value: 29.686600000000002 - type: nauc_recall_at_3_std value: -3.7178999999999998 - type: nauc_recall_at_3_diff1 value: 43.3556 - type: nauc_recall_at_5_max value: 28.835499999999996 - type: nauc_recall_at_5_std value: -3.6023 - type: nauc_recall_at_5_diff1 value: 40.7246 - type: nauc_recall_at_10_max value: 26.6593 - type: nauc_recall_at_10_std value: -3.5498000000000003 - type: nauc_recall_at_10_diff1 value: 38.6728 - type: nauc_recall_at_20_max value: 26.293499999999998 - type: nauc_recall_at_20_std value: -2.3813 - type: nauc_recall_at_20_diff1 value: 38.8857 - type: nauc_recall_at_100_max value: 24.7411 - type: nauc_recall_at_100_std value: 0.1296 - type: nauc_recall_at_100_diff1 value: 38.1683 - type: nauc_recall_at_1000_max value: 25.1934 - type: nauc_recall_at_1000_std value: 10.7766 - type: nauc_recall_at_1000_diff1 value: 35.856300000000005 - type: nauc_precision_at_1_max value: 26.6036 - type: nauc_precision_at_1_std value: -8.3972 - type: nauc_precision_at_1_diff1 value: 52.43560000000001 - type: nauc_precision_at_3_max value: 29.686600000000002 - type: nauc_precision_at_3_std value: -3.7178999999999998 - type: nauc_precision_at_3_diff1 value: 43.3556 - type: nauc_precision_at_5_max value: 28.835499999999996 - type: nauc_precision_at_5_std value: -3.6023 - type: nauc_precision_at_5_diff1 value: 40.7246 - type: nauc_precision_at_10_max value: 26.6593 - type: nauc_precision_at_10_std value: -3.5498000000000003 - type: nauc_precision_at_10_diff1 value: 38.6728 - type: nauc_precision_at_20_max value: 26.293499999999998 - type: nauc_precision_at_20_std value: -2.3813 - type: nauc_precision_at_20_diff1 value: 38.8857 - type: nauc_precision_at_100_max value: 24.7411 - type: nauc_precision_at_100_std value: 0.1296 - type: nauc_precision_at_100_diff1 value: 38.1683 - type: nauc_precision_at_1000_max value: 25.1934 - 
type: nauc_precision_at_1000_std value: 10.7766 - type: nauc_precision_at_1000_diff1 value: 35.856300000000005 - type: nauc_mrr_at_1_max value: 26.7351 - type: nauc_mrr_at_1_std value: -8.2798 - type: nauc_mrr_at_1_diff1 value: 52.7186 - type: nauc_mrr_at_3_max value: 28.1671 - type: nauc_mrr_at_3_std value: -6.3235 - type: nauc_mrr_at_3_diff1 value: 48.6387 - type: nauc_mrr_at_5_max value: 28.0115 - type: nauc_mrr_at_5_std value: -6.256399999999999 - type: nauc_mrr_at_5_diff1 value: 48.098400000000005 - type: nauc_mrr_at_10_max value: 27.7729 - type: nauc_mrr_at_10_std value: -6.2821 - type: nauc_mrr_at_10_diff1 value: 47.925000000000004 - type: nauc_mrr_at_20_max value: 27.7115 - type: nauc_mrr_at_20_std value: -6.254899999999999 - type: nauc_mrr_at_20_diff1 value: 47.9703 - type: nauc_mrr_at_100_max value: 27.740199999999998 - type: nauc_mrr_at_100_std value: -6.2109 - type: nauc_mrr_at_100_diff1 value: 48.0128 - type: nauc_mrr_at_1000_max value: 27.743499999999997 - type: nauc_mrr_at_1000_std value: -6.1993 - type: nauc_mrr_at_1000_diff1 value: 48.0248 - type: main_score value: 46.024 - task: type: Retrieval dataset: name: MTEB CodeSearchNetCCRetrieval (go) type: CoIR-Retrieval/CodeSearchNet-ccr config: go split: test revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8 metrics: - type: ndcg_at_1 value: 26.471 - type: ndcg_at_3 value: 33.489999999999995 - type: ndcg_at_5 value: 35.55 - type: ndcg_at_10 value: 37.555 - type: ndcg_at_20 value: 39.029 - type: ndcg_at_100 value: 41.478 - type: ndcg_at_1000 value: 43.457 - type: map_at_1 value: 26.471 - type: map_at_3 value: 31.774 - type: map_at_5 value: 32.915 - type: map_at_10 value: 33.745999999999995 - type: map_at_20 value: 34.150000000000006 - type: map_at_100 value: 34.477999999999994 - type: map_at_1000 value: 34.544000000000004 - type: recall_at_1 value: 26.471 - type: recall_at_3 value: 38.451 - type: recall_at_5 value: 43.462 - type: recall_at_10 value: 49.643 - type: recall_at_20 value: 55.479 - type: recall_at_100 value: 68.825 - type: recall_at_1000 value: 84.93 - type: precision_at_1 value: 26.471 - type: precision_at_3 value: 12.817 - type: precision_at_5 value: 8.692 - type: precision_at_10 value: 4.9639999999999995 - type: precision_at_20 value: 2.774 - type: precision_at_100 value: 0.688 - type: precision_at_1000 value: 0.08499999999999999 - type: mrr_at_1 value: 26.459 - type: mrr_at_3 value: 31.757400000000004 - type: mrr_at_5 value: 32.9092 - type: mrr_at_10 value: 33.7387 - type: mrr_at_20 value: 34.1428 - type: mrr_at_100 value: 34.471000000000004 - type: mrr_at_1000 value: 34.5364 - type: nauc_ndcg_at_1_max value: 29.408299999999997 - type: nauc_ndcg_at_1_std value: 1.5685 - type: nauc_ndcg_at_1_diff1 value: 45.834599999999995 - type: nauc_ndcg_at_3_max value: 27.7526 - type: nauc_ndcg_at_3_std value: -0.43810000000000004 - type: nauc_ndcg_at_3_diff1 value: 41.272999999999996 - type: nauc_ndcg_at_5_max value: 27.2864 - type: nauc_ndcg_at_5_std value: -0.37820000000000004 - type: nauc_ndcg_at_5_diff1 value: 40.4934 - type: nauc_ndcg_at_10_max value: 26.845599999999997 - type: nauc_ndcg_at_10_std value: -0.3317 - type: nauc_ndcg_at_10_diff1 value: 39.9305 - type: nauc_ndcg_at_20_max value: 26.4669 - type: nauc_ndcg_at_20_std value: 0.1423 - type: nauc_ndcg_at_20_diff1 value: 39.432 - type: nauc_ndcg_at_100_max value: 26.3318 - type: nauc_ndcg_at_100_std value: 0.8049000000000001 - type: nauc_ndcg_at_100_diff1 value: 39.0276 - type: nauc_ndcg_at_1000_max value: 26.5895 - type: nauc_ndcg_at_1000_std value: 1.0204 - type: 
nauc_ndcg_at_1000_diff1 value: 39.2747 - type: nauc_map_at_1_max value: 29.408299999999997 - type: nauc_map_at_1_std value: 1.5685 - type: nauc_map_at_1_diff1 value: 45.834599999999995 - type: nauc_map_at_3_max value: 28.1245 - type: nauc_map_at_3_std value: -0.006999999999999999 - type: nauc_map_at_3_diff1 value: 42.2701 - type: nauc_map_at_5_max value: 27.8563 - type: nauc_map_at_5_std value: 0.0204 - type: nauc_map_at_5_diff1 value: 41.8294 - type: nauc_map_at_10_max value: 27.6709 - type: nauc_map_at_10_std value: 0.0262 - type: nauc_map_at_10_diff1 value: 41.5973 - type: nauc_map_at_20_max value: 27.572000000000003 - type: nauc_map_at_20_std value: 0.1652 - type: nauc_map_at_20_diff1 value: 41.4683 - type: nauc_map_at_100_max value: 27.5573 - type: nauc_map_at_100_std value: 0.243 - type: nauc_map_at_100_diff1 value: 41.4201 - type: nauc_map_at_1000_max value: 27.5663 - type: nauc_map_at_1000_std value: 0.254 - type: nauc_map_at_1000_diff1 value: 41.4275 - type: nauc_recall_at_1_max value: 29.408299999999997 - type: nauc_recall_at_1_std value: 1.5685 - type: nauc_recall_at_1_diff1 value: 45.834599999999995 - type: nauc_recall_at_3_max value: 26.737499999999997 - type: nauc_recall_at_3_std value: -1.6067999999999998 - type: nauc_recall_at_3_diff1 value: 38.5628 - type: nauc_recall_at_5_max value: 25.6664 - type: nauc_recall_at_5_std value: -1.4459 - type: nauc_recall_at_5_diff1 value: 36.7369 - type: nauc_recall_at_10_max value: 24.3156 - type: nauc_recall_at_10_std value: -1.25 - type: nauc_recall_at_10_diff1 value: 34.959 - type: nauc_recall_at_20_max value: 22.6187 - type: nauc_recall_at_20_std value: 0.5539 - type: nauc_recall_at_20_diff1 value: 32.634299999999996 - type: nauc_recall_at_100_max value: 20.8069 - type: nauc_recall_at_100_std value: 5.2502 - type: nauc_recall_at_100_diff1 value: 28.3304 - type: nauc_recall_at_1000_max value: 20.8473 - type: nauc_recall_at_1000_std value: 12.2405 - type: nauc_recall_at_1000_diff1 value: 24.2366 - type: nauc_precision_at_1_max value: 29.408299999999997 - type: nauc_precision_at_1_std value: 1.5685 - type: nauc_precision_at_1_diff1 value: 45.834599999999995 - type: nauc_precision_at_3_max value: 26.737499999999997 - type: nauc_precision_at_3_std value: -1.6067999999999998 - type: nauc_precision_at_3_diff1 value: 38.5628 - type: nauc_precision_at_5_max value: 25.6664 - type: nauc_precision_at_5_std value: -1.4459 - type: nauc_precision_at_5_diff1 value: 36.7369 - type: nauc_precision_at_10_max value: 24.3156 - type: nauc_precision_at_10_std value: -1.25 - type: nauc_precision_at_10_diff1 value: 34.959 - type: nauc_precision_at_20_max value: 22.6187 - type: nauc_precision_at_20_std value: 0.5539 - type: nauc_precision_at_20_diff1 value: 32.634299999999996 - type: nauc_precision_at_100_max value: 20.8069 - type: nauc_precision_at_100_std value: 5.2502 - type: nauc_precision_at_100_diff1 value: 28.3304 - type: nauc_precision_at_1000_max value: 20.8473 - type: nauc_precision_at_1000_std value: 12.2405 - type: nauc_precision_at_1000_diff1 value: 24.2366 - type: nauc_mrr_at_1_max value: 29.435499999999998 - type: nauc_mrr_at_1_std value: 1.5623 - type: nauc_mrr_at_1_diff1 value: 45.8822 - type: nauc_mrr_at_3_max value: 28.183000000000003 - type: nauc_mrr_at_3_std value: -0.00039999999999999996 - type: nauc_mrr_at_3_diff1 value: 42.2776 - type: nauc_mrr_at_5_max value: 27.8735 - type: nauc_mrr_at_5_std value: 0.0288 - type: nauc_mrr_at_5_diff1 value: 41.827999999999996 - type: nauc_mrr_at_10_max value: 27.6989 - type: nauc_mrr_at_10_std value: 
0.0349 - type: nauc_mrr_at_10_diff1 value: 41.6043 - type: nauc_mrr_at_20_max value: 27.599 - type: nauc_mrr_at_20_std value: 0.1719 - type: nauc_mrr_at_20_diff1 value: 41.4786 - type: nauc_mrr_at_100_max value: 27.5846 - type: nauc_mrr_at_100_std value: 0.25 - type: nauc_mrr_at_100_diff1 value: 41.4307 - type: nauc_mrr_at_1000_max value: 27.5937 - type: nauc_mrr_at_1000_std value: 0.261 - type: nauc_mrr_at_1000_diff1 value: 41.4381 - type: main_score value: 37.555 - task: type: Retrieval dataset: name: MTEB CodeSearchNetCCRetrieval (ruby) type: CoIR-Retrieval/CodeSearchNet-ccr config: ruby split: test revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8 metrics: - type: ndcg_at_1 value: 36.003 - type: ndcg_at_3 value: 43.306 - type: ndcg_at_5 value: 45.443 - type: ndcg_at_10 value: 47.549 - type: ndcg_at_20 value: 48.872 - type: ndcg_at_100 value: 50.651 - type: ndcg_at_1000 value: 52.406 - type: map_at_1 value: 36.003 - type: map_at_3 value: 41.501 - type: map_at_5 value: 42.695 - type: map_at_10 value: 43.580999999999996 - type: map_at_20 value: 43.954 - type: map_at_100 value: 44.195 - type: map_at_1000 value: 44.255 - type: recall_at_1 value: 36.003 - type: recall_at_3 value: 48.533 - type: recall_at_5 value: 53.688 - type: recall_at_10 value: 60.111000000000004 - type: recall_at_20 value: 65.266 - type: recall_at_100 value: 74.941 - type: recall_at_1000 value: 89.056 - type: precision_at_1 value: 36.003 - type: precision_at_3 value: 16.178 - type: precision_at_5 value: 10.738 - type: precision_at_10 value: 6.010999999999999 - type: precision_at_20 value: 3.263 - type: precision_at_100 value: 0.749 - type: precision_at_1000 value: 0.089 - type: mrr_at_1 value: 36.0032 - type: mrr_at_3 value: 41.5015 - type: mrr_at_5 value: 42.695 - type: mrr_at_10 value: 43.580600000000004 - type: mrr_at_20 value: 43.9543 - type: mrr_at_100 value: 44.195299999999996 - type: mrr_at_1000 value: 44.255 - type: nauc_ndcg_at_1_max value: 32.9994 - type: nauc_ndcg_at_1_std value: -12.2575 - type: nauc_ndcg_at_1_diff1 value: 55.63360000000001 - type: nauc_ndcg_at_3_max value: 33.314899999999994 - type: nauc_ndcg_at_3_std value: -11.4208 - type: nauc_ndcg_at_3_diff1 value: 50.995599999999996 - type: nauc_ndcg_at_5_max value: 33.1612 - type: nauc_ndcg_at_5_std value: -11.4067 - type: nauc_ndcg_at_5_diff1 value: 50.766999999999996 - type: nauc_ndcg_at_10_max value: 32.903999999999996 - type: nauc_ndcg_at_10_std value: -11.447000000000001 - type: nauc_ndcg_at_10_diff1 value: 50.1061 - type: nauc_ndcg_at_20_max value: 32.8849 - type: nauc_ndcg_at_20_std value: -11.4567 - type: nauc_ndcg_at_20_diff1 value: 50.0131 - type: nauc_ndcg_at_100_max value: 32.5449 - type: nauc_ndcg_at_100_std value: -11.0686 - type: nauc_ndcg_at_100_diff1 value: 49.7046 - type: nauc_ndcg_at_1000_max value: 32.7575 - type: nauc_ndcg_at_1000_std value: -10.9682 - type: nauc_ndcg_at_1000_diff1 value: 50.17359999999999 - type: nauc_map_at_1_max value: 32.9994 - type: nauc_map_at_1_std value: -12.2575 - type: nauc_map_at_1_diff1 value: 55.63360000000001 - type: nauc_map_at_3_max value: 33.2746 - type: nauc_map_at_3_std value: -11.5215 - type: nauc_map_at_3_diff1 value: 52.1439 - type: nauc_map_at_5_max value: 33.206799999999994 - type: nauc_map_at_5_std value: -11.533 - type: nauc_map_at_5_diff1 value: 52.0477 - type: nauc_map_at_10_max value: 33.1113 - type: nauc_map_at_10_std value: -11.5406 - type: nauc_map_at_10_diff1 value: 51.8103 - type: nauc_map_at_20_max value: 33.070899999999995 - type: nauc_map_at_20_std value: -11.5655 - type: 
nauc_map_at_20_diff1 value: 51.7759 - type: nauc_map_at_100_max value: 32.9989 - type: nauc_map_at_100_std value: -11.546 - type: nauc_map_at_100_diff1 value: 51.739000000000004 - type: nauc_map_at_1000_max value: 33.0074 - type: nauc_map_at_1000_std value: -11.541 - type: nauc_map_at_1000_diff1 value: 51.7548 - type: nauc_recall_at_1_max value: 32.9994 - type: nauc_recall_at_1_std value: -12.2575 - type: nauc_recall_at_1_diff1 value: 55.63360000000001 - type: nauc_recall_at_3_max value: 33.4172 - type: nauc_recall_at_3_std value: -11.1701 - type: nauc_recall_at_3_diff1 value: 47.6442 - type: nauc_recall_at_5_max value: 32.962799999999994 - type: nauc_recall_at_5_std value: -11.0448 - type: nauc_recall_at_5_diff1 value: 46.8433 - type: nauc_recall_at_10_max value: 32.042500000000004 - type: nauc_recall_at_10_std value: -11.2125 - type: nauc_recall_at_10_diff1 value: 44.2396 - type: nauc_recall_at_20_max value: 32.1997 - type: nauc_recall_at_20_std value: -11.0222 - type: nauc_recall_at_20_diff1 value: 43.4014 - type: nauc_recall_at_100_max value: 29.972500000000004 - type: nauc_recall_at_100_std value: -7.2572 - type: nauc_recall_at_100_diff1 value: 39.285199999999996 - type: nauc_recall_at_1000_max value: 31.759300000000003 - type: nauc_recall_at_1000_std value: -1.555 - type: nauc_recall_at_1000_diff1 value: 38.7819 - type: nauc_precision_at_1_max value: 32.9994 - type: nauc_precision_at_1_std value: -12.2575 - type: nauc_precision_at_1_diff1 value: 55.63360000000001 - type: nauc_precision_at_3_max value: 33.4172 - type: nauc_precision_at_3_std value: -11.1701 - type: nauc_precision_at_3_diff1 value: 47.6442 - type: nauc_precision_at_5_max value: 32.962799999999994 - type: nauc_precision_at_5_std value: -11.0448 - type: nauc_precision_at_5_diff1 value: 46.8433 - type: nauc_precision_at_10_max value: 32.042500000000004 - type: nauc_precision_at_10_std value: -11.2125 - type: nauc_precision_at_10_diff1 value: 44.2396 - type: nauc_precision_at_20_max value: 32.1997 - type: nauc_precision_at_20_std value: -11.0222 - type: nauc_precision_at_20_diff1 value: 43.4014 - type: nauc_precision_at_100_max value: 29.972500000000004 - type: nauc_precision_at_100_std value: -7.2572 - type: nauc_precision_at_100_diff1 value: 39.285199999999996 - type: nauc_precision_at_1000_max value: 31.759300000000003 - type: nauc_precision_at_1000_std value: -1.555 - type: nauc_precision_at_1000_diff1 value: 38.7819 - type: nauc_mrr_at_1_max value: 33.1174 - type: nauc_mrr_at_1_std value: -12.0388 - type: nauc_mrr_at_1_diff1 value: 55.63360000000001 - type: nauc_mrr_at_3_max value: 33.333800000000004 - type: nauc_mrr_at_3_std value: -11.4119 - type: nauc_mrr_at_3_diff1 value: 52.1439 - type: nauc_mrr_at_5_max value: 33.2665 - type: nauc_mrr_at_5_std value: -11.4223 - type: nauc_mrr_at_5_diff1 value: 52.0477 - type: nauc_mrr_at_10_max value: 33.1716 - type: nauc_mrr_at_10_std value: -11.4289 - type: nauc_mrr_at_10_diff1 value: 51.8103 - type: nauc_mrr_at_20_max value: 33.1315 - type: nauc_mrr_at_20_std value: -11.4531 - type: nauc_mrr_at_20_diff1 value: 51.7759 - type: nauc_mrr_at_100_max value: 33.0598 - type: nauc_mrr_at_100_std value: -11.4331 - type: nauc_mrr_at_100_diff1 value: 51.739000000000004 - type: nauc_mrr_at_1000_max value: 33.0684 - type: nauc_mrr_at_1000_std value: -11.428 - type: nauc_mrr_at_1000_diff1 value: 51.7548 - type: main_score value: 47.549 - task: type: Retrieval dataset: name: MTEB CodeSearchNetCCRetrieval (java) type: CoIR-Retrieval/CodeSearchNet-ccr config: java split: test revision: 
6e1effa2c03723c5fde48ee912b5ee08d4f211e8 metrics: - type: ndcg_at_1 value: 33.355000000000004 - type: ndcg_at_3 value: 41.551 - type: ndcg_at_5 value: 43.592 - type: ndcg_at_10 value: 45.539 - type: ndcg_at_20 value: 46.922999999999995 - type: ndcg_at_100 value: 49.01 - type: ndcg_at_1000 value: 50.592000000000006 - type: map_at_1 value: 33.355000000000004 - type: map_at_3 value: 39.582 - type: map_at_5 value: 40.716 - type: map_at_10 value: 41.524 - type: map_at_20 value: 41.905 - type: map_at_100 value: 42.185 - type: map_at_1000 value: 42.239 - type: recall_at_1 value: 33.355000000000004 - type: recall_at_3 value: 47.23 - type: recall_at_5 value: 52.17699999999999 - type: recall_at_10 value: 58.17400000000001 - type: recall_at_20 value: 63.641999999999996 - type: recall_at_100 value: 75.034 - type: recall_at_1000 value: 87.85 - type: precision_at_1 value: 33.355000000000004 - type: precision_at_3 value: 15.742999999999999 - type: precision_at_5 value: 10.435 - type: precision_at_10 value: 5.817 - type: precision_at_20 value: 3.182 - type: precision_at_100 value: 0.75 - type: precision_at_1000 value: 0.08800000000000001 - type: mrr_at_1 value: 33.3455 - type: mrr_at_3 value: 39.569500000000005 - type: mrr_at_5 value: 40.7055 - type: mrr_at_10 value: 41.5123 - type: mrr_at_20 value: 41.8948 - type: mrr_at_100 value: 42.175200000000004 - type: mrr_at_1000 value: 42.228500000000004 - type: nauc_ndcg_at_1_max value: 29.177500000000002 - type: nauc_ndcg_at_1_std value: -5.8229999999999995 - type: nauc_ndcg_at_1_diff1 value: 53.2548 - type: nauc_ndcg_at_3_max value: 31.0728 - type: nauc_ndcg_at_3_std value: -4.3403 - type: nauc_ndcg_at_3_diff1 value: 48.6597 - type: nauc_ndcg_at_5_max value: 30.9135 - type: nauc_ndcg_at_5_std value: -3.5812999999999997 - type: nauc_ndcg_at_5_diff1 value: 47.6076 - type: nauc_ndcg_at_10_max value: 30.662899999999997 - type: nauc_ndcg_at_10_std value: -3.3078999999999996 - type: nauc_ndcg_at_10_diff1 value: 46.9647 - type: nauc_ndcg_at_20_max value: 30.7534 - type: nauc_ndcg_at_20_std value: -2.6957 - type: nauc_ndcg_at_20_diff1 value: 46.6956 - type: nauc_ndcg_at_100_max value: 30.8268 - type: nauc_ndcg_at_100_std value: -1.9675000000000002 - type: nauc_ndcg_at_100_diff1 value: 46.4854 - type: nauc_ndcg_at_1000_max value: 30.7713 - type: nauc_ndcg_at_1000_std value: -1.9892 - type: nauc_ndcg_at_1000_diff1 value: 46.7157 - type: nauc_map_at_1_max value: 29.177500000000002 - type: nauc_map_at_1_std value: -5.8229999999999995 - type: nauc_map_at_1_diff1 value: 53.2548 - type: nauc_map_at_3_max value: 30.6136 - type: nauc_map_at_3_std value: -4.7136 - type: nauc_map_at_3_diff1 value: 49.709399999999995 - type: nauc_map_at_5_max value: 30.523699999999998 - type: nauc_map_at_5_std value: -4.288200000000001 - type: nauc_map_at_5_diff1 value: 49.127700000000004 - type: nauc_map_at_10_max value: 30.4224 - type: nauc_map_at_10_std value: -4.1822 - type: nauc_map_at_10_diff1 value: 48.8812 - type: nauc_map_at_20_max value: 30.4446 - type: nauc_map_at_20_std value: -4.0194 - type: nauc_map_at_20_diff1 value: 48.8177 - type: nauc_map_at_100_max value: 30.4531 - type: nauc_map_at_100_std value: -3.9356 - type: nauc_map_at_100_diff1 value: 48.7971 - type: nauc_map_at_1000_max value: 30.4507 - type: nauc_map_at_1000_std value: -3.9337999999999997 - type: nauc_map_at_1000_diff1 value: 48.8055 - type: nauc_recall_at_1_max value: 29.177500000000002 - type: nauc_recall_at_1_std value: -5.8229999999999995 - type: nauc_recall_at_1_diff1 value: 53.2548 - type: nauc_recall_at_3_max 
value: 32.3983 - type: nauc_recall_at_3_std value: -3.2567 - type: nauc_recall_at_3_diff1 value: 45.6552 - type: nauc_recall_at_5_max value: 32.043 - type: nauc_recall_at_5_std value: -1.3823 - type: nauc_recall_at_5_diff1 value: 42.9898 - type: nauc_recall_at_10_max value: 31.272 - type: nauc_recall_at_10_std value: -0.3417 - type: nauc_recall_at_10_diff1 value: 40.5539 - type: nauc_recall_at_20_max value: 31.7395 - type: nauc_recall_at_20_std value: 2.645 - type: nauc_recall_at_20_diff1 value: 38.777499999999996 - type: nauc_recall_at_100_max value: 32.6198 - type: nauc_recall_at_100_std value: 10.1172 - type: nauc_recall_at_100_diff1 value: 34.6806 - type: nauc_recall_at_1000_max value: 33.0633 - type: nauc_recall_at_1000_std value: 19.5697 - type: nauc_recall_at_1000_diff1 value: 29.418699999999998 - type: nauc_precision_at_1_max value: 29.177500000000002 - type: nauc_precision_at_1_std value: -5.8229999999999995 - type: nauc_precision_at_1_diff1 value: 53.2548 - type: nauc_precision_at_3_max value: 32.3983 - type: nauc_precision_at_3_std value: -3.2567 - type: nauc_precision_at_3_diff1 value: 45.6552 - type: nauc_precision_at_5_max value: 32.043 - type: nauc_precision_at_5_std value: -1.3823 - type: nauc_precision_at_5_diff1 value: 42.9898 - type: nauc_precision_at_10_max value: 31.272 - type: nauc_precision_at_10_std value: -0.3417 - type: nauc_precision_at_10_diff1 value: 40.5539 - type: nauc_precision_at_20_max value: 31.7395 - type: nauc_precision_at_20_std value: 2.645 - type: nauc_precision_at_20_diff1 value: 38.777499999999996 - type: nauc_precision_at_100_max value: 32.6198 - type: nauc_precision_at_100_std value: 10.1172 - type: nauc_precision_at_100_diff1 value: 34.6806 - type: nauc_precision_at_1000_max value: 33.0633 - type: nauc_precision_at_1000_std value: 19.5697 - type: nauc_precision_at_1000_diff1 value: 29.418699999999998 - type: nauc_mrr_at_1_max value: 29.217900000000004 - type: nauc_mrr_at_1_std value: -5.8532 - type: nauc_mrr_at_1_diff1 value: 53.283100000000005 - type: nauc_mrr_at_3_max value: 30.6327 - type: nauc_mrr_at_3_std value: -4.7439 - type: nauc_mrr_at_3_diff1 value: 49.7477 - type: nauc_mrr_at_5_max value: 30.5427 - type: nauc_mrr_at_5_std value: -4.3167 - type: nauc_mrr_at_5_diff1 value: 49.152 - type: nauc_mrr_at_10_max value: 30.444100000000002 - type: nauc_mrr_at_10_std value: -4.2066 - type: nauc_mrr_at_10_diff1 value: 48.9038 - type: nauc_mrr_at_20_max value: 30.462899999999998 - type: nauc_mrr_at_20_std value: -4.0467 - type: nauc_mrr_at_20_diff1 value: 48.8397 - type: nauc_mrr_at_100_max value: 30.4714 - type: nauc_mrr_at_100_std value: -3.963 - type: nauc_mrr_at_100_diff1 value: 48.8192 - type: nauc_mrr_at_1000_max value: 30.469 - type: nauc_mrr_at_1000_std value: -3.9613 - type: nauc_mrr_at_1000_diff1 value: 48.8277 - type: main_score value: 45.539 - task: type: Retrieval dataset: name: MTEB CodeSearchNetCCRetrieval (php) type: CoIR-Retrieval/CodeSearchNet-ccr config: php split: test revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8 metrics: - type: ndcg_at_1 value: 25.139 - type: ndcg_at_3 value: 31.922 - type: ndcg_at_5 value: 33.989999999999995 - type: ndcg_at_10 value: 35.942 - type: ndcg_at_20 value: 37.506 - type: ndcg_at_100 value: 39.971000000000004 - type: ndcg_at_1000 value: 42.074 - type: map_at_1 value: 25.139 - type: map_at_3 value: 30.263 - type: map_at_5 value: 31.411 - type: map_at_10 value: 32.218 - type: map_at_20 value: 32.65 - type: map_at_100 value: 32.979 - type: map_at_1000 value: 33.050000000000004 - type: recall_at_1 
value: 25.139 - type: recall_at_3 value: 36.720000000000006 - type: recall_at_5 value: 41.737 - type: recall_at_10 value: 47.766999999999996 - type: recall_at_20 value: 53.932 - type: recall_at_100 value: 67.38300000000001 - type: recall_at_1000 value: 84.416 - type: precision_at_1 value: 25.139 - type: precision_at_3 value: 12.24 - type: precision_at_5 value: 8.347 - type: precision_at_10 value: 4.777 - type: precision_at_20 value: 2.697 - type: precision_at_100 value: 0.674 - type: precision_at_1000 value: 0.084 - type: mrr_at_1 value: 25.1463 - type: mrr_at_3 value: 30.2709 - type: mrr_at_5 value: 31.4126 - type: mrr_at_10 value: 32.2202 - type: mrr_at_20 value: 32.6527 - type: mrr_at_100 value: 32.9822 - type: mrr_at_1000 value: 33.0527 - type: nauc_ndcg_at_1_max value: 24.082600000000003 - type: nauc_ndcg_at_1_std value: -3.9068 - type: nauc_ndcg_at_1_diff1 value: 50.1815 - type: nauc_ndcg_at_3_max value: 23.160700000000002 - type: nauc_ndcg_at_3_std value: -3.3746 - type: nauc_ndcg_at_3_diff1 value: 45.009 - type: nauc_ndcg_at_5_max value: 22.644000000000002 - type: nauc_ndcg_at_5_std value: -3.0027999999999997 - type: nauc_ndcg_at_5_diff1 value: 44.0016 - type: nauc_ndcg_at_10_max value: 22.3578 - type: nauc_ndcg_at_10_std value: -2.5096 - type: nauc_ndcg_at_10_diff1 value: 43.4367 - type: nauc_ndcg_at_20_max value: 22.0477 - type: nauc_ndcg_at_20_std value: -1.7484 - type: nauc_ndcg_at_20_diff1 value: 42.9771 - type: nauc_ndcg_at_100_max value: 21.7016 - type: nauc_ndcg_at_100_std value: -1.0854000000000001 - type: nauc_ndcg_at_100_diff1 value: 42.707 - type: nauc_ndcg_at_1000_max value: 21.988 - type: nauc_ndcg_at_1000_std value: -0.8564999999999999 - type: nauc_ndcg_at_1000_diff1 value: 43.0368 - type: nauc_map_at_1_max value: 24.082600000000003 - type: nauc_map_at_1_std value: -3.9068 - type: nauc_map_at_1_diff1 value: 50.1815 - type: nauc_map_at_3_max value: 23.418 - type: nauc_map_at_3_std value: -3.4922 - type: nauc_map_at_3_diff1 value: 46.19 - type: nauc_map_at_5_max value: 23.1203 - type: nauc_map_at_5_std value: -3.2856000000000005 - type: nauc_map_at_5_diff1 value: 45.6063 - type: nauc_map_at_10_max value: 23.0132 - type: nauc_map_at_10_std value: -3.0803000000000003 - type: nauc_map_at_10_diff1 value: 45.3708 - type: nauc_map_at_20_max value: 22.926199999999998 - type: nauc_map_at_20_std value: -2.8717 - type: nauc_map_at_20_diff1 value: 45.2482 - type: nauc_map_at_100_max value: 22.8776 - type: nauc_map_at_100_std value: -2.7819 - type: nauc_map_at_100_diff1 value: 45.2205 - type: nauc_map_at_1000_max value: 22.886 - type: nauc_map_at_1000_std value: -2.7714 - type: nauc_map_at_1000_diff1 value: 45.231300000000005 - type: nauc_recall_at_1_max value: 24.082600000000003 - type: nauc_recall_at_1_std value: -3.9068 - type: nauc_recall_at_1_diff1 value: 50.1815 - type: nauc_recall_at_3_max value: 22.442500000000003 - type: nauc_recall_at_3_std value: -3.0562 - type: nauc_recall_at_3_diff1 value: 41.797000000000004 - type: nauc_recall_at_5_max value: 21.2749 - type: nauc_recall_at_5_std value: -2.1853000000000002 - type: nauc_recall_at_5_diff1 value: 39.543 - type: nauc_recall_at_10_max value: 20.336399999999998 - type: nauc_recall_at_10_std value: -0.6941 - type: nauc_recall_at_10_diff1 value: 37.7835 - type: nauc_recall_at_20_max value: 19.031799999999997 - type: nauc_recall_at_20_std value: 2.4044 - type: nauc_recall_at_20_diff1 value: 35.6973 - type: nauc_recall_at_100_max value: 16.1657 - type: nauc_recall_at_100_std value: 7.480199999999999 - type: 
nauc_recall_at_100_diff1 value: 32.2845 - type: nauc_recall_at_1000_max value: 16.6175 - type: nauc_recall_at_1000_std value: 17.7626 - type: nauc_recall_at_1000_diff1 value: 29.4846 - type: nauc_precision_at_1_max value: 24.082600000000003 - type: nauc_precision_at_1_std value: -3.9068 - type: nauc_precision_at_1_diff1 value: 50.1815 - type: nauc_precision_at_3_max value: 22.442500000000003 - type: nauc_precision_at_3_std value: -3.0562 - type: nauc_precision_at_3_diff1 value: 41.797000000000004 - type: nauc_precision_at_5_max value: 21.2749 - type: nauc_precision_at_5_std value: -2.1853000000000002 - type: nauc_precision_at_5_diff1 value: 39.543 - type: nauc_precision_at_10_max value: 20.336399999999998 - type: nauc_precision_at_10_std value: -0.6941 - type: nauc_precision_at_10_diff1 value: 37.7835 - type: nauc_precision_at_20_max value: 19.031799999999997 - type: nauc_precision_at_20_std value: 2.4044 - type: nauc_precision_at_20_diff1 value: 35.6973 - type: nauc_precision_at_100_max value: 16.1657 - type: nauc_precision_at_100_std value: 7.480199999999999 - type: nauc_precision_at_100_diff1 value: 32.2845 - type: nauc_precision_at_1000_max value: 16.6175 - type: nauc_precision_at_1000_std value: 17.7626 - type: nauc_precision_at_1000_diff1 value: 29.4846 - type: nauc_mrr_at_1_max value: 23.9848 - type: nauc_mrr_at_1_std value: -3.9669000000000003 - type: nauc_mrr_at_1_diff1 value: 50.152699999999996 - type: nauc_mrr_at_3_max value: 23.3397 - type: nauc_mrr_at_3_std value: -3.5128 - type: nauc_mrr_at_3_diff1 value: 46.1227 - type: nauc_mrr_at_5_max value: 23.0454 - type: nauc_mrr_at_5_std value: -3.3141 - type: nauc_mrr_at_5_diff1 value: 45.561 - type: nauc_mrr_at_10_max value: 22.9526 - type: nauc_mrr_at_10_std value: -3.1052 - type: nauc_mrr_at_10_diff1 value: 45.3316 - type: nauc_mrr_at_20_max value: 22.8654 - type: nauc_mrr_at_20_std value: -2.8967 - type: nauc_mrr_at_20_diff1 value: 45.2089 - type: nauc_mrr_at_100_max value: 22.8164 - type: nauc_mrr_at_100_std value: -2.8074000000000003 - type: nauc_mrr_at_100_diff1 value: 45.1812 - type: nauc_mrr_at_1000_max value: 22.8248 - type: nauc_mrr_at_1000_std value: -2.7968 - type: nauc_mrr_at_1000_diff1 value: 45.191900000000004 - type: main_score value: 35.942 - task: type: Retrieval dataset: name: MTEB CodeSearchNetRetrieval (python) type: code-search-net/code_search_net config: python split: test revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759 metrics: - type: ndcg_at_1 value: 70.89999999999999 - type: ndcg_at_3 value: 80.06400000000001 - type: ndcg_at_5 value: 81.703 - type: ndcg_at_10 value: 83.12 - type: ndcg_at_20 value: 83.67999999999999 - type: ndcg_at_100 value: 84.11 - type: ndcg_at_1000 value: 84.195 - type: map_at_1 value: 70.89999999999999 - type: map_at_3 value: 77.86699999999999 - type: map_at_5 value: 78.77199999999999 - type: map_at_10 value: 79.353 - type: map_at_20 value: 79.508 - type: map_at_100 value: 79.569 - type: map_at_1000 value: 79.571 - type: recall_at_1 value: 70.89999999999999 - type: recall_at_3 value: 86.4 - type: recall_at_5 value: 90.4 - type: recall_at_10 value: 94.8 - type: recall_at_20 value: 97.0 - type: recall_at_100 value: 99.3 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 70.89999999999999 - type: precision_at_3 value: 28.799999999999997 - type: precision_at_5 value: 18.08 - type: precision_at_10 value: 9.48 - type: precision_at_20 value: 4.8500000000000005 - type: precision_at_100 value: 0.993 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 70.89999999999999 
- type: mrr_at_3 value: 77.8667 - type: mrr_at_5 value: 78.7717 - type: mrr_at_10 value: 79.3526 - type: mrr_at_20 value: 79.5084 - type: mrr_at_100 value: 79.5687 - type: mrr_at_1000 value: 79.5713 - type: nauc_ndcg_at_1_max value: 42.7162 - type: nauc_ndcg_at_1_std value: -4.6818 - type: nauc_ndcg_at_1_diff1 value: 70.6364 - type: nauc_ndcg_at_3_max value: 48.1282 - type: nauc_ndcg_at_3_std value: -2.8091 - type: nauc_ndcg_at_3_diff1 value: 67.9426 - type: nauc_ndcg_at_5_max value: 45.713 - type: nauc_ndcg_at_5_std value: -4.0022 - type: nauc_ndcg_at_5_diff1 value: 67.0684 - type: nauc_ndcg_at_10_max value: 45.8762 - type: nauc_ndcg_at_10_std value: -2.8594999999999997 - type: nauc_ndcg_at_10_diff1 value: 67.318 - type: nauc_ndcg_at_20_max value: 45.8448 - type: nauc_ndcg_at_20_std value: -2.9843 - type: nauc_ndcg_at_20_diff1 value: 67.5016 - type: nauc_ndcg_at_100_max value: 45.9045 - type: nauc_ndcg_at_100_std value: -3.1647000000000003 - type: nauc_ndcg_at_100_diff1 value: 67.8211 - type: nauc_ndcg_at_1000_max value: 45.7011 - type: nauc_ndcg_at_1000_std value: -3.4981 - type: nauc_ndcg_at_1000_diff1 value: 67.9137 - type: nauc_map_at_1_max value: 42.7162 - type: nauc_map_at_1_std value: -4.6818 - type: nauc_map_at_1_diff1 value: 70.6364 - type: nauc_map_at_3_max value: 46.5287 - type: nauc_map_at_3_std value: -3.6239 - type: nauc_map_at_3_diff1 value: 68.5879 - type: nauc_map_at_5_max value: 45.291599999999995 - type: nauc_map_at_5_std value: -4.2172 - type: nauc_map_at_5_diff1 value: 68.1788 - type: nauc_map_at_10_max value: 45.31 - type: nauc_map_at_10_std value: -3.8557 - type: nauc_map_at_10_diff1 value: 68.2538 - type: nauc_map_at_20_max value: 45.2841 - type: nauc_map_at_20_std value: -3.92 - type: nauc_map_at_20_diff1 value: 68.2978 - type: nauc_map_at_100_max value: 45.3154 - type: nauc_map_at_100_std value: -3.929 - type: nauc_map_at_100_diff1 value: 68.3362 - type: nauc_map_at_1000_max value: 45.3097 - type: nauc_map_at_1000_std value: -3.9364999999999997 - type: nauc_map_at_1000_diff1 value: 68.3376 - type: nauc_recall_at_1_max value: 42.7162 - type: nauc_recall_at_1_std value: -4.6818 - type: nauc_recall_at_1_diff1 value: 70.6364 - type: nauc_recall_at_3_max value: 55.0798 - type: nauc_recall_at_3_std value: 0.9014 - type: nauc_recall_at_3_diff1 value: 65.2358 - type: nauc_recall_at_5_max value: 47.4148 - type: nauc_recall_at_5_std value: -2.9387 - type: nauc_recall_at_5_diff1 value: 60.644299999999994 - type: nauc_recall_at_10_max value: 50.820600000000006 - type: nauc_recall_at_10_std value: 8.7499 - type: nauc_recall_at_10_diff1 value: 58.34049999999999 - type: nauc_recall_at_20_max value: 54.4382 - type: nauc_recall_at_20_std value: 16.0862 - type: nauc_recall_at_20_diff1 value: 55.5229 - type: nauc_recall_at_100_max value: 79.2317 - type: nauc_recall_at_100_std value: 54.095000000000006 - type: nauc_recall_at_100_diff1 value: 50.6869 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 42.7162 - type: nauc_precision_at_1_std value: -4.6818 - type: nauc_precision_at_1_diff1 value: 70.6364 - type: nauc_precision_at_3_max value: 55.0798 - type: nauc_precision_at_3_std value: 0.9014 - type: nauc_precision_at_3_diff1 value: 65.2358 - type: nauc_precision_at_5_max value: 47.4148 - type: nauc_precision_at_5_std value: -2.9387 - type: nauc_precision_at_5_diff1 value: 60.644299999999994 - type: nauc_precision_at_10_max value: 50.820600000000006 - type: 
nauc_precision_at_10_std value: 8.7499 - type: nauc_precision_at_10_diff1 value: 58.34049999999999 - type: nauc_precision_at_20_max value: 54.4382 - type: nauc_precision_at_20_std value: 16.0862 - type: nauc_precision_at_20_diff1 value: 55.5229 - type: nauc_precision_at_100_max value: 79.2317 - type: nauc_precision_at_100_std value: 54.095000000000006 - type: nauc_precision_at_100_diff1 value: 50.6869 - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_mrr_at_1_max value: 42.7162 - type: nauc_mrr_at_1_std value: -4.6818 - type: nauc_mrr_at_1_diff1 value: 70.6364 - type: nauc_mrr_at_3_max value: 46.5287 - type: nauc_mrr_at_3_std value: -3.6239 - type: nauc_mrr_at_3_diff1 value: 68.5879 - type: nauc_mrr_at_5_max value: 45.291599999999995 - type: nauc_mrr_at_5_std value: -4.2172 - type: nauc_mrr_at_5_diff1 value: 68.1788 - type: nauc_mrr_at_10_max value: 45.31 - type: nauc_mrr_at_10_std value: -3.8557 - type: nauc_mrr_at_10_diff1 value: 68.2538 - type: nauc_mrr_at_20_max value: 45.2841 - type: nauc_mrr_at_20_std value: -3.92 - type: nauc_mrr_at_20_diff1 value: 68.2978 - type: nauc_mrr_at_100_max value: 45.3154 - type: nauc_mrr_at_100_std value: -3.929 - type: nauc_mrr_at_100_diff1 value: 68.3362 - type: nauc_mrr_at_1000_max value: 45.3097 - type: nauc_mrr_at_1000_std value: -3.9364999999999997 - type: nauc_mrr_at_1000_diff1 value: 68.3376 - type: main_score value: 83.12 - task: type: Retrieval dataset: name: MTEB CodeSearchNetRetrieval (javascript) type: code-search-net/code_search_net config: javascript split: test revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759 metrics: - type: ndcg_at_1 value: 57.99999999999999 - type: ndcg_at_3 value: 67.24900000000001 - type: ndcg_at_5 value: 68.781 - type: ndcg_at_10 value: 70.34 - type: ndcg_at_20 value: 71.24000000000001 - type: ndcg_at_100 value: 72.617 - type: ndcg_at_1000 value: 73.436 - type: map_at_1 value: 57.99999999999999 - type: map_at_3 value: 64.983 - type: map_at_5 value: 65.838 - type: map_at_10 value: 66.50500000000001 - type: map_at_20 value: 66.74600000000001 - type: map_at_100 value: 66.93299999999999 - type: map_at_1000 value: 66.959 - type: recall_at_1 value: 57.99999999999999 - type: recall_at_3 value: 73.8 - type: recall_at_5 value: 77.5 - type: recall_at_10 value: 82.19999999999999 - type: recall_at_20 value: 85.8 - type: recall_at_100 value: 93.30000000000001 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 57.99999999999999 - type: precision_at_3 value: 24.6 - type: precision_at_5 value: 15.5 - type: precision_at_10 value: 8.219999999999999 - type: precision_at_20 value: 4.29 - type: precision_at_100 value: 0.9329999999999999 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 57.99999999999999 - type: mrr_at_3 value: 64.9833 - type: mrr_at_5 value: 65.8383 - type: mrr_at_10 value: 66.50500000000001 - type: mrr_at_20 value: 66.7464 - type: mrr_at_100 value: 66.9326 - type: mrr_at_1000 value: 66.9593 - type: nauc_ndcg_at_1_max value: 51.0918 - type: nauc_ndcg_at_1_std value: 12.0501 - type: nauc_ndcg_at_1_diff1 value: 69.1716 - type: nauc_ndcg_at_3_max value: 59.404199999999996 - type: nauc_ndcg_at_3_std value: 22.4787 - type: nauc_ndcg_at_3_diff1 value: 66.2602 - type: nauc_ndcg_at_5_max value: 60.711000000000006 - type: nauc_ndcg_at_5_std value: 24.1272 - type: nauc_ndcg_at_5_diff1 value: 65.9406 - type: nauc_ndcg_at_10_max value: 61.492599999999996 - type: nauc_ndcg_at_10_std value: 26.6758 - 
type: nauc_ndcg_at_10_diff1 value: 66.1164 - type: nauc_ndcg_at_20_max value: 61.34610000000001 - type: nauc_ndcg_at_20_std value: 27.331 - type: nauc_ndcg_at_20_diff1 value: 66.981 - type: nauc_ndcg_at_100_max value: 60.50020000000001 - type: nauc_ndcg_at_100_std value: 26.623 - type: nauc_ndcg_at_100_diff1 value: 66.4658 - type: nauc_ndcg_at_1000_max value: 59.600500000000004 - type: nauc_ndcg_at_1000_std value: 24.3596 - type: nauc_ndcg_at_1000_diff1 value: 66.7619 - type: nauc_map_at_1_max value: 51.0918 - type: nauc_map_at_1_std value: 12.0501 - type: nauc_map_at_1_diff1 value: 69.1716 - type: nauc_map_at_3_max value: 57.2093 - type: nauc_map_at_3_std value: 19.4523 - type: nauc_map_at_3_diff1 value: 67.0065 - type: nauc_map_at_5_max value: 57.81699999999999 - type: nauc_map_at_5_std value: 20.2597 - type: nauc_map_at_5_diff1 value: 66.8577 - type: nauc_map_at_10_max value: 58.052099999999996 - type: nauc_map_at_10_std value: 21.195 - type: nauc_map_at_10_diff1 value: 66.9095 - type: nauc_map_at_20_max value: 57.9955 - type: nauc_map_at_20_std value: 21.3121 - type: nauc_map_at_20_diff1 value: 67.1257 - type: nauc_map_at_100_max value: 57.8974 - type: nauc_map_at_100_std value: 21.2576 - type: nauc_map_at_100_diff1 value: 67.0765 - type: nauc_map_at_1000_max value: 57.873799999999996 - type: nauc_map_at_1000_std value: 21.195 - type: nauc_map_at_1000_diff1 value: 67.08579999999999 - type: nauc_recall_at_1_max value: 51.0918 - type: nauc_recall_at_1_std value: 12.0501 - type: nauc_recall_at_1_diff1 value: 69.1716 - type: nauc_recall_at_3_max value: 67.0934 - type: nauc_recall_at_3_std value: 33.2241 - type: nauc_recall_at_3_diff1 value: 63.65769999999999 - type: nauc_recall_at_5_max value: 72.2191 - type: nauc_recall_at_5_std value: 39.5657 - type: nauc_recall_at_5_diff1 value: 62.3367 - type: nauc_recall_at_10_max value: 78.3358 - type: nauc_recall_at_10_std value: 54.093599999999995 - type: nauc_recall_at_10_diff1 value: 62.605900000000005 - type: nauc_recall_at_20_max value: 81.0991 - type: nauc_recall_at_20_std value: 64.9068 - type: nauc_recall_at_20_diff1 value: 67.7761 - type: nauc_recall_at_100_max value: 85.0279 - type: nauc_recall_at_100_std value: 87.47930000000001 - type: nauc_recall_at_100_diff1 value: 58.818000000000005 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 51.0918 - type: nauc_precision_at_1_std value: 12.0501 - type: nauc_precision_at_1_diff1 value: 69.1716 - type: nauc_precision_at_3_max value: 67.0934 - type: nauc_precision_at_3_std value: 33.2241 - type: nauc_precision_at_3_diff1 value: 63.65769999999999 - type: nauc_precision_at_5_max value: 72.2191 - type: nauc_precision_at_5_std value: 39.5657 - type: nauc_precision_at_5_diff1 value: 62.3367 - type: nauc_precision_at_10_max value: 78.3358 - type: nauc_precision_at_10_std value: 54.093599999999995 - type: nauc_precision_at_10_diff1 value: 62.605900000000005 - type: nauc_precision_at_20_max value: 81.0991 - type: nauc_precision_at_20_std value: 64.9068 - type: nauc_precision_at_20_diff1 value: 67.7761 - type: nauc_precision_at_100_max value: 85.0279 - type: nauc_precision_at_100_std value: 87.47930000000001 - type: nauc_precision_at_100_diff1 value: 58.818000000000005 - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_mrr_at_1_max value: 51.0918 - type: nauc_mrr_at_1_std value: 
12.0501 - type: nauc_mrr_at_1_diff1 value: 69.1716 - type: nauc_mrr_at_3_max value: 57.2093 - type: nauc_mrr_at_3_std value: 19.4523 - type: nauc_mrr_at_3_diff1 value: 67.0065 - type: nauc_mrr_at_5_max value: 57.81699999999999 - type: nauc_mrr_at_5_std value: 20.2597 - type: nauc_mrr_at_5_diff1 value: 66.8577 - type: nauc_mrr_at_10_max value: 58.052099999999996 - type: nauc_mrr_at_10_std value: 21.195 - type: nauc_mrr_at_10_diff1 value: 66.9095 - type: nauc_mrr_at_20_max value: 57.9955 - type: nauc_mrr_at_20_std value: 21.3121 - type: nauc_mrr_at_20_diff1 value: 67.1257 - type: nauc_mrr_at_100_max value: 57.8974 - type: nauc_mrr_at_100_std value: 21.2576 - type: nauc_mrr_at_100_diff1 value: 67.0765 - type: nauc_mrr_at_1000_max value: 57.873799999999996 - type: nauc_mrr_at_1000_std value: 21.195 - type: nauc_mrr_at_1000_diff1 value: 67.08579999999999 - type: main_score value: 70.34 - task: type: Retrieval dataset: name: MTEB CodeSearchNetRetrieval (go) type: code-search-net/code_search_net config: go split: test revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759 metrics: - type: ndcg_at_1 value: 75.6 - type: ndcg_at_3 value: 84.112 - type: ndcg_at_5 value: 85.351 - type: ndcg_at_10 value: 86.139 - type: ndcg_at_20 value: 86.599 - type: ndcg_at_100 value: 86.971 - type: ndcg_at_1000 value: 87.086 - type: map_at_1 value: 75.6 - type: map_at_3 value: 82.1 - type: map_at_5 value: 82.78999999999999 - type: map_at_10 value: 83.122 - type: map_at_20 value: 83.25099999999999 - type: map_at_100 value: 83.30300000000001 - type: map_at_1000 value: 83.307 - type: recall_at_1 value: 75.6 - type: recall_at_3 value: 89.9 - type: recall_at_5 value: 92.9 - type: recall_at_10 value: 95.3 - type: recall_at_20 value: 97.1 - type: recall_at_100 value: 99.1 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 75.6 - type: precision_at_3 value: 29.967 - type: precision_at_5 value: 18.58 - type: precision_at_10 value: 9.53 - type: precision_at_20 value: 4.855 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 75.6 - type: mrr_at_3 value: 82.1 - type: mrr_at_5 value: 82.78999999999999 - type: mrr_at_10 value: 83.12230000000001 - type: mrr_at_20 value: 83.2511 - type: mrr_at_100 value: 83.3027 - type: mrr_at_1000 value: 83.307 - type: nauc_ndcg_at_1_max value: 50.9856 - type: nauc_ndcg_at_1_std value: 6.729 - type: nauc_ndcg_at_1_diff1 value: 75.68589999999999 - type: nauc_ndcg_at_3_max value: 59.266 - type: nauc_ndcg_at_3_std value: 10.0957 - type: nauc_ndcg_at_3_diff1 value: 73.3044 - type: nauc_ndcg_at_5_max value: 58.7545 - type: nauc_ndcg_at_5_std value: 9.295399999999999 - type: nauc_ndcg_at_5_diff1 value: 73.9355 - type: nauc_ndcg_at_10_max value: 58.7538 - type: nauc_ndcg_at_10_std value: 10.335999999999999 - type: nauc_ndcg_at_10_diff1 value: 74.01870000000001 - type: nauc_ndcg_at_20_max value: 57.9057 - type: nauc_ndcg_at_20_std value: 10.115300000000001 - type: nauc_ndcg_at_20_diff1 value: 74.456 - type: nauc_ndcg_at_100_max value: 57.198800000000006 - type: nauc_ndcg_at_100_std value: 9.2269 - type: nauc_ndcg_at_100_diff1 value: 74.2418 - type: nauc_ndcg_at_1000_max value: 57.1141 - type: nauc_ndcg_at_1000_std value: 9.366900000000001 - type: nauc_ndcg_at_1000_diff1 value: 74.3329 - type: nauc_map_at_1_max value: 50.9856 - type: nauc_map_at_1_std value: 6.729 - type: nauc_map_at_1_diff1 value: 75.68589999999999 - type: nauc_map_at_3_max value: 57.0017 - type: nauc_map_at_3_std value: 9.2059 - type: nauc_map_at_3_diff1 value: 73.9956 - type: 
nauc_map_at_5_max value: 56.6856 - type: nauc_map_at_5_std value: 8.8058 - type: nauc_map_at_5_diff1 value: 74.3367 - type: nauc_map_at_10_max value: 56.652100000000004 - type: nauc_map_at_10_std value: 9.1465 - type: nauc_map_at_10_diff1 value: 74.37519999999999 - type: nauc_map_at_20_max value: 56.4431 - type: nauc_map_at_20_std value: 9.0962 - type: nauc_map_at_20_diff1 value: 74.4763 - type: nauc_map_at_100_max value: 56.3572 - type: nauc_map_at_100_std value: 8.9981 - type: nauc_map_at_100_diff1 value: 74.4551 - type: nauc_map_at_1000_max value: 56.3527 - type: nauc_map_at_1000_std value: 9.0022 - type: nauc_map_at_1000_diff1 value: 74.4583 - type: nauc_recall_at_1_max value: 50.9856 - type: nauc_recall_at_1_std value: 6.729 - type: nauc_recall_at_1_diff1 value: 75.68589999999999 - type: nauc_recall_at_3_max value: 69.7291 - type: nauc_recall_at_3_std value: 14.183000000000002 - type: nauc_recall_at_3_diff1 value: 70.07900000000001 - type: nauc_recall_at_5_max value: 71.5009 - type: nauc_recall_at_5_std value: 11.9764 - type: nauc_recall_at_5_diff1 value: 71.5765 - type: nauc_recall_at_10_max value: 77.7927 - type: nauc_recall_at_10_std value: 22.2123 - type: nauc_recall_at_10_diff1 value: 71.0601 - type: nauc_recall_at_20_max value: 75.421 - type: nauc_recall_at_20_std value: 25.5385 - type: nauc_recall_at_20_diff1 value: 76.5318 - type: nauc_recall_at_100_max value: 64.4206 - type: nauc_recall_at_100_std value: -4.8864 - type: nauc_recall_at_100_diff1 value: 65.2765 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 50.9856 - type: nauc_precision_at_1_std value: 6.729 - type: nauc_precision_at_1_diff1 value: 75.68589999999999 - type: nauc_precision_at_3_max value: 69.7291 - type: nauc_precision_at_3_std value: 14.183000000000002 - type: nauc_precision_at_3_diff1 value: 70.07900000000001 - type: nauc_precision_at_5_max value: 71.5009 - type: nauc_precision_at_5_std value: 11.9764 - type: nauc_precision_at_5_diff1 value: 71.5765 - type: nauc_precision_at_10_max value: 77.7927 - type: nauc_precision_at_10_std value: 22.2123 - type: nauc_precision_at_10_diff1 value: 71.0601 - type: nauc_precision_at_20_max value: 75.421 - type: nauc_precision_at_20_std value: 25.5385 - type: nauc_precision_at_20_diff1 value: 76.5318 - type: nauc_precision_at_100_max value: 64.4206 - type: nauc_precision_at_100_std value: -4.8864 - type: nauc_precision_at_100_diff1 value: 65.2765 - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_mrr_at_1_max value: 50.9856 - type: nauc_mrr_at_1_std value: 6.729 - type: nauc_mrr_at_1_diff1 value: 75.68589999999999 - type: nauc_mrr_at_3_max value: 57.0017 - type: nauc_mrr_at_3_std value: 9.2059 - type: nauc_mrr_at_3_diff1 value: 73.9956 - type: nauc_mrr_at_5_max value: 56.6856 - type: nauc_mrr_at_5_std value: 8.8058 - type: nauc_mrr_at_5_diff1 value: 74.3367 - type: nauc_mrr_at_10_max value: 56.652100000000004 - type: nauc_mrr_at_10_std value: 9.1465 - type: nauc_mrr_at_10_diff1 value: 74.37519999999999 - type: nauc_mrr_at_20_max value: 56.4431 - type: nauc_mrr_at_20_std value: 9.0962 - type: nauc_mrr_at_20_diff1 value: 74.4763 - type: nauc_mrr_at_100_max value: 56.3572 - type: nauc_mrr_at_100_std value: 8.9981 - type: nauc_mrr_at_100_diff1 value: 74.4551 - type: nauc_mrr_at_1000_max value: 56.3527 - type: nauc_mrr_at_1000_std value: 9.0022 - type: 
nauc_mrr_at_1000_diff1 value: 74.4583 - type: main_score value: 86.139 - task: type: Retrieval dataset: name: MTEB CodeSearchNetRetrieval (ruby) type: code-search-net/code_search_net config: ruby split: test revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759 metrics: - type: ndcg_at_1 value: 61.3 - type: ndcg_at_3 value: 71.232 - type: ndcg_at_5 value: 73.1 - type: ndcg_at_10 value: 74.736 - type: ndcg_at_20 value: 75.511 - type: ndcg_at_100 value: 76.416 - type: ndcg_at_1000 value: 76.996 - type: map_at_1 value: 61.3 - type: map_at_3 value: 68.85 - type: map_at_5 value: 69.895 - type: map_at_10 value: 70.581 - type: map_at_20 value: 70.80199999999999 - type: map_at_100 value: 70.94200000000001 - type: map_at_1000 value: 70.961 - type: recall_at_1 value: 61.3 - type: recall_at_3 value: 78.10000000000001 - type: recall_at_5 value: 82.6 - type: recall_at_10 value: 87.6 - type: recall_at_20 value: 90.60000000000001 - type: recall_at_100 value: 95.3 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 61.3 - type: precision_at_3 value: 26.033 - type: precision_at_5 value: 16.520000000000003 - type: precision_at_10 value: 8.76 - type: precision_at_20 value: 4.53 - type: precision_at_100 value: 0.9530000000000001 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 61.3 - type: mrr_at_3 value: 68.85 - type: mrr_at_5 value: 69.895 - type: mrr_at_10 value: 70.58109999999999 - type: mrr_at_20 value: 70.8024 - type: mrr_at_100 value: 70.94160000000001 - type: mrr_at_1000 value: 70.96090000000001 - type: nauc_ndcg_at_1_max value: 54.2597 - type: nauc_ndcg_at_1_std value: 9.9915 - type: nauc_ndcg_at_1_diff1 value: 72.0029 - type: nauc_ndcg_at_3_max value: 58.517799999999994 - type: nauc_ndcg_at_3_std value: 13.256599999999999 - type: nauc_ndcg_at_3_diff1 value: 67.861 - type: nauc_ndcg_at_5_max value: 59.1541 - type: nauc_ndcg_at_5_std value: 16.237099999999998 - type: nauc_ndcg_at_5_diff1 value: 67.8155 - type: nauc_ndcg_at_10_max value: 59.1703 - type: nauc_ndcg_at_10_std value: 17.8202 - type: nauc_ndcg_at_10_diff1 value: 67.6082 - type: nauc_ndcg_at_20_max value: 58.829299999999996 - type: nauc_ndcg_at_20_std value: 18.001900000000003 - type: nauc_ndcg_at_20_diff1 value: 67.6747 - type: nauc_ndcg_at_100_max value: 58.675399999999996 - type: nauc_ndcg_at_100_std value: 17.7394 - type: nauc_ndcg_at_100_diff1 value: 68.02810000000001 - type: nauc_ndcg_at_1000_max value: 58.333400000000005 - type: nauc_ndcg_at_1000_std value: 16.169900000000002 - type: nauc_ndcg_at_1000_diff1 value: 68.3788 - type: nauc_map_at_1_max value: 54.2597 - type: nauc_map_at_1_std value: 9.9915 - type: nauc_map_at_1_diff1 value: 72.0029 - type: nauc_map_at_3_max value: 57.4277 - type: nauc_map_at_3_std value: 12.1778 - type: nauc_map_at_3_diff1 value: 69.0312 - type: nauc_map_at_5_max value: 57.7291 - type: nauc_map_at_5_std value: 13.655800000000001 - type: nauc_map_at_5_diff1 value: 69.0376 - type: nauc_map_at_10_max value: 57.7091 - type: nauc_map_at_10_std value: 14.2236 - type: nauc_map_at_10_diff1 value: 68.99849999999999 - type: nauc_map_at_20_max value: 57.605700000000006 - type: nauc_map_at_20_std value: 14.2305 - type: nauc_map_at_20_diff1 value: 69.0304 - type: nauc_map_at_100_max value: 57.6007 - type: nauc_map_at_100_std value: 14.219499999999998 - type: nauc_map_at_100_diff1 value: 69.0682 - type: nauc_map_at_1000_max value: 57.5939 - type: nauc_map_at_1000_std value: 14.1793 - type: nauc_map_at_1000_diff1 value: 69.0767 - type: nauc_recall_at_1_max value: 54.2597 - type: 
nauc_recall_at_1_std value: 9.9915 - type: nauc_recall_at_1_diff1 value: 72.0029 - type: nauc_recall_at_3_max value: 62.5301 - type: nauc_recall_at_3_std value: 17.372799999999998 - type: nauc_recall_at_3_diff1 value: 63.488 - type: nauc_recall_at_5_max value: 65.4804 - type: nauc_recall_at_5_std value: 28.376 - type: nauc_recall_at_5_diff1 value: 62.4274 - type: nauc_recall_at_10_max value: 67.7459 - type: nauc_recall_at_10_std value: 40.8339 - type: nauc_recall_at_10_diff1 value: 59.2704 - type: nauc_recall_at_20_max value: 67.4241 - type: nauc_recall_at_20_std value: 49.1244 - type: nauc_recall_at_20_diff1 value: 57.3728 - type: nauc_recall_at_100_max value: 71.1514 - type: nauc_recall_at_100_std value: 71.35510000000001 - type: nauc_recall_at_100_diff1 value: 55.964800000000004 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 54.2597 - type: nauc_precision_at_1_std value: 9.9915 - type: nauc_precision_at_1_diff1 value: 72.0029 - type: nauc_precision_at_3_max value: 62.5301 - type: nauc_precision_at_3_std value: 17.372799999999998 - type: nauc_precision_at_3_diff1 value: 63.488 - type: nauc_precision_at_5_max value: 65.4804 - type: nauc_precision_at_5_std value: 28.376 - type: nauc_precision_at_5_diff1 value: 62.4274 - type: nauc_precision_at_10_max value: 67.7459 - type: nauc_precision_at_10_std value: 40.8339 - type: nauc_precision_at_10_diff1 value: 59.2704 - type: nauc_precision_at_20_max value: 67.4241 - type: nauc_precision_at_20_std value: 49.1244 - type: nauc_precision_at_20_diff1 value: 57.3728 - type: nauc_precision_at_100_max value: 71.1514 - type: nauc_precision_at_100_std value: 71.35510000000001 - type: nauc_precision_at_100_diff1 value: 55.964800000000004 - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_mrr_at_1_max value: 54.2597 - type: nauc_mrr_at_1_std value: 9.9915 - type: nauc_mrr_at_1_diff1 value: 72.0029 - type: nauc_mrr_at_3_max value: 57.4277 - type: nauc_mrr_at_3_std value: 12.1778 - type: nauc_mrr_at_3_diff1 value: 69.0312 - type: nauc_mrr_at_5_max value: 57.7291 - type: nauc_mrr_at_5_std value: 13.655800000000001 - type: nauc_mrr_at_5_diff1 value: 69.0376 - type: nauc_mrr_at_10_max value: 57.7091 - type: nauc_mrr_at_10_std value: 14.2236 - type: nauc_mrr_at_10_diff1 value: 68.99849999999999 - type: nauc_mrr_at_20_max value: 57.605700000000006 - type: nauc_mrr_at_20_std value: 14.2305 - type: nauc_mrr_at_20_diff1 value: 69.0304 - type: nauc_mrr_at_100_max value: 57.6007 - type: nauc_mrr_at_100_std value: 14.219499999999998 - type: nauc_mrr_at_100_diff1 value: 69.0682 - type: nauc_mrr_at_1000_max value: 57.5939 - type: nauc_mrr_at_1000_std value: 14.1793 - type: nauc_mrr_at_1000_diff1 value: 69.0767 - type: main_score value: 74.736 - task: type: Retrieval dataset: name: MTEB CodeSearchNetRetrieval (java) type: code-search-net/code_search_net config: java split: test revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759 metrics: - type: ndcg_at_1 value: 55.1 - type: ndcg_at_3 value: 66.89399999999999 - type: ndcg_at_5 value: 68.89999999999999 - type: ndcg_at_10 value: 70.89 - type: ndcg_at_20 value: 72.016 - type: ndcg_at_100 value: 73.047 - type: ndcg_at_1000 value: 73.553 - type: map_at_1 value: 55.1 - type: map_at_3 value: 64.05 - type: map_at_5 value: 65.18 - type: map_at_10 value: 66.012 - type: map_at_20 value: 66.328 - type: 
map_at_100 value: 66.483 - type: map_at_1000 value: 66.498 - type: recall_at_1 value: 55.1 - type: recall_at_3 value: 75.1 - type: recall_at_5 value: 79.9 - type: recall_at_10 value: 86.0 - type: recall_at_20 value: 90.4 - type: recall_at_100 value: 95.8 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 55.1 - type: precision_at_3 value: 25.033 - type: precision_at_5 value: 15.98 - type: precision_at_10 value: 8.6 - type: precision_at_20 value: 4.52 - type: precision_at_100 value: 0.958 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 55.1 - type: mrr_at_3 value: 64.05 - type: mrr_at_5 value: 65.18 - type: mrr_at_10 value: 66.0123 - type: mrr_at_20 value: 66.32820000000001 - type: mrr_at_100 value: 66.4827 - type: mrr_at_1000 value: 66.49810000000001 - type: nauc_ndcg_at_1_max value: 30.206100000000003 - type: nauc_ndcg_at_1_std value: -14.6389 - type: nauc_ndcg_at_1_diff1 value: 61.8849 - type: nauc_ndcg_at_3_max value: 32.7259 - type: nauc_ndcg_at_3_std value: -11.568399999999999 - type: nauc_ndcg_at_3_diff1 value: 59.918800000000005 - type: nauc_ndcg_at_5_max value: 34.1822 - type: nauc_ndcg_at_5_std value: -8.104 - type: nauc_ndcg_at_5_diff1 value: 59.434799999999996 - type: nauc_ndcg_at_10_max value: 36.1247 - type: nauc_ndcg_at_10_std value: -6.585100000000001 - type: nauc_ndcg_at_10_diff1 value: 59.2885 - type: nauc_ndcg_at_20_max value: 35.9396 - type: nauc_ndcg_at_20_std value: -6.0885 - type: nauc_ndcg_at_20_diff1 value: 59.4417 - type: nauc_ndcg_at_100_max value: 35.951499999999996 - type: nauc_ndcg_at_100_std value: -6.1491 - type: nauc_ndcg_at_100_diff1 value: 60.3437 - type: nauc_ndcg_at_1000_max value: 34.7092 - type: nauc_ndcg_at_1000_std value: -8.0607 - type: nauc_ndcg_at_1000_diff1 value: 60.0215 - type: nauc_map_at_1_max value: 30.206100000000003 - type: nauc_map_at_1_std value: -14.6389 - type: nauc_map_at_1_diff1 value: 61.8849 - type: nauc_map_at_3_max value: 31.9303 - type: nauc_map_at_3_std value: -12.651200000000001 - type: nauc_map_at_3_diff1 value: 60.33 - type: nauc_map_at_5_max value: 32.6537 - type: nauc_map_at_5_std value: -10.8746 - type: nauc_map_at_5_diff1 value: 60.0754 - type: nauc_map_at_10_max value: 33.269 - type: nauc_map_at_10_std value: -10.4054 - type: nauc_map_at_10_diff1 value: 60.0235 - type: nauc_map_at_20_max value: 33.1875 - type: nauc_map_at_20_std value: -10.3417 - type: nauc_map_at_20_diff1 value: 60.067899999999995 - type: nauc_map_at_100_max value: 33.213 - type: nauc_map_at_100_std value: -10.3299 - type: nauc_map_at_100_diff1 value: 60.166399999999996 - type: nauc_map_at_1000_max value: 33.186 - type: nauc_map_at_1000_std value: -10.3713 - type: nauc_map_at_1000_diff1 value: 60.16010000000001 - type: nauc_recall_at_1_max value: 30.206100000000003 - type: nauc_recall_at_1_std value: -14.6389 - type: nauc_recall_at_1_diff1 value: 61.8849 - type: nauc_recall_at_3_max value: 35.7096 - type: nauc_recall_at_3_std value: -7.4548000000000005 - type: nauc_recall_at_3_diff1 value: 58.475699999999996 - type: nauc_recall_at_5_max value: 41.0231 - type: nauc_recall_at_5_std value: 4.4421 - type: nauc_recall_at_5_diff1 value: 56.7391 - type: nauc_recall_at_10_max value: 54.789 - type: nauc_recall_at_10_std value: 17.7044 - type: nauc_recall_at_10_diff1 value: 55.0592 - type: nauc_recall_at_20_max value: 60.7809 - type: nauc_recall_at_20_std value: 32.4021 - type: nauc_recall_at_20_diff1 value: 54.7663 - type: nauc_recall_at_100_max value: 89.4591 - type: nauc_recall_at_100_std value: 76.2783 - type: 
nauc_recall_at_100_diff1 value: 74.4576 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 30.206100000000003 - type: nauc_precision_at_1_std value: -14.6389 - type: nauc_precision_at_1_diff1 value: 61.8849 - type: nauc_precision_at_3_max value: 35.7096 - type: nauc_precision_at_3_std value: -7.4548000000000005 - type: nauc_precision_at_3_diff1 value: 58.475699999999996 - type: nauc_precision_at_5_max value: 41.0231 - type: nauc_precision_at_5_std value: 4.4421 - type: nauc_precision_at_5_diff1 value: 56.7391 - type: nauc_precision_at_10_max value: 54.789 - type: nauc_precision_at_10_std value: 17.7044 - type: nauc_precision_at_10_diff1 value: 55.0592 - type: nauc_precision_at_20_max value: 60.7809 - type: nauc_precision_at_20_std value: 32.4021 - type: nauc_precision_at_20_diff1 value: 54.7663 - type: nauc_precision_at_100_max value: 89.4591 - type: nauc_precision_at_100_std value: 76.2783 - type: nauc_precision_at_100_diff1 value: 74.4576 - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_mrr_at_1_max value: 30.206100000000003 - type: nauc_mrr_at_1_std value: -14.6389 - type: nauc_mrr_at_1_diff1 value: 61.8849 - type: nauc_mrr_at_3_max value: 31.9303 - type: nauc_mrr_at_3_std value: -12.651200000000001 - type: nauc_mrr_at_3_diff1 value: 60.33 - type: nauc_mrr_at_5_max value: 32.6537 - type: nauc_mrr_at_5_std value: -10.8746 - type: nauc_mrr_at_5_diff1 value: 60.0754 - type: nauc_mrr_at_10_max value: 33.269 - type: nauc_mrr_at_10_std value: -10.4054 - type: nauc_mrr_at_10_diff1 value: 60.0235 - type: nauc_mrr_at_20_max value: 33.1875 - type: nauc_mrr_at_20_std value: -10.3417 - type: nauc_mrr_at_20_diff1 value: 60.067899999999995 - type: nauc_mrr_at_100_max value: 33.213 - type: nauc_mrr_at_100_std value: -10.3299 - type: nauc_mrr_at_100_diff1 value: 60.166399999999996 - type: nauc_mrr_at_1000_max value: 33.186 - type: nauc_mrr_at_1000_std value: -10.3713 - type: nauc_mrr_at_1000_diff1 value: 60.16010000000001 - type: main_score value: 70.89 - task: type: Retrieval dataset: name: MTEB CodeSearchNetRetrieval (php) type: code-search-net/code_search_net config: php split: test revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759 metrics: - type: ndcg_at_1 value: 56.89999999999999 - type: ndcg_at_3 value: 69.128 - type: ndcg_at_5 value: 71.495 - type: ndcg_at_10 value: 72.92999999999999 - type: ndcg_at_20 value: 73.775 - type: ndcg_at_100 value: 74.476 - type: ndcg_at_1000 value: 75.075 - type: map_at_1 value: 56.89999999999999 - type: map_at_3 value: 66.10000000000001 - type: map_at_5 value: 67.425 - type: map_at_10 value: 68.024 - type: map_at_20 value: 68.26100000000001 - type: map_at_100 value: 68.357 - type: map_at_1000 value: 68.376 - type: recall_at_1 value: 56.89999999999999 - type: recall_at_3 value: 77.9 - type: recall_at_5 value: 83.6 - type: recall_at_10 value: 88.0 - type: recall_at_20 value: 91.3 - type: recall_at_100 value: 95.1 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 56.89999999999999 - type: precision_at_3 value: 25.967000000000002 - type: precision_at_5 value: 16.72 - type: precision_at_10 value: 8.799999999999999 - type: precision_at_20 value: 4.565 - type: precision_at_100 value: 0.951 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 56.89999999999999 - type: mrr_at_3 value: 66.10000000000001 - type: mrr_at_5 
value: 67.425 - type: mrr_at_10 value: 68.0238 - type: mrr_at_20 value: 68.2613 - type: mrr_at_100 value: 68.35719999999999 - type: mrr_at_1000 value: 68.3763 - type: nauc_ndcg_at_1_max value: 43.5297 - type: nauc_ndcg_at_1_std value: 7.986600000000001 - type: nauc_ndcg_at_1_diff1 value: 65.95689999999999 - type: nauc_ndcg_at_3_max value: 52.166500000000006 - type: nauc_ndcg_at_3_std value: 17.0778 - type: nauc_ndcg_at_3_diff1 value: 60.8598 - type: nauc_ndcg_at_5_max value: 53.1733 - type: nauc_ndcg_at_5_std value: 18.7316 - type: nauc_ndcg_at_5_diff1 value: 61.4908 - type: nauc_ndcg_at_10_max value: 53.6245 - type: nauc_ndcg_at_10_std value: 19.5627 - type: nauc_ndcg_at_10_diff1 value: 61.9788 - type: nauc_ndcg_at_20_max value: 53.725199999999994 - type: nauc_ndcg_at_20_std value: 20.5901 - type: nauc_ndcg_at_20_diff1 value: 62.480199999999996 - type: nauc_ndcg_at_100_max value: 53.083499999999994 - type: nauc_ndcg_at_100_std value: 19.8779 - type: nauc_ndcg_at_100_diff1 value: 62.849 - type: nauc_ndcg_at_1000_max value: 51.9568 - type: nauc_ndcg_at_1000_std value: 17.8629 - type: nauc_ndcg_at_1000_diff1 value: 62.7251 - type: nauc_map_at_1_max value: 43.5297 - type: nauc_map_at_1_std value: 7.986600000000001 - type: nauc_map_at_1_diff1 value: 65.95689999999999 - type: nauc_map_at_3_max value: 49.7136 - type: nauc_map_at_3_std value: 14.054400000000001 - type: nauc_map_at_3_diff1 value: 62.3127 - type: nauc_map_at_5_max value: 50.138400000000004 - type: nauc_map_at_5_std value: 14.7824 - type: nauc_map_at_5_diff1 value: 62.6784 - type: nauc_map_at_10_max value: 50.2613 - type: nauc_map_at_10_std value: 15.024899999999999 - type: nauc_map_at_10_diff1 value: 62.864200000000004 - type: nauc_map_at_20_max value: 50.267300000000006 - type: nauc_map_at_20_std value: 15.234300000000001 - type: nauc_map_at_20_diff1 value: 63.00130000000001 - type: nauc_map_at_100_max value: 50.1927 - type: nauc_map_at_100_std value: 15.1701 - type: nauc_map_at_100_diff1 value: 63.0549 - type: nauc_map_at_1000_max value: 50.1623 - type: nauc_map_at_1000_std value: 15.118500000000001 - type: nauc_map_at_1000_diff1 value: 63.048300000000005 - type: nauc_recall_at_1_max value: 43.5297 - type: nauc_recall_at_1_std value: 7.986600000000001 - type: nauc_recall_at_1_diff1 value: 65.95689999999999 - type: nauc_recall_at_3_max value: 61.7214 - type: nauc_recall_at_3_std value: 29.1046 - type: nauc_recall_at_3_diff1 value: 55.1971 - type: nauc_recall_at_5_max value: 68.1151 - type: nauc_recall_at_5_std value: 38.587700000000005 - type: nauc_recall_at_5_diff1 value: 55.886 - type: nauc_recall_at_10_max value: 75.3834 - type: nauc_recall_at_10_std value: 49.6516 - type: nauc_recall_at_10_diff1 value: 57.0852 - type: nauc_recall_at_20_max value: 83.7342 - type: nauc_recall_at_20_std value: 69.9947 - type: nauc_recall_at_20_diff1 value: 60.002500000000005 - type: nauc_recall_at_100_max value: 91.4204 - type: nauc_recall_at_100_std value: 89.0309 - type: nauc_recall_at_100_diff1 value: 65.7358 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 43.5297 - type: nauc_precision_at_1_std value: 7.986600000000001 - type: nauc_precision_at_1_diff1 value: 65.95689999999999 - type: nauc_precision_at_3_max value: 61.7214 - type: nauc_precision_at_3_std value: 29.1046 - type: nauc_precision_at_3_diff1 value: 55.1971 - type: nauc_precision_at_5_max value: 68.1151 - type: nauc_precision_at_5_std value: 38.587700000000005 
- type: nauc_precision_at_5_diff1 value: 55.886 - type: nauc_precision_at_10_max value: 75.3834 - type: nauc_precision_at_10_std value: 49.6516 - type: nauc_precision_at_10_diff1 value: 57.0852 - type: nauc_precision_at_20_max value: 83.7342 - type: nauc_precision_at_20_std value: 69.9947 - type: nauc_precision_at_20_diff1 value: 60.002500000000005 - type: nauc_precision_at_100_max value: 91.4204 - type: nauc_precision_at_100_std value: 89.0309 - type: nauc_precision_at_100_diff1 value: 65.7358 - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_mrr_at_1_max value: 43.5297 - type: nauc_mrr_at_1_std value: 7.986600000000001 - type: nauc_mrr_at_1_diff1 value: 65.95689999999999 - type: nauc_mrr_at_3_max value: 49.7136 - type: nauc_mrr_at_3_std value: 14.054400000000001 - type: nauc_mrr_at_3_diff1 value: 62.3127 - type: nauc_mrr_at_5_max value: 50.138400000000004 - type: nauc_mrr_at_5_std value: 14.7824 - type: nauc_mrr_at_5_diff1 value: 62.6784 - type: nauc_mrr_at_10_max value: 50.2613 - type: nauc_mrr_at_10_std value: 15.024899999999999 - type: nauc_mrr_at_10_diff1 value: 62.864200000000004 - type: nauc_mrr_at_20_max value: 50.267300000000006 - type: nauc_mrr_at_20_std value: 15.234300000000001 - type: nauc_mrr_at_20_diff1 value: 63.00130000000001 - type: nauc_mrr_at_100_max value: 50.1927 - type: nauc_mrr_at_100_std value: 15.1701 - type: nauc_mrr_at_100_diff1 value: 63.0549 - type: nauc_mrr_at_1000_max value: 50.1623 - type: nauc_mrr_at_1000_std value: 15.118500000000001 - type: nauc_mrr_at_1000_diff1 value: 63.048300000000005 - type: main_score value: 72.92999999999999 - task: type: Retrieval dataset: name: MTEB CodeTransOceanContest (default) type: CoIR-Retrieval/codetrans-contest config: default split: test revision: 20da4eb20a4b17300c0986ee148c90867a7f2a4d metrics: - type: ndcg_at_1 value: 50.226000000000006 - type: ndcg_at_3 value: 55.748 - type: ndcg_at_5 value: 58.007 - type: ndcg_at_10 value: 60.831 - type: ndcg_at_20 value: 62.793 - type: ndcg_at_100 value: 64.43299999999999 - type: ndcg_at_1000 value: 65.60000000000001 - type: map_at_1 value: 50.226000000000006 - type: map_at_3 value: 54.374 - type: map_at_5 value: 55.641 - type: map_at_10 value: 56.83200000000001 - type: map_at_20 value: 57.379999999999995 - type: map_at_100 value: 57.594 - type: map_at_1000 value: 57.633 - type: recall_at_1 value: 50.226000000000006 - type: recall_at_3 value: 59.729 - type: recall_at_5 value: 65.158 - type: recall_at_10 value: 73.756 - type: recall_at_20 value: 81.448 - type: recall_at_100 value: 90.498 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 50.226000000000006 - type: precision_at_3 value: 19.91 - type: precision_at_5 value: 13.032 - type: precision_at_10 value: 7.376 - type: precision_at_20 value: 4.072 - type: precision_at_100 value: 0.905 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 50.2262 - type: mrr_at_3 value: 54.374100000000006 - type: mrr_at_5 value: 55.641 - type: mrr_at_10 value: 56.8322 - type: mrr_at_20 value: 57.3798 - type: mrr_at_100 value: 57.594500000000004 - type: mrr_at_1000 value: 57.6333 - type: nauc_ndcg_at_1_max value: 57.24249999999999 - type: nauc_ndcg_at_1_std value: 3.4893 - type: nauc_ndcg_at_1_diff1 value: 74.5093 - type: nauc_ndcg_at_3_max value: 57.099 - type: nauc_ndcg_at_3_std value: 3.3562000000000003 - type: nauc_ndcg_at_3_diff1 value: 71.5239 - type: nauc_ndcg_at_5_max value: 57.5998 - type: nauc_ndcg_at_5_std 
value: 4.7879 - type: nauc_ndcg_at_5_diff1 value: 69.9839 - type: nauc_ndcg_at_10_max value: 56.1631 - type: nauc_ndcg_at_10_std value: 6.0869 - type: nauc_ndcg_at_10_diff1 value: 68.32939999999999 - type: nauc_ndcg_at_20_max value: 56.098800000000004 - type: nauc_ndcg_at_20_std value: 5.1246 - type: nauc_ndcg_at_20_diff1 value: 68.9858 - type: nauc_ndcg_at_100_max value: 56.788799999999995 - type: nauc_ndcg_at_100_std value: 5.6714 - type: nauc_ndcg_at_100_diff1 value: 69.3668 - type: nauc_ndcg_at_1000_max value: 56.7396 - type: nauc_ndcg_at_1000_std value: 5.0106 - type: nauc_ndcg_at_1000_diff1 value: 70.1024 - type: nauc_map_at_1_max value: 57.24249999999999 - type: nauc_map_at_1_std value: 3.4893 - type: nauc_map_at_1_diff1 value: 74.5093 - type: nauc_map_at_3_max value: 57.2832 - type: nauc_map_at_3_std value: 3.4703999999999997 - type: nauc_map_at_3_diff1 value: 72.40490000000001 - type: nauc_map_at_5_max value: 57.5445 - type: nauc_map_at_5_std value: 4.1418 - type: nauc_map_at_5_diff1 value: 71.5756 - type: nauc_map_at_10_max value: 57.0669 - type: nauc_map_at_10_std value: 4.7488 - type: nauc_map_at_10_diff1 value: 70.97869999999999 - type: nauc_map_at_20_max value: 57.08800000000001 - type: nauc_map_at_20_std value: 4.4653 - type: nauc_map_at_20_diff1 value: 71.2187 - type: nauc_map_at_100_max value: 57.1484 - type: nauc_map_at_100_std value: 4.5175 - type: nauc_map_at_100_diff1 value: 71.2734 - type: nauc_map_at_1000_max value: 57.1356 - type: nauc_map_at_1000_std value: 4.4929 - type: nauc_map_at_1000_diff1 value: 71.28710000000001 - type: nauc_recall_at_1_max value: 57.24249999999999 - type: nauc_recall_at_1_std value: 3.4893 - type: nauc_recall_at_1_diff1 value: 74.5093 - type: nauc_recall_at_3_max value: 56.469800000000006 - type: nauc_recall_at_3_std value: 2.9709 - type: nauc_recall_at_3_diff1 value: 68.7698 - type: nauc_recall_at_5_max value: 57.811 - type: nauc_recall_at_5_std value: 7.2669999999999995 - type: nauc_recall_at_5_diff1 value: 64.4325 - type: nauc_recall_at_10_max value: 51.5712 - type: nauc_recall_at_10_std value: 12.1867 - type: nauc_recall_at_10_diff1 value: 56.4929 - type: nauc_recall_at_20_max value: 49.3 - type: nauc_recall_at_20_std value: 8.371599999999999 - type: nauc_recall_at_20_diff1 value: 56.2505 - type: nauc_recall_at_100_max value: 55.7663 - type: nauc_recall_at_100_std value: 19.9214 - type: nauc_recall_at_100_diff1 value: 51.6979 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 57.24249999999999 - type: nauc_precision_at_1_std value: 3.4893 - type: nauc_precision_at_1_diff1 value: 74.5093 - type: nauc_precision_at_3_max value: 56.469800000000006 - type: nauc_precision_at_3_std value: 2.9709 - type: nauc_precision_at_3_diff1 value: 68.7698 - type: nauc_precision_at_5_max value: 57.811 - type: nauc_precision_at_5_std value: 7.2669999999999995 - type: nauc_precision_at_5_diff1 value: 64.4325 - type: nauc_precision_at_10_max value: 51.5712 - type: nauc_precision_at_10_std value: 12.1867 - type: nauc_precision_at_10_diff1 value: 56.4929 - type: nauc_precision_at_20_max value: 49.3 - type: nauc_precision_at_20_std value: 8.371599999999999 - type: nauc_precision_at_20_diff1 value: 56.2505 - type: nauc_precision_at_100_max value: 55.7663 - type: nauc_precision_at_100_std value: 19.9214 - type: nauc_precision_at_100_diff1 value: 51.6979 - type: nauc_precision_at_1000_max value: 100.0 - type: nauc_precision_at_1000_std value: 
100.0 - type: nauc_precision_at_1000_diff1 value: 100.0 - type: nauc_mrr_at_1_max value: 57.24249999999999 - type: nauc_mrr_at_1_std value: 3.4893 - type: nauc_mrr_at_1_diff1 value: 74.5093 - type: nauc_mrr_at_3_max value: 57.2832 - type: nauc_mrr_at_3_std value: 3.4703999999999997 - type: nauc_mrr_at_3_diff1 value: 72.40490000000001 - type: nauc_mrr_at_5_max value: 57.5445 - type: nauc_mrr_at_5_std value: 4.1418 - type: nauc_mrr_at_5_diff1 value: 71.5756 - type: nauc_mrr_at_10_max value: 57.0669 - type: nauc_mrr_at_10_std value: 4.7488 - type: nauc_mrr_at_10_diff1 value: 70.97869999999999 - type: nauc_mrr_at_20_max value: 57.08800000000001 - type: nauc_mrr_at_20_std value: 4.4653 - type: nauc_mrr_at_20_diff1 value: 71.2187 - type: nauc_mrr_at_100_max value: 57.1484 - type: nauc_mrr_at_100_std value: 4.5175 - type: nauc_mrr_at_100_diff1 value: 71.2734 - type: nauc_mrr_at_1000_max value: 57.1356 - type: nauc_mrr_at_1000_std value: 4.4929 - type: nauc_mrr_at_1000_diff1 value: 71.28710000000001 - type: main_score value: 60.831 - task: type: Retrieval dataset: name: MTEB CodeTransOceanDL (default) type: CoIR-Retrieval/codetrans-dl config: default split: test revision: 281562cb8a1265ab5c0824bfa6ddcd9b0a15618f metrics: - type: ndcg_at_1 value: 8.889 - type: ndcg_at_3 value: 12.09 - type: ndcg_at_5 value: 18.355 - type: ndcg_at_10 value: 32.138 - type: ndcg_at_20 value: 38.437 - type: ndcg_at_100 value: 39.031 - type: ndcg_at_1000 value: 39.031 - type: map_at_1 value: 8.889 - type: map_at_3 value: 11.111 - type: map_at_5 value: 14.639 - type: map_at_10 value: 20.193 - type: map_at_20 value: 22.137 - type: map_at_100 value: 22.21 - type: map_at_1000 value: 22.21 - type: recall_at_1 value: 8.889 - type: recall_at_3 value: 15.0 - type: recall_at_5 value: 30.0 - type: recall_at_10 value: 73.333 - type: recall_at_20 value: 96.667 - type: recall_at_100 value: 100.0 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 8.889 - type: precision_at_3 value: 5.0 - type: precision_at_5 value: 6.0 - type: precision_at_10 value: 7.333 - type: precision_at_20 value: 4.833 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 6.1110999999999995 - type: mrr_at_3 value: 10.0 - type: mrr_at_5 value: 12.8056 - type: mrr_at_10 value: 19.164900000000003 - type: mrr_at_20 value: 20.8374 - type: mrr_at_100 value: 20.9115 - type: mrr_at_1000 value: 20.9115 - type: nauc_ndcg_at_1_max value: -40.8791 - type: nauc_ndcg_at_1_std value: -29.137 - type: nauc_ndcg_at_1_diff1 value: -25.7462 - type: nauc_ndcg_at_3_max value: -43.8611 - type: nauc_ndcg_at_3_std value: -31.619999999999997 - type: nauc_ndcg_at_3_diff1 value: -8.387799999999999 - type: nauc_ndcg_at_5_max value: -34.1018 - type: nauc_ndcg_at_5_std value: -20.9725 - type: nauc_ndcg_at_5_diff1 value: -14.6478 - type: nauc_ndcg_at_10_max value: -29.694599999999998 - type: nauc_ndcg_at_10_std value: -17.6602 - type: nauc_ndcg_at_10_diff1 value: -21.0388 - type: nauc_ndcg_at_20_max value: -42.308800000000005 - type: nauc_ndcg_at_20_std value: -20.778 - type: nauc_ndcg_at_20_diff1 value: -15.67 - type: nauc_ndcg_at_100_max value: -37.4946 - type: nauc_ndcg_at_100_std value: -22.2861 - type: nauc_ndcg_at_100_diff1 value: -16.020300000000002 - type: nauc_ndcg_at_1000_max value: -37.4946 - type: nauc_ndcg_at_1000_std value: -22.2861 - type: nauc_ndcg_at_1000_diff1 value: -16.020300000000002 - type: nauc_map_at_1_max value: -40.8791 - type: nauc_map_at_1_std value: -29.137 - type: nauc_map_at_1_diff1 value: -25.7462 - 
type: nauc_map_at_3_max value: -43.1058 - type: nauc_map_at_3_std value: -31.071900000000003 - type: nauc_map_at_3_diff1 value: -12.875900000000001 - type: nauc_map_at_5_max value: -36.4737 - type: nauc_map_at_5_std value: -23.8979 - type: nauc_map_at_5_diff1 value: -16.206400000000002 - type: nauc_map_at_10_max value: -34.2318 - type: nauc_map_at_10_std value: -22.0811 - type: nauc_map_at_10_diff1 value: -18.5454 - type: nauc_map_at_20_max value: -37.9204 - type: nauc_map_at_20_std value: -23.3876 - type: nauc_map_at_20_diff1 value: -16.8628 - type: nauc_map_at_100_max value: -37.401 - type: nauc_map_at_100_std value: -23.595299999999998 - type: nauc_map_at_100_diff1 value: -16.8443 - type: nauc_map_at_1000_max value: -37.401 - type: nauc_map_at_1000_std value: -23.595299999999998 - type: nauc_map_at_1000_diff1 value: -16.8443 - type: nauc_recall_at_1_max value: -40.8791 - type: nauc_recall_at_1_std value: -29.137 - type: nauc_recall_at_1_diff1 value: -25.7462 - type: nauc_recall_at_3_max value: -45.6372 - type: nauc_recall_at_3_std value: -32.8876 - type: nauc_recall_at_3_diff1 value: 2.1906 - type: nauc_recall_at_5_max value: -29.531299999999998 - type: nauc_recall_at_5_std value: -15.2907 - type: nauc_recall_at_5_diff1 value: -12.279900000000001 - type: nauc_recall_at_10_max value: -17.0981 - type: nauc_recall_at_10_std value: -5.6821 - type: nauc_recall_at_10_diff1 value: -31.382700000000003 - type: nauc_recall_at_20_max value: -164.1923 - type: nauc_recall_at_20_std value: 14.6592 - type: nauc_recall_at_20_diff1 value: -1.6729 - type: nauc_recall_at_100_max value: .nan - type: nauc_recall_at_100_std value: .nan - type: nauc_recall_at_100_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: -40.8791 - type: nauc_precision_at_1_std value: -29.137 - type: nauc_precision_at_1_diff1 value: -25.7462 - type: nauc_precision_at_3_max value: -45.6372 - type: nauc_precision_at_3_std value: -32.8876 - type: nauc_precision_at_3_diff1 value: 2.1906 - type: nauc_precision_at_5_max value: -29.531299999999998 - type: nauc_precision_at_5_std value: -15.2907 - type: nauc_precision_at_5_diff1 value: -12.279900000000001 - type: nauc_precision_at_10_max value: -17.0981 - type: nauc_precision_at_10_std value: -5.6821 - type: nauc_precision_at_10_diff1 value: -31.382700000000003 - type: nauc_precision_at_20_max value: -164.1923 - type: nauc_precision_at_20_std value: 14.6592 - type: nauc_precision_at_20_diff1 value: -1.6729 - type: nauc_precision_at_100_max value: 100.0 - type: nauc_precision_at_100_std value: 100.0 - type: nauc_precision_at_100_diff1 value: 100.0 - type: nauc_precision_at_1000_max value: 100.0 - type: nauc_precision_at_1000_std value: 100.0 - type: nauc_precision_at_1000_diff1 value: 100.0 - type: nauc_mrr_at_1_max value: -38.4833 - type: nauc_mrr_at_1_std value: -27.4288 - type: nauc_mrr_at_1_diff1 value: -2.3441 - type: nauc_mrr_at_3_max value: -40.2427 - type: nauc_mrr_at_3_std value: -28.479 - type: nauc_mrr_at_3_diff1 value: 14.5837 - type: nauc_mrr_at_5_max value: -32.784400000000005 - type: nauc_mrr_at_5_std value: -19.3984 - type: nauc_mrr_at_5_diff1 value: 8.2762 - type: nauc_mrr_at_10_max value: -31.999499999999998 - type: nauc_mrr_at_10_std value: -20.9878 - type: nauc_mrr_at_10_diff1 value: 9.2346 - type: nauc_mrr_at_20_max value: -36.2588 - type: nauc_mrr_at_20_std value: -21.057699999999997 - type: nauc_mrr_at_20_diff1 value: 9.4499 - type: 
nauc_mrr_at_100_max value: -35.6528 - type: nauc_mrr_at_100_std value: -21.288 - type: nauc_mrr_at_100_diff1 value: 9.591 - type: nauc_mrr_at_1000_max value: -35.6528 - type: nauc_mrr_at_1000_std value: -21.288 - type: nauc_mrr_at_1000_diff1 value: 9.591 - type: main_score value: 32.138 - task: type: Retrieval dataset: name: MTEB CosQA (default) type: CoIR-Retrieval/cosqa config: default split: test revision: bc5efb7e9d437246ce393ed19d772e08e4a79535 metrics: - type: ndcg_at_1 value: 14.6 - type: ndcg_at_3 value: 23.043 - type: ndcg_at_5 value: 28.551 - type: ndcg_at_10 value: 33.452 - type: ndcg_at_20 value: 37.094 - type: ndcg_at_100 value: 40.416999999999994 - type: ndcg_at_1000 value: 41.684 - type: map_at_1 value: 14.6 - type: map_at_3 value: 20.8 - type: map_at_5 value: 23.849999999999998 - type: map_at_10 value: 25.941 - type: map_at_20 value: 26.941 - type: map_at_100 value: 27.418 - type: map_at_1000 value: 27.473999999999997 - type: recall_at_1 value: 14.6 - type: recall_at_3 value: 29.599999999999998 - type: recall_at_5 value: 43.0 - type: recall_at_10 value: 57.8 - type: recall_at_20 value: 72.2 - type: recall_at_100 value: 89.8 - type: recall_at_1000 value: 99.4 - type: precision_at_1 value: 14.6 - type: precision_at_3 value: 9.866999999999999 - type: precision_at_5 value: 8.6 - type: precision_at_10 value: 5.779999999999999 - type: precision_at_20 value: 3.61 - type: precision_at_100 value: 0.898 - type: precision_at_1000 value: 0.099 - type: mrr_at_1 value: 15.4 - type: mrr_at_3 value: 21.099999999999998 - type: mrr_at_5 value: 23.380000000000003 - type: mrr_at_10 value: 25.5087 - type: mrr_at_20 value: 26.5332 - type: mrr_at_100 value: 27.0822 - type: mrr_at_1000 value: 27.1358 - type: nauc_ndcg_at_1_max value: 15.7645 - type: nauc_ndcg_at_1_std value: -8.4668 - type: nauc_ndcg_at_1_diff1 value: 38.0187 - type: nauc_ndcg_at_3_max value: 14.791799999999999 - type: nauc_ndcg_at_3_std value: -11.6736 - type: nauc_ndcg_at_3_diff1 value: 24.288899999999998 - type: nauc_ndcg_at_5_max value: 17.9426 - type: nauc_ndcg_at_5_std value: -11.1099 - type: nauc_ndcg_at_5_diff1 value: 18.8892 - type: nauc_ndcg_at_10_max value: 18.3537 - type: nauc_ndcg_at_10_std value: -9.0621 - type: nauc_ndcg_at_10_diff1 value: 17.6054 - type: nauc_ndcg_at_20_max value: 19.9156 - type: nauc_ndcg_at_20_std value: -6.926699999999999 - type: nauc_ndcg_at_20_diff1 value: 16.125 - type: nauc_ndcg_at_100_max value: 19.527900000000002 - type: nauc_ndcg_at_100_std value: -5.9748 - type: nauc_ndcg_at_100_diff1 value: 18.8697 - type: nauc_ndcg_at_1000_max value: 18.6624 - type: nauc_ndcg_at_1000_std value: -7.6636999999999995 - type: nauc_ndcg_at_1000_diff1 value: 20.2624 - type: nauc_map_at_1_max value: 15.7645 - type: nauc_map_at_1_std value: -8.4668 - type: nauc_map_at_1_diff1 value: 38.0187 - type: nauc_map_at_3_max value: 14.932200000000002 - type: nauc_map_at_3_std value: -11.2233 - type: nauc_map_at_3_diff1 value: 27.254800000000003 - type: nauc_map_at_5_max value: 16.700599999999998 - type: nauc_map_at_5_std value: -10.9701 - type: nauc_map_at_5_diff1 value: 23.9832 - type: nauc_map_at_10_max value: 16.947200000000002 - type: nauc_map_at_10_std value: -9.896099999999999 - type: nauc_map_at_10_diff1 value: 23.4428 - type: nauc_map_at_20_max value: 17.3857 - type: nauc_map_at_20_std value: -9.2728 - type: nauc_map_at_20_diff1 value: 23.1321 - type: nauc_map_at_100_max value: 17.3462 - type: nauc_map_at_100_std value: -9.2043 - type: nauc_map_at_100_diff1 value: 23.5583 - type: nauc_map_at_1000_max value: 
17.3214 - type: nauc_map_at_1000_std value: -9.2627 - type: nauc_map_at_1000_diff1 value: 23.6455 - type: nauc_recall_at_1_max value: 15.7645 - type: nauc_recall_at_1_std value: -8.4668 - type: nauc_recall_at_1_diff1 value: 38.0187 - type: nauc_recall_at_3_max value: 14.4809 - type: nauc_recall_at_3_std value: -12.664700000000002 - type: nauc_recall_at_3_diff1 value: 17.275199999999998 - type: nauc_recall_at_5_max value: 21.2405 - type: nauc_recall_at_5_std value: -11.2278 - type: nauc_recall_at_5_diff1 value: 6.6622 - type: nauc_recall_at_10_max value: 22.3474 - type: nauc_recall_at_10_std value: -6.399299999999999 - type: nauc_recall_at_10_diff1 value: 2.0452000000000004 - type: nauc_recall_at_20_max value: 30.1398 - type: nauc_recall_at_20_std value: 3.3263000000000003 - type: nauc_recall_at_20_diff1 value: -9.3067 - type: nauc_recall_at_100_max value: 37.6654 - type: nauc_recall_at_100_std value: 30.699700000000004 - type: nauc_recall_at_100_diff1 value: -8.959999999999999 - type: nauc_recall_at_1000_max value: 47.3389 - type: nauc_recall_at_1000_std value: 95.6427 - type: nauc_recall_at_1000_diff1 value: -102.10079999999999 - type: nauc_precision_at_1_max value: 15.7645 - type: nauc_precision_at_1_std value: -8.4668 - type: nauc_precision_at_1_diff1 value: 38.0187 - type: nauc_precision_at_3_max value: 14.4809 - type: nauc_precision_at_3_std value: -12.664700000000002 - type: nauc_precision_at_3_diff1 value: 17.275199999999998 - type: nauc_precision_at_5_max value: 21.2405 - type: nauc_precision_at_5_std value: -11.2278 - type: nauc_precision_at_5_diff1 value: 6.6622 - type: nauc_precision_at_10_max value: 22.3474 - type: nauc_precision_at_10_std value: -6.399299999999999 - type: nauc_precision_at_10_diff1 value: 2.0452000000000004 - type: nauc_precision_at_20_max value: 30.1398 - type: nauc_precision_at_20_std value: 3.3263000000000003 - type: nauc_precision_at_20_diff1 value: -9.3067 - type: nauc_precision_at_100_max value: 37.6654 - type: nauc_precision_at_100_std value: 30.699700000000004 - type: nauc_precision_at_100_diff1 value: -8.959999999999999 - type: nauc_precision_at_1000_max value: 47.3389 - type: nauc_precision_at_1000_std value: 95.6427 - type: nauc_precision_at_1000_diff1 value: -102.10079999999999 - type: nauc_mrr_at_1_max value: 15.059800000000001 - type: nauc_mrr_at_1_std value: -17.3443 - type: nauc_mrr_at_1_diff1 value: 34.5918 - type: nauc_mrr_at_3_max value: 15.5076 - type: nauc_mrr_at_3_std value: -16.3353 - type: nauc_mrr_at_3_diff1 value: 27.414899999999996 - type: nauc_mrr_at_5_max value: 15.033299999999999 - type: nauc_mrr_at_5_std value: -16.0288 - type: nauc_mrr_at_5_diff1 value: 25.4198 - type: nauc_mrr_at_10_max value: 15.7434 - type: nauc_mrr_at_10_std value: -14.8923 - type: nauc_mrr_at_10_diff1 value: 23.6099 - type: nauc_mrr_at_20_max value: 16.2588 - type: nauc_mrr_at_20_std value: -14.5306 - type: nauc_mrr_at_20_diff1 value: 23.718700000000002 - type: nauc_mrr_at_100_max value: 16.2196 - type: nauc_mrr_at_100_std value: -14.4928 - type: nauc_mrr_at_100_diff1 value: 24.017 - type: nauc_mrr_at_1000_max value: 16.1885 - type: nauc_mrr_at_1000_std value: -14.5629 - type: nauc_mrr_at_1000_diff1 value: 24.0998 - type: main_score value: 33.452 - task: type: Retrieval dataset: name: MTEB DBPedia (default) type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: ndcg_at_1 value: 48.75 - type: ndcg_at_3 value: 40.266000000000005 - type: ndcg_at_5 value: 37.034 - type: ndcg_at_10 value: 34.565 - type: 
ndcg_at_20 value: 34.013 - type: ndcg_at_100 value: 39.006 - type: ndcg_at_1000 value: 46.64 - type: map_at_1 value: 7.866 - type: map_at_3 value: 12.145999999999999 - type: map_at_5 value: 13.874 - type: map_at_10 value: 16.02 - type: map_at_20 value: 18.183 - type: map_at_100 value: 21.775 - type: map_at_1000 value: 23.203 - type: recall_at_1 value: 7.866 - type: recall_at_3 value: 13.700000000000001 - type: recall_at_5 value: 16.683 - type: recall_at_10 value: 21.059 - type: recall_at_20 value: 27.045 - type: recall_at_100 value: 45.236 - type: recall_at_1000 value: 69.867 - type: precision_at_1 value: 60.5 - type: precision_at_3 value: 44.083 - type: precision_at_5 value: 35.449999999999996 - type: precision_at_10 value: 26.400000000000002 - type: precision_at_20 value: 19.75 - type: precision_at_100 value: 8.472 - type: precision_at_1000 value: 1.822 - type: mrr_at_1 value: 60.5 - type: mrr_at_3 value: 67.625 - type: mrr_at_5 value: 68.4625 - type: mrr_at_10 value: 69.4092 - type: mrr_at_20 value: 69.6644 - type: mrr_at_100 value: 69.8187 - type: mrr_at_1000 value: 69.8284 - type: nauc_ndcg_at_1_max value: 27.385199999999998 - type: nauc_ndcg_at_1_std value: 15.502199999999998 - type: nauc_ndcg_at_1_diff1 value: 40.3474 - type: nauc_ndcg_at_3_max value: 23.691100000000002 - type: nauc_ndcg_at_3_std value: 17.8766 - type: nauc_ndcg_at_3_diff1 value: 26.1322 - type: nauc_ndcg_at_5_max value: 21.908 - type: nauc_ndcg_at_5_std value: 16.5012 - type: nauc_ndcg_at_5_diff1 value: 24.9377 - type: nauc_ndcg_at_10_max value: 21.5239 - type: nauc_ndcg_at_10_std value: 15.327399999999999 - type: nauc_ndcg_at_10_diff1 value: 25.0379 - type: nauc_ndcg_at_20_max value: 18.6445 - type: nauc_ndcg_at_20_std value: 10.4816 - type: nauc_ndcg_at_20_diff1 value: 24.5885 - type: nauc_ndcg_at_100_max value: 21.7258 - type: nauc_ndcg_at_100_std value: 14.514199999999999 - type: nauc_ndcg_at_100_diff1 value: 21.6285 - type: nauc_ndcg_at_1000_max value: 25.515 - type: nauc_ndcg_at_1000_std value: 23.278499999999998 - type: nauc_ndcg_at_1000_diff1 value: 21.3373 - type: nauc_map_at_1_max value: 2.911 - type: nauc_map_at_1_std value: -23.3734 - type: nauc_map_at_1_diff1 value: 31.251099999999997 - type: nauc_map_at_3_max value: 6.7765 - type: nauc_map_at_3_std value: -21.1466 - type: nauc_map_at_3_diff1 value: 26.6096 - type: nauc_map_at_5_max value: 7.2574 - type: nauc_map_at_5_std value: -18.0369 - type: nauc_map_at_5_diff1 value: 24.0648 - type: nauc_map_at_10_max value: 11.669699999999999 - type: nauc_map_at_10_std value: -10.5142 - type: nauc_map_at_10_diff1 value: 23.289099999999998 - type: nauc_map_at_20_max value: 13.9376 - type: nauc_map_at_20_std value: -4.1179 - type: nauc_map_at_20_diff1 value: 22.9493 - type: nauc_map_at_100_max value: 18.756600000000002 - type: nauc_map_at_100_std value: 7.5601 - type: nauc_map_at_100_diff1 value: 21.1962 - type: nauc_map_at_1000_max value: 20.4084 - type: nauc_map_at_1000_std value: 10.7807 - type: nauc_map_at_1000_diff1 value: 21.6074 - type: nauc_recall_at_1_max value: 2.911 - type: nauc_recall_at_1_std value: -23.3734 - type: nauc_recall_at_1_diff1 value: 31.251099999999997 - type: nauc_recall_at_3_max value: 5.9628 - type: nauc_recall_at_3_std value: -21.7657 - type: nauc_recall_at_3_diff1 value: 22.1779 - type: nauc_recall_at_5_max value: 4.2336 - type: nauc_recall_at_5_std value: -19.872 - type: nauc_recall_at_5_diff1 value: 17.4799 - type: nauc_recall_at_10_max value: 9.376900000000001 - type: nauc_recall_at_10_std value: -12.3596 - type: 
nauc_recall_at_10_diff1 value: 15.801100000000002 - type: nauc_recall_at_20_max value: 11.2098 - type: nauc_recall_at_20_std value: -6.471699999999999 - type: nauc_recall_at_20_diff1 value: 15.1155 - type: nauc_recall_at_100_max value: 16.7433 - type: nauc_recall_at_100_std value: 12.2849 - type: nauc_recall_at_100_diff1 value: 6.908499999999999 - type: nauc_recall_at_1000_max value: 18.6941 - type: nauc_recall_at_1000_std value: 25.2521 - type: nauc_recall_at_1000_diff1 value: 1.0488000000000002 - type: nauc_precision_at_1_max value: 39.5387 - type: nauc_precision_at_1_std value: 23.244600000000002 - type: nauc_precision_at_1_diff1 value: 50.275499999999994 - type: nauc_precision_at_3_max value: 32.3641 - type: nauc_precision_at_3_std value: 34.4136 - type: nauc_precision_at_3_diff1 value: 17.316200000000002 - type: nauc_precision_at_5_max value: 29.9613 - type: nauc_precision_at_5_std value: 39.3271 - type: nauc_precision_at_5_diff1 value: 13.352 - type: nauc_precision_at_10_max value: 29.5821 - type: nauc_precision_at_10_std value: 48.0976 - type: nauc_precision_at_10_diff1 value: 9.610000000000001 - type: nauc_precision_at_20_max value: 25.5555 - type: nauc_precision_at_20_std value: 49.3622 - type: nauc_precision_at_20_diff1 value: 8.0656 - type: nauc_precision_at_100_max value: 24.3874 - type: nauc_precision_at_100_std value: 49.613600000000005 - type: nauc_precision_at_100_diff1 value: 4.1512 - type: nauc_precision_at_1000_max value: 16.0014 - type: nauc_precision_at_1000_std value: 28.3243 - type: nauc_precision_at_1000_diff1 value: 11.5068 - type: nauc_mrr_at_1_max value: 39.5387 - type: nauc_mrr_at_1_std value: 23.244600000000002 - type: nauc_mrr_at_1_diff1 value: 50.275499999999994 - type: nauc_mrr_at_3_max value: 44.3328 - type: nauc_mrr_at_3_std value: 29.595900000000004 - type: nauc_mrr_at_3_diff1 value: 47.0929 - type: nauc_mrr_at_5_max value: 43.6678 - type: nauc_mrr_at_5_std value: 29.219299999999997 - type: nauc_mrr_at_5_diff1 value: 47.7731 - type: nauc_mrr_at_10_max value: 43.1409 - type: nauc_mrr_at_10_std value: 29.5283 - type: nauc_mrr_at_10_diff1 value: 47.7777 - type: nauc_mrr_at_20_max value: 43.2155 - type: nauc_mrr_at_20_std value: 29.378999999999998 - type: nauc_mrr_at_20_diff1 value: 47.826800000000006 - type: nauc_mrr_at_100_max value: 43.2448 - type: nauc_mrr_at_100_std value: 29.385 - type: nauc_mrr_at_100_diff1 value: 47.7931 - type: nauc_mrr_at_1000_max value: 43.2316 - type: nauc_mrr_at_1000_std value: 29.3645 - type: nauc_mrr_at_1000_diff1 value: 47.7958 - type: main_score value: 34.565 - task: type: Classification dataset: name: MTEB EmotionClassification (default) type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 36.449999999999996 - type: f1 value: 32.3042 - type: f1_weighted value: 38.7818 - type: main_score value: 36.449999999999996 - task: type: Retrieval dataset: name: MTEB FEVER (default) type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: ndcg_at_1 value: 77.93299999999999 - type: ndcg_at_3 value: 83.146 - type: ndcg_at_5 value: 84.188 - type: ndcg_at_10 value: 84.932 - type: ndcg_at_20 value: 85.187 - type: ndcg_at_100 value: 85.452 - type: ndcg_at_1000 value: 85.68599999999999 - type: map_at_1 value: 72.173 - type: map_at_3 value: 79.618 - type: map_at_5 value: 80.32000000000001 - type: map_at_10 value: 80.674 - type: map_at_20 value: 80.762 - type: map_at_100 value: 80.81 - type: map_at_1000 value: 
80.822 - type: recall_at_1 value: 72.173 - type: recall_at_3 value: 87.804 - type: recall_at_5 value: 90.556 - type: recall_at_10 value: 92.869 - type: recall_at_20 value: 93.768 - type: recall_at_100 value: 95.00699999999999 - type: recall_at_1000 value: 96.504 - type: precision_at_1 value: 77.93299999999999 - type: precision_at_3 value: 31.828 - type: precision_at_5 value: 19.727 - type: precision_at_10 value: 10.135 - type: precision_at_20 value: 5.136 - type: precision_at_100 value: 1.049 - type: precision_at_1000 value: 0.109 - type: mrr_at_1 value: 77.9328 - type: mrr_at_3 value: 85.221 - type: mrr_at_5 value: 85.8076 - type: mrr_at_10 value: 86.0963 - type: mrr_at_20 value: 86.1448 - type: mrr_at_100 value: 86.1622 - type: mrr_at_1000 value: 86.1631 - type: nauc_ndcg_at_1_max value: 27.804499999999997 - type: nauc_ndcg_at_1_std value: -31.1045 - type: nauc_ndcg_at_1_diff1 value: 66.6633 - type: nauc_ndcg_at_3_max value: 21.6576 - type: nauc_ndcg_at_3_std value: -24.3372 - type: nauc_ndcg_at_3_diff1 value: 48.9088 - type: nauc_ndcg_at_5_max value: 20.612 - type: nauc_ndcg_at_5_std value: -23.8007 - type: nauc_ndcg_at_5_diff1 value: 48.0635 - type: nauc_ndcg_at_10_max value: 19.6463 - type: nauc_ndcg_at_10_std value: -22.5941 - type: nauc_ndcg_at_10_diff1 value: 47.5561 - type: nauc_ndcg_at_20_max value: 19.5443 - type: nauc_ndcg_at_20_std value: -21.998 - type: nauc_ndcg_at_20_diff1 value: 47.664699999999996 - type: nauc_ndcg_at_100_max value: 19.2285 - type: nauc_ndcg_at_100_std value: -21.6826 - type: nauc_ndcg_at_100_diff1 value: 47.897099999999995 - type: nauc_ndcg_at_1000_max value: 19.5578 - type: nauc_ndcg_at_1000_std value: -21.9412 - type: nauc_ndcg_at_1000_diff1 value: 48.361 - type: nauc_map_at_1_max value: 20.3735 - type: nauc_map_at_1_std value: -24.7274 - type: nauc_map_at_1_diff1 value: 54.148399999999995 - type: nauc_map_at_3_max value: 19.3166 - type: nauc_map_at_3_std value: -23.171 - type: nauc_map_at_3_diff1 value: 48.254000000000005 - type: nauc_map_at_5_max value: 19.158900000000003 - type: nauc_map_at_5_std value: -22.966900000000003 - type: nauc_map_at_5_diff1 value: 48.0877 - type: nauc_map_at_10_max value: 18.8745 - type: nauc_map_at_10_std value: -22.5913 - type: nauc_map_at_10_diff1 value: 47.957899999999995 - type: nauc_map_at_20_max value: 18.895200000000003 - type: nauc_map_at_20_std value: -22.4542 - type: nauc_map_at_20_diff1 value: 48.0047 - type: nauc_map_at_100_max value: 18.8722 - type: nauc_map_at_100_std value: -22.3984 - type: nauc_map_at_100_diff1 value: 48.0394 - type: nauc_map_at_1000_max value: 18.8824 - type: nauc_map_at_1000_std value: -22.4034 - type: nauc_map_at_1000_diff1 value: 48.0533 - type: nauc_recall_at_1_max value: 20.3735 - type: nauc_recall_at_1_std value: -24.7274 - type: nauc_recall_at_1_diff1 value: 54.148399999999995 - type: nauc_recall_at_3_max value: 15.2387 - type: nauc_recall_at_3_std value: -17.3947 - type: nauc_recall_at_3_diff1 value: 30.6589 - type: nauc_recall_at_5_max value: 11.4037 - type: nauc_recall_at_5_std value: -14.3603 - type: nauc_recall_at_5_diff1 value: 23.7356 - type: nauc_recall_at_10_max value: 3.8233 - type: nauc_recall_at_10_std value: -4.6399 - type: nauc_recall_at_10_diff1 value: 13.8514 - type: nauc_recall_at_20_max value: 0.3939 - type: nauc_recall_at_20_std value: 2.4212000000000002 - type: nauc_recall_at_20_diff1 value: 10.110800000000001 - type: nauc_recall_at_100_max value: -8.9768 - type: nauc_recall_at_100_std value: 11.2598 - type: nauc_recall_at_100_diff1 value: 4.6753 - type: 
nauc_recall_at_1000_max value: -13.494800000000001 - type: nauc_recall_at_1000_std value: 17.2306 - type: nauc_recall_at_1000_diff1 value: 0.0856 - type: nauc_precision_at_1_max value: 27.804499999999997 - type: nauc_precision_at_1_std value: -31.1045 - type: nauc_precision_at_1_diff1 value: 66.6633 - type: nauc_precision_at_3_max value: 25.660899999999998 - type: nauc_precision_at_3_std value: -22.0243 - type: nauc_precision_at_3_diff1 value: 34.5966 - type: nauc_precision_at_5_max value: 22.4777 - type: nauc_precision_at_5_std value: -14.9469 - type: nauc_precision_at_5_diff1 value: 20.9233 - type: nauc_precision_at_10_max value: 13.7882 - type: nauc_precision_at_10_std value: -0.1941 - type: nauc_precision_at_10_diff1 value: 2.5737 - type: nauc_precision_at_20_max value: 10.422099999999999 - type: nauc_precision_at_20_std value: 8.518 - type: nauc_precision_at_20_diff1 value: -4.2715000000000005 - type: nauc_precision_at_100_max value: 3.8884000000000003 - type: nauc_precision_at_100_std value: 14.529800000000002 - type: nauc_precision_at_100_diff1 value: -10.066 - type: nauc_precision_at_1000_max value: 5.5056 - type: nauc_precision_at_1000_std value: 10.3948 - type: nauc_precision_at_1000_diff1 value: -9.5234 - type: nauc_mrr_at_1_max value: 27.804499999999997 - type: nauc_mrr_at_1_std value: -31.1045 - type: nauc_mrr_at_1_diff1 value: 66.6633 - type: nauc_mrr_at_3_max value: 30.593500000000002 - type: nauc_mrr_at_3_std value: -31.844499999999996 - type: nauc_mrr_at_3_diff1 value: 63.571 - type: nauc_mrr_at_5_max value: 30.544700000000002 - type: nauc_mrr_at_5_std value: -32.0369 - type: nauc_mrr_at_5_diff1 value: 63.8464 - type: nauc_mrr_at_10_max value: 30.459000000000003 - type: nauc_mrr_at_10_std value: -31.799500000000002 - type: nauc_mrr_at_10_diff1 value: 64.0984 - type: nauc_mrr_at_20_max value: 30.3871 - type: nauc_mrr_at_20_std value: -31.6429 - type: nauc_mrr_at_20_diff1 value: 64.1444 - type: nauc_mrr_at_100_max value: 30.324099999999998 - type: nauc_mrr_at_100_std value: -31.629800000000003 - type: nauc_mrr_at_100_diff1 value: 64.163 - type: nauc_mrr_at_1000_max value: 30.3201 - type: nauc_mrr_at_1000_std value: -31.6352 - type: nauc_mrr_at_1000_diff1 value: 64.1637 - type: main_score value: 84.932 - task: type: Retrieval dataset: name: MTEB FiQA2018 (default) type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: ndcg_at_1 value: 34.259 - type: ndcg_at_3 value: 32.14 - type: ndcg_at_5 value: 33.391 - type: ndcg_at_10 value: 35.663 - type: ndcg_at_20 value: 38.193 - type: ndcg_at_100 value: 42.232 - type: ndcg_at_1000 value: 45.595 - type: map_at_1 value: 17.124 - type: map_at_3 value: 24.359 - type: map_at_5 value: 26.532 - type: map_at_10 value: 28.183000000000003 - type: map_at_20 value: 29.119 - type: map_at_100 value: 29.881 - type: map_at_1000 value: 30.070000000000004 - type: recall_at_1 value: 17.124 - type: recall_at_3 value: 29.488999999999997 - type: recall_at_5 value: 35.436 - type: recall_at_10 value: 42.665 - type: recall_at_20 value: 50.381 - type: recall_at_100 value: 67.364 - type: recall_at_1000 value: 87.315 - type: precision_at_1 value: 34.259 - type: precision_at_3 value: 21.399 - type: precision_at_5 value: 15.926000000000002 - type: precision_at_10 value: 9.907 - type: precision_at_20 value: 6.026 - type: precision_at_100 value: 1.637 - type: precision_at_1000 value: 0.22599999999999998 - type: mrr_at_1 value: 34.259299999999996 - type: mrr_at_3 value: 40.7922 - type: mrr_at_5 value: 42.1811 - 
type: mrr_at_10 value: 43.1663 - type: mrr_at_20 value: 43.684400000000004 - type: mrr_at_100 value: 44.079 - type: mrr_at_1000 value: 44.1277 - type: nauc_ndcg_at_1_max value: 45.5993 - type: nauc_ndcg_at_1_std value: 4.2730999999999995 - type: nauc_ndcg_at_1_diff1 value: 51.0941 - type: nauc_ndcg_at_3_max value: 38.6082 - type: nauc_ndcg_at_3_std value: 1.7973 - type: nauc_ndcg_at_3_diff1 value: 41.556599999999996 - type: nauc_ndcg_at_5_max value: 37.0326 - type: nauc_ndcg_at_5_std value: 3.5555000000000003 - type: nauc_ndcg_at_5_diff1 value: 41.166599999999995 - type: nauc_ndcg_at_10_max value: 36.8257 - type: nauc_ndcg_at_10_std value: 4.6765 - type: nauc_ndcg_at_10_diff1 value: 40.7039 - type: nauc_ndcg_at_20_max value: 37.9542 - type: nauc_ndcg_at_20_std value: 6.2273000000000005 - type: nauc_ndcg_at_20_diff1 value: 40.7126 - type: nauc_ndcg_at_100_max value: 40.029399999999995 - type: nauc_ndcg_at_100_std value: 8.8925 - type: nauc_ndcg_at_100_diff1 value: 40.8749 - type: nauc_ndcg_at_1000_max value: 41.0995 - type: nauc_ndcg_at_1000_std value: 9.055399999999999 - type: nauc_ndcg_at_1000_diff1 value: 42.0999 - type: nauc_map_at_1_max value: 29.1034 - type: nauc_map_at_1_std value: -1.3329 - type: nauc_map_at_1_diff1 value: 49.6713 - type: nauc_map_at_3_max value: 31.2555 - type: nauc_map_at_3_std value: -1.2727 - type: nauc_map_at_3_diff1 value: 42.8671 - type: nauc_map_at_5_max value: 32.7495 - type: nauc_map_at_5_std value: 0.4463 - type: nauc_map_at_5_diff1 value: 42.3138 - type: nauc_map_at_10_max value: 34.0564 - type: nauc_map_at_10_std value: 1.8785 - type: nauc_map_at_10_diff1 value: 41.9711 - type: nauc_map_at_20_max value: 34.7449 - type: nauc_map_at_20_std value: 2.6273 - type: nauc_map_at_20_diff1 value: 41.9563 - type: nauc_map_at_100_max value: 35.3724 - type: nauc_map_at_100_std value: 3.1910000000000003 - type: nauc_map_at_100_diff1 value: 41.990899999999996 - type: nauc_map_at_1000_max value: 35.4782 - type: nauc_map_at_1000_std value: 3.2302999999999997 - type: nauc_map_at_1000_diff1 value: 42.0484 - type: nauc_recall_at_1_max value: 29.1034 - type: nauc_recall_at_1_std value: -1.3329 - type: nauc_recall_at_1_diff1 value: 49.6713 - type: nauc_recall_at_3_max value: 28.3729 - type: nauc_recall_at_3_std value: 0.0225 - type: nauc_recall_at_3_diff1 value: 35.2655 - type: nauc_recall_at_5_max value: 28.0157 - type: nauc_recall_at_5_std value: 3.5967 - type: nauc_recall_at_5_diff1 value: 31.5507 - type: nauc_recall_at_10_max value: 28.0271 - type: nauc_recall_at_10_std value: 6.7875000000000005 - type: nauc_recall_at_10_diff1 value: 28.3267 - type: nauc_recall_at_20_max value: 30.2764 - type: nauc_recall_at_20_std value: 11.2697 - type: nauc_recall_at_20_diff1 value: 27.5277 - type: nauc_recall_at_100_max value: 33.2215 - type: nauc_recall_at_100_std value: 23.6362 - type: nauc_recall_at_100_diff1 value: 23.1851 - type: nauc_recall_at_1000_max value: 41.8199 - type: nauc_recall_at_1000_std value: 42.2866 - type: nauc_recall_at_1000_diff1 value: 29.341099999999997 - type: nauc_precision_at_1_max value: 45.5993 - type: nauc_precision_at_1_std value: 4.2730999999999995 - type: nauc_precision_at_1_diff1 value: 51.0941 - type: nauc_precision_at_3_max value: 40.541 - type: nauc_precision_at_3_std value: 3.6046 - type: nauc_precision_at_3_diff1 value: 29.2879 - type: nauc_precision_at_5_max value: 40.4116 - type: nauc_precision_at_5_std value: 9.523 - type: nauc_precision_at_5_diff1 value: 24.9572 - type: nauc_precision_at_10_max value: 39.7377 - type: nauc_precision_at_10_std 
value: 11.8076 - type: nauc_precision_at_10_diff1 value: 21.1979 - type: nauc_precision_at_20_max value: 40.1851 - type: nauc_precision_at_20_std value: 14.967 - type: nauc_precision_at_20_diff1 value: 19.0881 - type: nauc_precision_at_100_max value: 39.4474 - type: nauc_precision_at_100_std value: 19.6785 - type: nauc_precision_at_100_diff1 value: 12.6951 - type: nauc_precision_at_1000_max value: 32.071600000000004 - type: nauc_precision_at_1000_std value: 14.7899 - type: nauc_precision_at_1000_diff1 value: 7.456599999999999 - type: nauc_mrr_at_1_max value: 45.5993 - type: nauc_mrr_at_1_std value: 4.2730999999999995 - type: nauc_mrr_at_1_diff1 value: 51.0941 - type: nauc_mrr_at_3_max value: 45.5586 - type: nauc_mrr_at_3_std value: 5.6932 - type: nauc_mrr_at_3_diff1 value: 47.1359 - type: nauc_mrr_at_5_max value: 45.0408 - type: nauc_mrr_at_5_std value: 6.4838000000000005 - type: nauc_mrr_at_5_diff1 value: 46.4912 - type: nauc_mrr_at_10_max value: 44.9499 - type: nauc_mrr_at_10_std value: 6.6139 - type: nauc_mrr_at_10_diff1 value: 46.332699999999996 - type: nauc_mrr_at_20_max value: 45.063900000000004 - type: nauc_mrr_at_20_std value: 6.6114999999999995 - type: nauc_mrr_at_20_diff1 value: 46.3181 - type: nauc_mrr_at_100_max value: 45.2249 - type: nauc_mrr_at_100_std value: 6.8897 - type: nauc_mrr_at_100_diff1 value: 46.373799999999996 - type: nauc_mrr_at_1000_max value: 45.2235 - type: nauc_mrr_at_1000_std value: 6.8732 - type: nauc_mrr_at_1000_diff1 value: 46.399699999999996 - type: main_score value: 35.663 - task: type: Retrieval dataset: name: MTEB HotpotQA (default) type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: ndcg_at_1 value: 75.908 - type: ndcg_at_3 value: 57.643 - type: ndcg_at_5 value: 59.689 - type: ndcg_at_10 value: 61.513 - type: ndcg_at_20 value: 62.721000000000004 - type: ndcg_at_100 value: 64.57000000000001 - type: ndcg_at_1000 value: 65.981 - type: map_at_1 value: 37.954 - type: map_at_3 value: 49.424 - type: map_at_5 value: 50.99399999999999 - type: map_at_10 value: 52.066 - type: map_at_20 value: 52.54600000000001 - type: map_at_100 value: 52.910000000000004 - type: map_at_1000 value: 52.981 - type: recall_at_1 value: 37.954 - type: recall_at_3 value: 53.201 - type: recall_at_5 value: 57.232000000000006 - type: recall_at_10 value: 61.82299999999999 - type: recall_at_20 value: 65.692 - type: recall_at_100 value: 73.896 - type: recall_at_1000 value: 83.255 - type: precision_at_1 value: 75.908 - type: precision_at_3 value: 35.467 - type: precision_at_5 value: 22.893 - type: precision_at_10 value: 12.365 - type: precision_at_20 value: 6.569 - type: precision_at_100 value: 1.478 - type: precision_at_1000 value: 0.167 - type: mrr_at_1 value: 75.90820000000001 - type: mrr_at_3 value: 80.5717 - type: mrr_at_5 value: 81.15299999999999 - type: mrr_at_10 value: 81.4709 - type: mrr_at_20 value: 81.6082 - type: mrr_at_100 value: 81.69239999999999 - type: mrr_at_1000 value: 81.7034 - type: nauc_ndcg_at_1_max value: 53.456199999999995 - type: nauc_ndcg_at_1_std value: -7.1338 - type: nauc_ndcg_at_1_diff1 value: 72.2296 - type: nauc_ndcg_at_3_max value: 30.760199999999998 - type: nauc_ndcg_at_3_std value: -3.1088999999999998 - type: nauc_ndcg_at_3_diff1 value: 29.957099999999997 - type: nauc_ndcg_at_5_max value: 29.404000000000003 - type: nauc_ndcg_at_5_std value: -1.8713 - type: nauc_ndcg_at_5_diff1 value: 27.3461 - type: nauc_ndcg_at_10_max value: 28.0841 - type: nauc_ndcg_at_10_std value: -0.8572 - type: 
nauc_ndcg_at_10_diff1 value: 25.1934 - type: nauc_ndcg_at_20_max value: 27.581099999999996 - type: nauc_ndcg_at_20_std value: -0.1989 - type: nauc_ndcg_at_20_diff1 value: 24.3724 - type: nauc_ndcg_at_100_max value: 27.0287 - type: nauc_ndcg_at_100_std value: 0.7972 - type: nauc_ndcg_at_100_diff1 value: 23.6936 - type: nauc_ndcg_at_1000_max value: 27.070800000000002 - type: nauc_ndcg_at_1000_std value: 0.8108000000000001 - type: nauc_ndcg_at_1000_diff1 value: 24.0546 - type: nauc_map_at_1_max value: 53.456199999999995 - type: nauc_map_at_1_std value: -7.1338 - type: nauc_map_at_1_diff1 value: 72.2296 - type: nauc_map_at_3_max value: 26.085199999999997 - type: nauc_map_at_3_std value: -3.3792999999999997 - type: nauc_map_at_3_diff1 value: 23.335900000000002 - type: nauc_map_at_5_max value: 25.2911 - type: nauc_map_at_5_std value: -2.6356 - type: nauc_map_at_5_diff1 value: 21.7569 - type: nauc_map_at_10_max value: 24.5926 - type: nauc_map_at_10_std value: -2.1178 - type: nauc_map_at_10_diff1 value: 20.6735 - type: nauc_map_at_20_max value: 24.479400000000002 - type: nauc_map_at_20_std value: -1.8454000000000002 - type: nauc_map_at_20_diff1 value: 20.4617 - type: nauc_map_at_100_max value: 24.390600000000003 - type: nauc_map_at_100_std value: -1.6625999999999999 - type: nauc_map_at_100_diff1 value: 20.3774 - type: nauc_map_at_1000_max value: 24.387900000000002 - type: nauc_map_at_1000_std value: -1.6534 - type: nauc_map_at_1000_diff1 value: 20.3887 - type: nauc_recall_at_1_max value: 53.456199999999995 - type: nauc_recall_at_1_std value: -7.1338 - type: nauc_recall_at_1_diff1 value: 72.2296 - type: nauc_recall_at_3_max value: 22.2324 - type: nauc_recall_at_3_std value: -1.4433 - type: nauc_recall_at_3_diff1 value: 14.944799999999999 - type: nauc_recall_at_5_max value: 19.1126 - type: nauc_recall_at_5_std value: 0.9252 - type: nauc_recall_at_5_diff1 value: 9.6723 - type: nauc_recall_at_10_max value: 15.4048 - type: nauc_recall_at_10_std value: 3.3196000000000003 - type: nauc_recall_at_10_diff1 value: 4.2059 - type: nauc_recall_at_20_max value: 12.7643 - type: nauc_recall_at_20_std value: 5.431699999999999 - type: nauc_recall_at_20_diff1 value: 0.46880000000000005 - type: nauc_recall_at_100_max value: 7.538 - type: nauc_recall_at_100_std value: 10.5696 - type: nauc_recall_at_100_diff1 value: -6.472300000000001 - type: nauc_recall_at_1000_max value: 1.7873 - type: nauc_recall_at_1000_std value: 13.6112 - type: nauc_recall_at_1000_diff1 value: -13.081000000000001 - type: nauc_precision_at_1_max value: 53.456199999999995 - type: nauc_precision_at_1_std value: -7.1338 - type: nauc_precision_at_1_diff1 value: 72.2296 - type: nauc_precision_at_3_max value: 22.2324 - type: nauc_precision_at_3_std value: -1.4433 - type: nauc_precision_at_3_diff1 value: 14.944799999999999 - type: nauc_precision_at_5_max value: 19.1126 - type: nauc_precision_at_5_std value: 0.9252 - type: nauc_precision_at_5_diff1 value: 9.6723 - type: nauc_precision_at_10_max value: 15.4048 - type: nauc_precision_at_10_std value: 3.3196000000000003 - type: nauc_precision_at_10_diff1 value: 4.2059 - type: nauc_precision_at_20_max value: 12.7643 - type: nauc_precision_at_20_std value: 5.431699999999999 - type: nauc_precision_at_20_diff1 value: 0.46880000000000005 - type: nauc_precision_at_100_max value: 7.538 - type: nauc_precision_at_100_std value: 10.5696 - type: nauc_precision_at_100_diff1 value: -6.472300000000001 - type: nauc_precision_at_1000_max value: 1.7873 - type: nauc_precision_at_1000_std value: 13.6112 - type: 
nauc_precision_at_1000_diff1 value: -13.081000000000001 - type: nauc_mrr_at_1_max value: 53.456199999999995 - type: nauc_mrr_at_1_std value: -7.1338 - type: nauc_mrr_at_1_diff1 value: 72.2296 - type: nauc_mrr_at_3_max value: 54.94369999999999 - type: nauc_mrr_at_3_std value: -5.0057 - type: nauc_mrr_at_3_diff1 value: 69.6774 - type: nauc_mrr_at_5_max value: 54.970699999999994 - type: nauc_mrr_at_5_std value: -4.3104000000000005 - type: nauc_mrr_at_5_diff1 value: 69.4618 - type: nauc_mrr_at_10_max value: 55.01970000000001 - type: nauc_mrr_at_10_std value: -4.0596 - type: nauc_mrr_at_10_diff1 value: 69.435 - type: nauc_mrr_at_20_max value: 54.9824 - type: nauc_mrr_at_20_std value: -4.1227 - type: nauc_mrr_at_20_diff1 value: 69.4712 - type: nauc_mrr_at_100_max value: 54.9588 - type: nauc_mrr_at_100_std value: -4.1325 - type: nauc_mrr_at_100_diff1 value: 69.498 - type: nauc_mrr_at_1000_max value: 54.95179999999999 - type: nauc_mrr_at_1000_std value: -4.1442 - type: nauc_mrr_at_1000_diff1 value: 69.503 - type: main_score value: 61.513 - task: type: Classification dataset: name: MTEB ImdbClassification (default) type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 63.0232 - type: f1 value: 62.8137 - type: f1_weighted value: 62.8137 - type: ap value: 58.377199999999995 - type: ap_weighted value: 58.377199999999995 - type: main_score value: 63.0232 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (ar) type: miracl/mmteb-miracl config: ar split: dev revision: main metrics: - type: ndcg_at_1 value: 57.459 - type: ndcg_at_3 value: 58.162000000000006 - type: ndcg_at_5 value: 60.831 - type: ndcg_at_10 value: 64.238 - type: ndcg_at_20 value: 66.455 - type: ndcg_at_100 value: 68.67 - type: ndcg_at_1000 value: 69.51 - type: map_at_1 value: 38.064 - type: map_at_3 value: 51.217999999999996 - type: map_at_5 value: 54.364999999999995 - type: map_at_10 value: 56.589999999999996 - type: map_at_20 value: 57.545 - type: map_at_100 value: 58.06400000000001 - type: map_at_1000 value: 58.111999999999995 - type: recall_at_1 value: 38.064 - type: recall_at_3 value: 58.618 - type: recall_at_5 value: 66.353 - type: recall_at_10 value: 75.098 - type: recall_at_20 value: 81.978 - type: recall_at_100 value: 91.203 - type: recall_at_1000 value: 96.706 - type: precision_at_1 value: 57.459 - type: precision_at_3 value: 32.965 - type: precision_at_5 value: 23.405 - type: precision_at_10 value: 13.816 - type: precision_at_20 value: 7.742 - type: precision_at_100 value: 1.7739999999999998 - type: precision_at_1000 value: 0.189 - type: mrr_at_1 value: 57.458600000000004 - type: mrr_at_3 value: 65.4523 - type: mrr_at_5 value: 66.6506 - type: mrr_at_10 value: 67.48100000000001 - type: mrr_at_20 value: 67.7522 - type: mrr_at_100 value: 67.88419999999999 - type: mrr_at_1000 value: 67.8972 - type: nauc_ndcg_at_1_max value: 38.2614 - type: nauc_ndcg_at_1_std value: 1.0798999999999999 - type: nauc_ndcg_at_1_diff1 value: 44.3159 - type: nauc_ndcg_at_3_max value: 35.7658 - type: nauc_ndcg_at_3_std value: -3.9097 - type: nauc_ndcg_at_3_diff1 value: 36.8009 - type: nauc_ndcg_at_5_max value: 37.7543 - type: nauc_ndcg_at_5_std value: -2.7727999999999997 - type: nauc_ndcg_at_5_diff1 value: 36.8992 - type: nauc_ndcg_at_10_max value: 39.9339 - type: nauc_ndcg_at_10_std value: -0.2843 - type: nauc_ndcg_at_10_diff1 value: 36.7359 - type: nauc_ndcg_at_20_max value: 40.9231 - type: nauc_ndcg_at_20_std value: 1.5467 - type: nauc_ndcg_at_20_diff1 value: 36.5693 - 
type: nauc_ndcg_at_100_max value: 41.554 - type: nauc_ndcg_at_100_std value: 3.7470999999999997 - type: nauc_ndcg_at_100_diff1 value: 36.6323 - type: nauc_ndcg_at_1000_max value: 41.1969 - type: nauc_ndcg_at_1000_std value: 2.9972 - type: nauc_ndcg_at_1000_diff1 value: 37.1419 - type: nauc_map_at_1_max value: 21.1612 - type: nauc_map_at_1_std value: -11.2901 - type: nauc_map_at_1_diff1 value: 43.8572 - type: nauc_map_at_3_max value: 31.0197 - type: nauc_map_at_3_std value: -7.5985 - type: nauc_map_at_3_diff1 value: 38.0396 - type: nauc_map_at_5_max value: 33.8261 - type: nauc_map_at_5_std value: -5.501 - type: nauc_map_at_5_diff1 value: 37.2243 - type: nauc_map_at_10_max value: 35.5222 - type: nauc_map_at_10_std value: -3.7351 - type: nauc_map_at_10_diff1 value: 36.8849 - type: nauc_map_at_20_max value: 36.0478 - type: nauc_map_at_20_std value: -2.9566 - type: nauc_map_at_20_diff1 value: 36.7755 - type: nauc_map_at_100_max value: 36.256 - type: nauc_map_at_100_std value: -2.455 - type: nauc_map_at_100_diff1 value: 36.778800000000004 - type: nauc_map_at_1000_max value: 36.249900000000004 - type: nauc_map_at_1000_std value: -2.4678999999999998 - type: nauc_map_at_1000_diff1 value: 36.7962 - type: nauc_recall_at_1_max value: 21.1612 - type: nauc_recall_at_1_std value: -11.2901 - type: nauc_recall_at_1_diff1 value: 43.8572 - type: nauc_recall_at_3_max value: 30.1126 - type: nauc_recall_at_3_std value: -8.705499999999999 - type: nauc_recall_at_3_diff1 value: 33.0274 - type: nauc_recall_at_5_max value: 35.5301 - type: nauc_recall_at_5_std value: -4.1692 - type: nauc_recall_at_5_diff1 value: 30.693900000000003 - type: nauc_recall_at_10_max value: 41.431200000000004 - type: nauc_recall_at_10_std value: 3.1441999999999997 - type: nauc_recall_at_10_diff1 value: 28.5864 - type: nauc_recall_at_20_max value: 46.097100000000005 - type: nauc_recall_at_20_std value: 10.93 - type: nauc_recall_at_20_diff1 value: 26.930100000000003 - type: nauc_recall_at_100_max value: 58.3395 - type: nauc_recall_at_100_std value: 40.328599999999994 - type: nauc_recall_at_100_diff1 value: 21.9273 - type: nauc_recall_at_1000_max value: 72.4689 - type: nauc_recall_at_1000_std value: 59.1972 - type: nauc_recall_at_1000_diff1 value: 27.697899999999997 - type: nauc_precision_at_1_max value: 38.2614 - type: nauc_precision_at_1_std value: 1.0798999999999999 - type: nauc_precision_at_1_diff1 value: 44.3159 - type: nauc_precision_at_3_max value: 35.755700000000004 - type: nauc_precision_at_3_std value: 11.9015 - type: nauc_precision_at_3_diff1 value: 8.3107 - type: nauc_precision_at_5_max value: 33.9849 - type: nauc_precision_at_5_std value: 16.7448 - type: nauc_precision_at_5_diff1 value: 0.6217999999999999 - type: nauc_precision_at_10_max value: 29.9323 - type: nauc_precision_at_10_std value: 21.601100000000002 - type: nauc_precision_at_10_diff1 value: -5.758900000000001 - type: nauc_precision_at_20_max value: 26.142100000000003 - type: nauc_precision_at_20_std value: 25.1079 - type: nauc_precision_at_20_diff1 value: -9.9798 - type: nauc_precision_at_100_max value: 19.456100000000003 - type: nauc_precision_at_100_std value: 28.674899999999997 - type: nauc_precision_at_100_diff1 value: -14.6005 - type: nauc_precision_at_1000_max value: 14.49 - type: nauc_precision_at_1000_std value: 25.480399999999996 - type: nauc_precision_at_1000_diff1 value: -15.570899999999998 - type: nauc_mrr_at_1_max value: 38.2614 - type: nauc_mrr_at_1_std value: 1.0798999999999999 - type: nauc_mrr_at_1_diff1 value: 44.3159 - type: nauc_mrr_at_3_max value: 
42.2344 - type: nauc_mrr_at_3_std value: 1.9994 - type: nauc_mrr_at_3_diff1 value: 41.5794 - type: nauc_mrr_at_5_max value: 42.9754 - type: nauc_mrr_at_5_std value: 2.8443 - type: nauc_mrr_at_5_diff1 value: 41.5702 - type: nauc_mrr_at_10_max value: 43.0856 - type: nauc_mrr_at_10_std value: 3.1882 - type: nauc_mrr_at_10_diff1 value: 41.6792 - type: nauc_mrr_at_20_max value: 42.972300000000004 - type: nauc_mrr_at_20_std value: 3.2651 - type: nauc_mrr_at_20_diff1 value: 41.6405 - type: nauc_mrr_at_100_max value: 42.945499999999996 - type: nauc_mrr_at_100_std value: 3.3168 - type: nauc_mrr_at_100_diff1 value: 41.6818 - type: nauc_mrr_at_1000_max value: 42.9332 - type: nauc_mrr_at_1000_std value: 3.3009999999999997 - type: nauc_mrr_at_1000_diff1 value: 41.6879 - type: main_score value: 64.238 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (bn) type: miracl/mmteb-miracl config: bn split: dev revision: main metrics: - type: ndcg_at_1 value: 60.341 - type: ndcg_at_3 value: 60.805 - type: ndcg_at_5 value: 64.486 - type: ndcg_at_10 value: 68.05499999999999 - type: ndcg_at_20 value: 69.914 - type: ndcg_at_100 value: 72.00800000000001 - type: ndcg_at_1000 value: 72.71600000000001 - type: map_at_1 value: 37.948 - type: map_at_3 value: 52.89 - type: map_at_5 value: 56.845 - type: map_at_10 value: 59.329 - type: map_at_20 value: 60.158 - type: map_at_100 value: 60.73 - type: map_at_1000 value: 60.778 - type: recall_at_1 value: 37.948 - type: recall_at_3 value: 61.095 - type: recall_at_5 value: 71.316 - type: recall_at_10 value: 80.609 - type: recall_at_20 value: 86.141 - type: recall_at_100 value: 94.305 - type: recall_at_1000 value: 98.625 - type: precision_at_1 value: 60.341 - type: precision_at_3 value: 36.172 - type: precision_at_5 value: 26.277 - type: precision_at_10 value: 15.595999999999998 - type: precision_at_20 value: 8.552 - type: precision_at_100 value: 1.9539999999999997 - type: precision_at_1000 value: 0.207 - type: mrr_at_1 value: 60.3406 - type: mrr_at_3 value: 68.8564 - type: mrr_at_5 value: 70.51089999999999 - type: mrr_at_10 value: 71.3043 - type: mrr_at_20 value: 71.5148 - type: mrr_at_100 value: 71.5779 - type: mrr_at_1000 value: 71.5857 - type: nauc_ndcg_at_1_max value: 39.480900000000005 - type: nauc_ndcg_at_1_std value: 4.66 - type: nauc_ndcg_at_1_diff1 value: 43.4568 - type: nauc_ndcg_at_3_max value: 34.6544 - type: nauc_ndcg_at_3_std value: -1.7936 - type: nauc_ndcg_at_3_diff1 value: 39.1951 - type: nauc_ndcg_at_5_max value: 36.9934 - type: nauc_ndcg_at_5_std value: -1.427 - type: nauc_ndcg_at_5_diff1 value: 39.6396 - type: nauc_ndcg_at_10_max value: 38.9518 - type: nauc_ndcg_at_10_std value: 0.1574 - type: nauc_ndcg_at_10_diff1 value: 37.6783 - type: nauc_ndcg_at_20_max value: 38.5914 - type: nauc_ndcg_at_20_std value: 1.8135999999999999 - type: nauc_ndcg_at_20_diff1 value: 38.063 - type: nauc_ndcg_at_100_max value: 40.2409 - type: nauc_ndcg_at_100_std value: 5.0953 - type: nauc_ndcg_at_100_diff1 value: 38.5175 - type: nauc_ndcg_at_1000_max value: 39.9212 - type: nauc_ndcg_at_1000_std value: 4.5499 - type: nauc_ndcg_at_1000_diff1 value: 38.6193 - type: nauc_map_at_1_max value: 17.9005 - type: nauc_map_at_1_std value: -15.587699999999998 - type: nauc_map_at_1_diff1 value: 48.1378 - type: nauc_map_at_3_max value: 28.119300000000003 - type: nauc_map_at_3_std value: -11.3599 - type: nauc_map_at_3_diff1 value: 41.3327 - type: nauc_map_at_5_max value: 32.3026 - type: nauc_map_at_5_std value: -7.741499999999999 - type: nauc_map_at_5_diff1 value: 40.5989 - type: 
nauc_map_at_10_max value: 33.8864 - type: nauc_map_at_10_std value: -5.6699 - type: nauc_map_at_10_diff1 value: 39.586 - type: nauc_map_at_20_max value: 34.0193 - type: nauc_map_at_20_std value: -4.6238 - type: nauc_map_at_20_diff1 value: 39.7785 - type: nauc_map_at_100_max value: 34.475699999999996 - type: nauc_map_at_100_std value: -3.6669 - type: nauc_map_at_100_diff1 value: 39.8911 - type: nauc_map_at_1000_max value: 34.4983 - type: nauc_map_at_1000_std value: -3.6664000000000003 - type: nauc_map_at_1000_diff1 value: 39.9015 - type: nauc_recall_at_1_max value: 17.9005 - type: nauc_recall_at_1_std value: -15.587699999999998 - type: nauc_recall_at_1_diff1 value: 48.1378 - type: nauc_recall_at_3_max value: 27.0807 - type: nauc_recall_at_3_std value: -10.071 - type: nauc_recall_at_3_diff1 value: 35.7245 - type: nauc_recall_at_5_max value: 32.561499999999995 - type: nauc_recall_at_5_std value: -7.4364 - type: nauc_recall_at_5_diff1 value: 32.2967 - type: nauc_recall_at_10_max value: 36.9998 - type: nauc_recall_at_10_std value: -1.9453000000000003 - type: nauc_recall_at_10_diff1 value: 23.9665 - type: nauc_recall_at_20_max value: 34.0415 - type: nauc_recall_at_20_std value: 3.2483999999999997 - type: nauc_recall_at_20_diff1 value: 22.3991 - type: nauc_recall_at_100_max value: 52.1359 - type: nauc_recall_at_100_std value: 39.305299999999995 - type: nauc_recall_at_100_diff1 value: 17.8559 - type: nauc_recall_at_1000_max value: 53.5217 - type: nauc_recall_at_1000_std value: 78.536 - type: nauc_recall_at_1000_diff1 value: -24.390600000000003 - type: nauc_precision_at_1_max value: 39.480900000000005 - type: nauc_precision_at_1_std value: 4.66 - type: nauc_precision_at_1_diff1 value: 43.4568 - type: nauc_precision_at_3_max value: 38.954499999999996 - type: nauc_precision_at_3_std value: 21.0387 - type: nauc_precision_at_3_diff1 value: 4.625900000000001 - type: nauc_precision_at_5_max value: 38.8673 - type: nauc_precision_at_5_std value: 31.512800000000002 - type: nauc_precision_at_5_diff1 value: -4.147399999999999 - type: nauc_precision_at_10_max value: 32.7684 - type: nauc_precision_at_10_std value: 36.237700000000004 - type: nauc_precision_at_10_diff1 value: -13.6404 - type: nauc_precision_at_20_max value: 26.0982 - type: nauc_precision_at_20_std value: 38.5385 - type: nauc_precision_at_20_diff1 value: -16.3735 - type: nauc_precision_at_100_max value: 20.8957 - type: nauc_precision_at_100_std value: 42.1707 - type: nauc_precision_at_100_diff1 value: -18.7092 - type: nauc_precision_at_1000_max value: 17.1788 - type: nauc_precision_at_1000_std value: 39.5064 - type: nauc_precision_at_1000_diff1 value: -20.671400000000002 - type: nauc_mrr_at_1_max value: 39.480900000000005 - type: nauc_mrr_at_1_std value: 4.66 - type: nauc_mrr_at_1_diff1 value: 43.4568 - type: nauc_mrr_at_3_max value: 44.2708 - type: nauc_mrr_at_3_std value: 11.021799999999999 - type: nauc_mrr_at_3_diff1 value: 41.6187 - type: nauc_mrr_at_5_max value: 44.9277 - type: nauc_mrr_at_5_std value: 11.3479 - type: nauc_mrr_at_5_diff1 value: 41.14 - type: nauc_mrr_at_10_max value: 44.6467 - type: nauc_mrr_at_10_std value: 11.3277 - type: nauc_mrr_at_10_diff1 value: 40.5017 - type: nauc_mrr_at_20_max value: 44.298 - type: nauc_mrr_at_20_std value: 11.0061 - type: nauc_mrr_at_20_diff1 value: 40.6235 - type: nauc_mrr_at_100_max value: 44.2517 - type: nauc_mrr_at_100_std value: 10.9246 - type: nauc_mrr_at_100_diff1 value: 40.7234 - type: nauc_mrr_at_1000_max value: 44.241 - type: nauc_mrr_at_1000_std value: 10.9113 - type: 
nauc_mrr_at_1000_diff1 value: 40.7358 - type: main_score value: 68.05499999999999 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (de) type: miracl/mmteb-miracl config: de split: dev revision: main metrics: - type: ndcg_at_1 value: 45.574 - type: ndcg_at_3 value: 41.243 - type: ndcg_at_5 value: 43.86 - type: ndcg_at_10 value: 48.123 - type: ndcg_at_20 value: 51.785000000000004 - type: ndcg_at_100 value: 56.04900000000001 - type: ndcg_at_1000 value: 57.979 - type: map_at_1 value: 20.401 - type: map_at_3 value: 31.308000000000003 - type: map_at_5 value: 35.356 - type: map_at_10 value: 38.24 - type: map_at_20 value: 39.879 - type: map_at_100 value: 40.979 - type: map_at_1000 value: 41.103 - type: recall_at_1 value: 20.401 - type: recall_at_3 value: 36.573 - type: recall_at_5 value: 47.495 - type: recall_at_10 value: 58.779 - type: recall_at_20 value: 69.06099999999999 - type: recall_at_100 value: 85.84 - type: recall_at_1000 value: 97.36399999999999 - type: precision_at_1 value: 45.574 - type: precision_at_3 value: 30.055 - type: precision_at_5 value: 23.344 - type: precision_at_10 value: 14.754000000000001 - type: precision_at_20 value: 9.033 - type: precision_at_100 value: 2.275 - type: precision_at_1000 value: 0.258 - type: mrr_at_1 value: 45.5738 - type: mrr_at_3 value: 52.18580000000001 - type: mrr_at_5 value: 54.5628 - type: mrr_at_10 value: 55.604699999999994 - type: mrr_at_20 value: 55.9833 - type: mrr_at_100 value: 56.2015 - type: mrr_at_1000 value: 56.2431 - type: nauc_ndcg_at_1_max value: 48.355 - type: nauc_ndcg_at_1_std value: 15.508 - type: nauc_ndcg_at_1_diff1 value: 42.6569 - type: nauc_ndcg_at_3_max value: 45.5945 - type: nauc_ndcg_at_3_std value: 16.6953 - type: nauc_ndcg_at_3_diff1 value: 38.6081 - type: nauc_ndcg_at_5_max value: 43.3231 - type: nauc_ndcg_at_5_std value: 14.394100000000002 - type: nauc_ndcg_at_5_diff1 value: 38.846799999999995 - type: nauc_ndcg_at_10_max value: 44.0599 - type: nauc_ndcg_at_10_std value: 16.0584 - type: nauc_ndcg_at_10_diff1 value: 38.2432 - type: nauc_ndcg_at_20_max value: 45.8588 - type: nauc_ndcg_at_20_std value: 17.531 - type: nauc_ndcg_at_20_diff1 value: 38.982099999999996 - type: nauc_ndcg_at_100_max value: 48.7095 - type: nauc_ndcg_at_100_std value: 20.7655 - type: nauc_ndcg_at_100_diff1 value: 39.7349 - type: nauc_ndcg_at_1000_max value: 48.024499999999996 - type: nauc_ndcg_at_1000_std value: 20.1299 - type: nauc_ndcg_at_1000_diff1 value: 39.8087 - type: nauc_map_at_1_max value: 30.0998 - type: nauc_map_at_1_std value: 4.7429 - type: nauc_map_at_1_diff1 value: 45.4045 - type: nauc_map_at_3_max value: 39.053399999999996 - type: nauc_map_at_3_std value: 10.807 - type: nauc_map_at_3_diff1 value: 40.8294 - type: nauc_map_at_5_max value: 39.204499999999996 - type: nauc_map_at_5_std value: 11.5165 - type: nauc_map_at_5_diff1 value: 38.9168 - type: nauc_map_at_10_max value: 41.099799999999995 - type: nauc_map_at_10_std value: 13.758899999999999 - type: nauc_map_at_10_diff1 value: 38.2256 - type: nauc_map_at_20_max value: 42.2131 - type: nauc_map_at_20_std value: 14.366000000000001 - type: nauc_map_at_20_diff1 value: 38.572 - type: nauc_map_at_100_max value: 43.0508 - type: nauc_map_at_100_std value: 15.060100000000002 - type: nauc_map_at_100_diff1 value: 38.9831 - type: nauc_map_at_1000_max value: 43.048700000000004 - type: nauc_map_at_1000_std value: 15.085999999999999 - type: nauc_map_at_1000_diff1 value: 38.9957 - type: nauc_recall_at_1_max value: 30.0998 - type: nauc_recall_at_1_std value: 4.7429 - type: 
nauc_recall_at_1_diff1 value: 45.4045 - type: nauc_recall_at_3_max value: 36.9204 - type: nauc_recall_at_3_std value: 11.2734 - type: nauc_recall_at_3_diff1 value: 37.431 - type: nauc_recall_at_5_max value: 33.4392 - type: nauc_recall_at_5_std value: 9.4283 - type: nauc_recall_at_5_diff1 value: 32.7815 - type: nauc_recall_at_10_max value: 34.427099999999996 - type: nauc_recall_at_10_std value: 13.147400000000001 - type: nauc_recall_at_10_diff1 value: 29.394199999999998 - type: nauc_recall_at_20_max value: 36.8459 - type: nauc_recall_at_20_std value: 16.1323 - type: nauc_recall_at_20_diff1 value: 29.9502 - type: nauc_recall_at_100_max value: 56.360600000000005 - type: nauc_recall_at_100_std value: 40.8465 - type: nauc_recall_at_100_diff1 value: 33.2542 - type: nauc_recall_at_1000_max value: 62.121 - type: nauc_recall_at_1000_std value: 65.4518 - type: nauc_recall_at_1000_diff1 value: 23.9221 - type: nauc_precision_at_1_max value: 48.355 - type: nauc_precision_at_1_std value: 15.508 - type: nauc_precision_at_1_diff1 value: 42.6569 - type: nauc_precision_at_3_max value: 46.72 - type: nauc_precision_at_3_std value: 21.5057 - type: nauc_precision_at_3_diff1 value: 23.3313 - type: nauc_precision_at_5_max value: 39.5888 - type: nauc_precision_at_5_std value: 20.930699999999998 - type: nauc_precision_at_5_diff1 value: 15.661900000000001 - type: nauc_precision_at_10_max value: 37.8371 - type: nauc_precision_at_10_std value: 25.2882 - type: nauc_precision_at_10_diff1 value: 8.7263 - type: nauc_precision_at_20_max value: 34.7638 - type: nauc_precision_at_20_std value: 25.795800000000003 - type: nauc_precision_at_20_diff1 value: 5.5533 - type: nauc_precision_at_100_max value: 31.1513 - type: nauc_precision_at_100_std value: 28.7441 - type: nauc_precision_at_100_diff1 value: -0.2107 - type: nauc_precision_at_1000_max value: 24.329700000000003 - type: nauc_precision_at_1000_std value: 27.4593 - type: nauc_precision_at_1000_diff1 value: -5.1174 - type: nauc_mrr_at_1_max value: 48.355 - type: nauc_mrr_at_1_std value: 15.508 - type: nauc_mrr_at_1_diff1 value: 42.6569 - type: nauc_mrr_at_3_max value: 50.1901 - type: nauc_mrr_at_3_std value: 17.6811 - type: nauc_mrr_at_3_diff1 value: 42.7492 - type: nauc_mrr_at_5_max value: 50.210699999999996 - type: nauc_mrr_at_5_std value: 17.4661 - type: nauc_mrr_at_5_diff1 value: 42.9336 - type: nauc_mrr_at_10_max value: 49.9472 - type: nauc_mrr_at_10_std value: 17.3815 - type: nauc_mrr_at_10_diff1 value: 42.4177 - type: nauc_mrr_at_20_max value: 49.9918 - type: nauc_mrr_at_20_std value: 17.7321 - type: nauc_mrr_at_20_diff1 value: 42.5105 - type: nauc_mrr_at_100_max value: 49.9862 - type: nauc_mrr_at_100_std value: 17.7582 - type: nauc_mrr_at_100_diff1 value: 42.5947 - type: nauc_mrr_at_1000_max value: 49.9819 - type: nauc_mrr_at_1000_std value: 17.7188 - type: nauc_mrr_at_1000_diff1 value: 42.620000000000005 - type: main_score value: 48.123 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (en) type: miracl/mmteb-miracl config: en split: dev revision: main metrics: - type: ndcg_at_1 value: 45.556999999999995 - type: ndcg_at_3 value: 43.969 - type: ndcg_at_5 value: 45.551 - type: ndcg_at_10 value: 49.372 - type: ndcg_at_20 value: 52.86300000000001 - type: ndcg_at_100 value: 57.28 - type: ndcg_at_1000 value: 59.187 - type: map_at_1 value: 21.785 - type: map_at_3 value: 32.679 - type: map_at_5 value: 35.885 - type: map_at_10 value: 38.836 - type: map_at_20 value: 40.425 - type: map_at_100 value: 41.592 - type: map_at_1000 value: 41.749 - type: recall_at_1 value: 
21.785 - type: recall_at_3 value: 40.403 - type: recall_at_5 value: 48.498999999999995 - type: recall_at_10 value: 59.513000000000005 - type: recall_at_20 value: 69.357 - type: recall_at_100 value: 85.785 - type: recall_at_1000 value: 96.041 - type: precision_at_1 value: 45.556999999999995 - type: precision_at_3 value: 30.287999999999997 - type: precision_at_5 value: 23.204 - type: precision_at_10 value: 15.006 - type: precision_at_20 value: 9.118 - type: precision_at_100 value: 2.404 - type: precision_at_1000 value: 0.27799999999999997 - type: mrr_at_1 value: 45.5569 - type: mrr_at_3 value: 55.4234 - type: mrr_at_5 value: 57.3884 - type: mrr_at_10 value: 58.391400000000004 - type: mrr_at_20 value: 58.7477 - type: mrr_at_100 value: 58.93620000000001 - type: mrr_at_1000 value: 58.949600000000004 - type: nauc_ndcg_at_1_max value: 34.794799999999995 - type: nauc_ndcg_at_1_std value: 2.102 - type: nauc_ndcg_at_1_diff1 value: 33.8113 - type: nauc_ndcg_at_3_max value: 31.6187 - type: nauc_ndcg_at_3_std value: -1.3106 - type: nauc_ndcg_at_3_diff1 value: 28.5676 - type: nauc_ndcg_at_5_max value: 30.4962 - type: nauc_ndcg_at_5_std value: -1.016 - type: nauc_ndcg_at_5_diff1 value: 28.0032 - type: nauc_ndcg_at_10_max value: 29.460900000000002 - type: nauc_ndcg_at_10_std value: -0.6328 - type: nauc_ndcg_at_10_diff1 value: 26.351000000000003 - type: nauc_ndcg_at_20_max value: 31.443900000000003 - type: nauc_ndcg_at_20_std value: 1.1067 - type: nauc_ndcg_at_20_diff1 value: 26.2068 - type: nauc_ndcg_at_100_max value: 34.273199999999996 - type: nauc_ndcg_at_100_std value: 5.1303 - type: nauc_ndcg_at_100_diff1 value: 26.4772 - type: nauc_ndcg_at_1000_max value: 34.1663 - type: nauc_ndcg_at_1000_std value: 5.1834999999999996 - type: nauc_ndcg_at_1000_diff1 value: 26.6768 - type: nauc_map_at_1_max value: 23.6327 - type: nauc_map_at_1_std value: -6.3777 - type: nauc_map_at_1_diff1 value: 32.028800000000004 - type: nauc_map_at_3_max value: 27.869300000000003 - type: nauc_map_at_3_std value: -5.9788 - type: nauc_map_at_3_diff1 value: 29.8636 - type: nauc_map_at_5_max value: 28.6043 - type: nauc_map_at_5_std value: -4.4539 - type: nauc_map_at_5_diff1 value: 29.044999999999998 - type: nauc_map_at_10_max value: 29.065600000000003 - type: nauc_map_at_10_std value: -3.2986 - type: nauc_map_at_10_diff1 value: 27.8952 - type: nauc_map_at_20_max value: 30.191200000000002 - type: nauc_map_at_20_std value: -2.4181999999999997 - type: nauc_map_at_20_diff1 value: 27.973399999999998 - type: nauc_map_at_100_max value: 31.0841 - type: nauc_map_at_100_std value: -1.1223 - type: nauc_map_at_100_diff1 value: 28.089199999999998 - type: nauc_map_at_1000_max value: 31.114399999999996 - type: nauc_map_at_1000_std value: -1.0668 - type: nauc_map_at_1000_diff1 value: 28.098 - type: nauc_recall_at_1_max value: 23.6327 - type: nauc_recall_at_1_std value: -6.3777 - type: nauc_recall_at_1_diff1 value: 32.028800000000004 - type: nauc_recall_at_3_max value: 20.9084 - type: nauc_recall_at_3_std value: -7.3713 - type: nauc_recall_at_3_diff1 value: 23.488300000000002 - type: nauc_recall_at_5_max value: 20.4249 - type: nauc_recall_at_5_std value: -3.8598 - type: nauc_recall_at_5_diff1 value: 20.935200000000002 - type: nauc_recall_at_10_max value: 17.5405 - type: nauc_recall_at_10_std value: -3.5011 - type: nauc_recall_at_10_diff1 value: 16.9646 - type: nauc_recall_at_20_max value: 20.6496 - type: nauc_recall_at_20_std value: 0.1168 - type: nauc_recall_at_20_diff1 value: 14.2125 - type: nauc_recall_at_100_max value: 31.916099999999997 - type: 
nauc_recall_at_100_std value: 20.2048 - type: nauc_recall_at_100_diff1 value: 9.3709 - type: nauc_recall_at_1000_max value: 46.2569 - type: nauc_recall_at_1000_std value: 55.2292 - type: nauc_recall_at_1000_diff1 value: -0.2909 - type: nauc_precision_at_1_max value: 34.794799999999995 - type: nauc_precision_at_1_std value: 2.102 - type: nauc_precision_at_1_diff1 value: 33.8113 - type: nauc_precision_at_3_max value: 31.221700000000002 - type: nauc_precision_at_3_std value: 7.513 - type: nauc_precision_at_3_diff1 value: 15.9311 - type: nauc_precision_at_5_max value: 28.5241 - type: nauc_precision_at_5_std value: 12.2286 - type: nauc_precision_at_5_diff1 value: 9.5435 - type: nauc_precision_at_10_max value: 24.3663 - type: nauc_precision_at_10_std value: 15.867700000000001 - type: nauc_precision_at_10_diff1 value: 2.396 - type: nauc_precision_at_20_max value: 22.322300000000002 - type: nauc_precision_at_20_std value: 18.3505 - type: nauc_precision_at_20_diff1 value: 0.0719 - type: nauc_precision_at_100_max value: 18.8029 - type: nauc_precision_at_100_std value: 24.728 - type: nauc_precision_at_100_diff1 value: -4.0887 - type: nauc_precision_at_1000_max value: 12.315800000000001 - type: nauc_precision_at_1000_std value: 20.9058 - type: nauc_precision_at_1000_diff1 value: -6.4069 - type: nauc_mrr_at_1_max value: 34.794799999999995 - type: nauc_mrr_at_1_std value: 2.102 - type: nauc_mrr_at_1_diff1 value: 33.8113 - type: nauc_mrr_at_3_max value: 33.3929 - type: nauc_mrr_at_3_std value: 3.4512 - type: nauc_mrr_at_3_diff1 value: 29.718 - type: nauc_mrr_at_5_max value: 34.586 - type: nauc_mrr_at_5_std value: 5.4722 - type: nauc_mrr_at_5_diff1 value: 30.0744 - type: nauc_mrr_at_10_max value: 34.3898 - type: nauc_mrr_at_10_std value: 4.854 - type: nauc_mrr_at_10_diff1 value: 29.979 - type: nauc_mrr_at_20_max value: 34.516000000000005 - type: nauc_mrr_at_20_std value: 4.9616 - type: nauc_mrr_at_20_diff1 value: 29.907899999999998 - type: nauc_mrr_at_100_max value: 34.515499999999996 - type: nauc_mrr_at_100_std value: 4.8578 - type: nauc_mrr_at_100_diff1 value: 29.997 - type: nauc_mrr_at_1000_max value: 34.5046 - type: nauc_mrr_at_1000_std value: 4.8536 - type: nauc_mrr_at_1000_diff1 value: 30.0019 - type: main_score value: 49.372 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (es) type: miracl/mmteb-miracl config: es split: dev revision: main metrics: - type: ndcg_at_1 value: 55.71 - type: ndcg_at_3 value: 47.981 - type: ndcg_at_5 value: 46.583999999999996 - type: ndcg_at_10 value: 49.688 - type: ndcg_at_20 value: 54.437999999999995 - type: ndcg_at_100 value: 60.492999999999995 - type: ndcg_at_1000 value: 62.922 - type: map_at_1 value: 16.38 - type: map_at_3 value: 27.137 - type: map_at_5 value: 31.81 - type: map_at_10 value: 36.986999999999995 - type: map_at_20 value: 39.749 - type: map_at_100 value: 41.69 - type: map_at_1000 value: 41.924 - type: recall_at_1 value: 16.38 - type: recall_at_3 value: 31.502999999999997 - type: recall_at_5 value: 40.355999999999995 - type: recall_at_10 value: 54.155 - type: recall_at_20 value: 65.32900000000001 - type: recall_at_100 value: 85.136 - type: recall_at_1000 value: 96.951 - type: precision_at_1 value: 55.71 - type: precision_at_3 value: 39.969 - type: precision_at_5 value: 32.469 - type: precision_at_10 value: 23.071 - type: precision_at_20 value: 14.482999999999999 - type: precision_at_100 value: 3.8920000000000003 - type: precision_at_1000 value: 0.44799999999999995 - type: mrr_at_1 value: 55.709900000000005 - type: mrr_at_3 value: 63.9146 - 
type: mrr_at_5 value: 65.4192 - type: mrr_at_10 value: 66.4602 - type: mrr_at_20 value: 66.71249999999999 - type: mrr_at_100 value: 66.8844 - type: mrr_at_1000 value: 66.893 - type: nauc_ndcg_at_1_max value: 39.4623 - type: nauc_ndcg_at_1_std value: 18.2237 - type: nauc_ndcg_at_1_diff1 value: 34.3382 - type: nauc_ndcg_at_3_max value: 33.3518 - type: nauc_ndcg_at_3_std value: 14.2885 - type: nauc_ndcg_at_3_diff1 value: 22.4965 - type: nauc_ndcg_at_5_max value: 31.5822 - type: nauc_ndcg_at_5_std value: 10.4064 - type: nauc_ndcg_at_5_diff1 value: 24.4417 - type: nauc_ndcg_at_10_max value: 33.4838 - type: nauc_ndcg_at_10_std value: 11.5351 - type: nauc_ndcg_at_10_diff1 value: 27.1137 - type: nauc_ndcg_at_20_max value: 38.831700000000005 - type: nauc_ndcg_at_20_std value: 18.784 - type: nauc_ndcg_at_20_diff1 value: 27.408700000000003 - type: nauc_ndcg_at_100_max value: 42.8785 - type: nauc_ndcg_at_100_std value: 24.596 - type: nauc_ndcg_at_100_diff1 value: 25.8252 - type: nauc_ndcg_at_1000_max value: 42.023500000000006 - type: nauc_ndcg_at_1000_std value: 23.2727 - type: nauc_ndcg_at_1000_diff1 value: 24.8455 - type: nauc_map_at_1_max value: 10.5243 - type: nauc_map_at_1_std value: -10.143699999999999 - type: nauc_map_at_1_diff1 value: 32.2699 - type: nauc_map_at_3_max value: 16.902900000000002 - type: nauc_map_at_3_std value: -5.6548 - type: nauc_map_at_3_diff1 value: 26.238699999999998 - type: nauc_map_at_5_max value: 21.4475 - type: nauc_map_at_5_std value: -2.1950000000000003 - type: nauc_map_at_5_diff1 value: 25.2077 - type: nauc_map_at_10_max value: 27.2231 - type: nauc_map_at_10_std value: 3.9522000000000004 - type: nauc_map_at_10_diff1 value: 26.0175 - type: nauc_map_at_20_max value: 30.8106 - type: nauc_map_at_20_std value: 8.9534 - type: nauc_map_at_20_diff1 value: 25.8477 - type: nauc_map_at_100_max value: 32.5864 - type: nauc_map_at_100_std value: 11.2878 - type: nauc_map_at_100_diff1 value: 25.3496 - type: nauc_map_at_1000_max value: 32.573 - type: nauc_map_at_1000_std value: 11.2812 - type: nauc_map_at_1000_diff1 value: 25.2334 - type: nauc_recall_at_1_max value: 10.5243 - type: nauc_recall_at_1_std value: -10.143699999999999 - type: nauc_recall_at_1_diff1 value: 32.2699 - type: nauc_recall_at_3_max value: 12.1019 - type: nauc_recall_at_3_std value: -8.2304 - type: nauc_recall_at_3_diff1 value: 22.9436 - type: nauc_recall_at_5_max value: 15.0438 - type: nauc_recall_at_5_std value: -6.216200000000001 - type: nauc_recall_at_5_diff1 value: 21.5158 - type: nauc_recall_at_10_max value: 22.825100000000003 - type: nauc_recall_at_10_std value: 4.994400000000001 - type: nauc_recall_at_10_diff1 value: 22.4346 - type: nauc_recall_at_20_max value: 33.1395 - type: nauc_recall_at_20_std value: 19.5456 - type: nauc_recall_at_20_diff1 value: 24.0575 - type: nauc_recall_at_100_max value: 50.0911 - type: nauc_recall_at_100_std value: 45.542300000000004 - type: nauc_recall_at_100_diff1 value: 19.9322 - type: nauc_recall_at_1000_max value: 73.2055 - type: nauc_recall_at_1000_std value: 74.8121 - type: nauc_recall_at_1000_diff1 value: 6.7021999999999995 - type: nauc_precision_at_1_max value: 39.4623 - type: nauc_precision_at_1_std value: 18.2237 - type: nauc_precision_at_1_diff1 value: 34.3382 - type: nauc_precision_at_3_max value: 37.2684 - type: nauc_precision_at_3_std value: 24.1559 - type: nauc_precision_at_3_diff1 value: 10.6349 - type: nauc_precision_at_5_max value: 37.9483 - type: nauc_precision_at_5_std value: 26.973000000000003 - type: nauc_precision_at_5_diff1 value: 6.722499999999999 - 
type: nauc_precision_at_10_max value: 41.4223 - type: nauc_precision_at_10_std value: 35.661100000000005 - type: nauc_precision_at_10_diff1 value: 3.8463 - type: nauc_precision_at_20_max value: 41.917300000000004 - type: nauc_precision_at_20_std value: 42.0563 - type: nauc_precision_at_20_diff1 value: 0.4484 - type: nauc_precision_at_100_max value: 37.4895 - type: nauc_precision_at_100_std value: 45.1734 - type: nauc_precision_at_100_diff1 value: -7.4965 - type: nauc_precision_at_1000_max value: 27.853299999999997 - type: nauc_precision_at_1000_std value: 36.997 - type: nauc_precision_at_1000_diff1 value: -13.5956 - type: nauc_mrr_at_1_max value: 39.4623 - type: nauc_mrr_at_1_std value: 18.2237 - type: nauc_mrr_at_1_diff1 value: 34.3382 - type: nauc_mrr_at_3_max value: 43.2341 - type: nauc_mrr_at_3_std value: 22.287599999999998 - type: nauc_mrr_at_3_diff1 value: 32.1338 - type: nauc_mrr_at_5_max value: 43.1729 - type: nauc_mrr_at_5_std value: 21.9232 - type: nauc_mrr_at_5_diff1 value: 32.0241 - type: nauc_mrr_at_10_max value: 43.8014 - type: nauc_mrr_at_10_std value: 23.1591 - type: nauc_mrr_at_10_diff1 value: 31.898100000000003 - type: nauc_mrr_at_20_max value: 43.7825 - type: nauc_mrr_at_20_std value: 23.1845 - type: nauc_mrr_at_20_diff1 value: 32.2338 - type: nauc_mrr_at_100_max value: 43.6665 - type: nauc_mrr_at_100_std value: 23.0026 - type: nauc_mrr_at_100_diff1 value: 32.177299999999995 - type: nauc_mrr_at_1000_max value: 43.6579 - type: nauc_mrr_at_1000_std value: 22.986500000000003 - type: nauc_mrr_at_1000_diff1 value: 32.1927 - type: main_score value: 49.688 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (fa) type: miracl/mmteb-miracl config: fa split: dev revision: main metrics: - type: ndcg_at_1 value: 39.873 - type: ndcg_at_3 value: 42.738 - type: ndcg_at_5 value: 45.843 - type: ndcg_at_10 value: 50.226000000000006 - type: ndcg_at_20 value: 52.92 - type: ndcg_at_100 value: 56.516999999999996 - type: ndcg_at_1000 value: 57.967 - type: map_at_1 value: 25.369000000000003 - type: map_at_3 value: 35.791000000000004 - type: map_at_5 value: 39.027 - type: map_at_10 value: 41.759 - type: map_at_20 value: 42.899 - type: map_at_100 value: 43.637 - type: map_at_1000 value: 43.734 - type: recall_at_1 value: 25.369000000000003 - type: recall_at_3 value: 43.808 - type: recall_at_5 value: 52.378 - type: recall_at_10 value: 63.775999999999996 - type: recall_at_20 value: 72.099 - type: recall_at_100 value: 87.68599999999999 - type: recall_at_1000 value: 96.71 - type: precision_at_1 value: 39.873 - type: precision_at_3 value: 25.580000000000002 - type: precision_at_5 value: 19.367 - type: precision_at_10 value: 12.437 - type: precision_at_20 value: 7.247000000000001 - type: precision_at_100 value: 1.807 - type: precision_at_1000 value: 0.202 - type: mrr_at_1 value: 39.8734 - type: mrr_at_3 value: 49.1297 - type: mrr_at_5 value: 50.8703 - type: mrr_at_10 value: 52.0393 - type: mrr_at_20 value: 52.428 - type: mrr_at_100 value: 52.7259 - type: mrr_at_1000 value: 52.7512 - type: nauc_ndcg_at_1_max value: 37.2005 - type: nauc_ndcg_at_1_std value: 7.2856000000000005 - type: nauc_ndcg_at_1_diff1 value: 24.3391 - type: nauc_ndcg_at_3_max value: 34.9919 - type: nauc_ndcg_at_3_std value: 4.1377 - type: nauc_ndcg_at_3_diff1 value: 22.7251 - type: nauc_ndcg_at_5_max value: 35.3802 - type: nauc_ndcg_at_5_std value: 5.1718 - type: nauc_ndcg_at_5_diff1 value: 20.7966 - type: nauc_ndcg_at_10_max value: 37.5244 - type: nauc_ndcg_at_10_std value: 8.4159 - type: nauc_ndcg_at_10_diff1 value: 20.3825 - 
type: nauc_ndcg_at_20_max value: 39.457 - type: nauc_ndcg_at_20_std value: 10.9359 - type: nauc_ndcg_at_20_diff1 value: 20.1633 - type: nauc_ndcg_at_100_max value: 40.605799999999995 - type: nauc_ndcg_at_100_std value: 12.8063 - type: nauc_ndcg_at_100_diff1 value: 20.1186 - type: nauc_ndcg_at_1000_max value: 39.6952 - type: nauc_ndcg_at_1000_std value: 12.0795 - type: nauc_ndcg_at_1000_diff1 value: 20.1048 - type: nauc_map_at_1_max value: 22.758200000000002 - type: nauc_map_at_1_std value: -4.4208 - type: nauc_map_at_1_diff1 value: 32.8042 - type: nauc_map_at_3_max value: 29.5871 - type: nauc_map_at_3_std value: -1.0369 - type: nauc_map_at_3_diff1 value: 26.7399 - type: nauc_map_at_5_max value: 31.630799999999997 - type: nauc_map_at_5_std value: 1.133 - type: nauc_map_at_5_diff1 value: 23.9264 - type: nauc_map_at_10_max value: 33.5866 - type: nauc_map_at_10_std value: 3.8602999999999996 - type: nauc_map_at_10_diff1 value: 23.0431 - type: nauc_map_at_20_max value: 34.7099 - type: nauc_map_at_20_std value: 5.2187 - type: nauc_map_at_20_diff1 value: 22.751099999999997 - type: nauc_map_at_100_max value: 35.0549 - type: nauc_map_at_100_std value: 5.7357 - type: nauc_map_at_100_diff1 value: 22.7261 - type: nauc_map_at_1000_max value: 35.02 - type: nauc_map_at_1000_std value: 5.7542 - type: nauc_map_at_1000_diff1 value: 22.717000000000002 - type: nauc_recall_at_1_max value: 22.758200000000002 - type: nauc_recall_at_1_std value: -4.4208 - type: nauc_recall_at_1_diff1 value: 32.8042 - type: nauc_recall_at_3_max value: 29.2098 - type: nauc_recall_at_3_std value: 0.1884 - type: nauc_recall_at_3_diff1 value: 21.9167 - type: nauc_recall_at_5_max value: 30.634099999999997 - type: nauc_recall_at_5_std value: 2.9632 - type: nauc_recall_at_5_diff1 value: 15.8588 - type: nauc_recall_at_10_max value: 34.958 - type: nauc_recall_at_10_std value: 10.6769 - type: nauc_recall_at_10_diff1 value: 13.9022 - type: nauc_recall_at_20_max value: 40.5569 - type: nauc_recall_at_20_std value: 18.1782 - type: nauc_recall_at_20_diff1 value: 13.4488 - type: nauc_recall_at_100_max value: 54.6126 - type: nauc_recall_at_100_std value: 39.507999999999996 - type: nauc_recall_at_100_diff1 value: 10.122 - type: nauc_recall_at_1000_max value: 64.1019 - type: nauc_recall_at_1000_std value: 65.3022 - type: nauc_recall_at_1000_diff1 value: -0.9008 - type: nauc_precision_at_1_max value: 37.2005 - type: nauc_precision_at_1_std value: 7.2856000000000005 - type: nauc_precision_at_1_diff1 value: 24.3391 - type: nauc_precision_at_3_max value: 40.8492 - type: nauc_precision_at_3_std value: 14.955099999999998 - type: nauc_precision_at_3_diff1 value: 5.8083 - type: nauc_precision_at_5_max value: 37.6411 - type: nauc_precision_at_5_std value: 20.1371 - type: nauc_precision_at_5_diff1 value: -4.7182 - type: nauc_precision_at_10_max value: 35.9345 - type: nauc_precision_at_10_std value: 27.593899999999998 - type: nauc_precision_at_10_diff1 value: -9.1429 - type: nauc_precision_at_20_max value: 33.7364 - type: nauc_precision_at_20_std value: 31.8223 - type: nauc_precision_at_20_diff1 value: -11.98 - type: nauc_precision_at_100_max value: 25.7037 - type: nauc_precision_at_100_std value: 32.6954 - type: nauc_precision_at_100_diff1 value: -15.2838 - type: nauc_precision_at_1000_max value: 16.6881 - type: nauc_precision_at_1000_std value: 27.787200000000002 - type: nauc_precision_at_1000_diff1 value: -16.964000000000002 - type: nauc_mrr_at_1_max value: 37.2005 - type: nauc_mrr_at_1_std value: 7.2856000000000005 - type: nauc_mrr_at_1_diff1 value: 24.3391 
- type: nauc_mrr_at_3_max value: 40.9867 - type: nauc_mrr_at_3_std value: 10.7794 - type: nauc_mrr_at_3_diff1 value: 21.0522 - type: nauc_mrr_at_5_max value: 40.7712 - type: nauc_mrr_at_5_std value: 11.2036 - type: nauc_mrr_at_5_diff1 value: 20.3769 - type: nauc_mrr_at_10_max value: 40.8976 - type: nauc_mrr_at_10_std value: 11.7276 - type: nauc_mrr_at_10_diff1 value: 20.261699999999998 - type: nauc_mrr_at_20_max value: 40.8283 - type: nauc_mrr_at_20_std value: 11.6606 - type: nauc_mrr_at_20_diff1 value: 20.430300000000003 - type: nauc_mrr_at_100_max value: 40.9123 - type: nauc_mrr_at_100_std value: 11.6937 - type: nauc_mrr_at_100_diff1 value: 20.4759 - type: nauc_mrr_at_1000_max value: 40.895399999999995 - type: nauc_mrr_at_1000_std value: 11.6648 - type: nauc_mrr_at_1000_diff1 value: 20.4831 - type: main_score value: 50.226000000000006 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (fi) type: miracl/mmteb-miracl config: fi split: dev revision: main metrics: - type: ndcg_at_1 value: 60.818000000000005 - type: ndcg_at_3 value: 60.06 - type: ndcg_at_5 value: 63.842 - type: ndcg_at_10 value: 67.46 - type: ndcg_at_20 value: 69.692 - type: ndcg_at_100 value: 71.516 - type: ndcg_at_1000 value: 72.18 - type: map_at_1 value: 39.263999999999996 - type: map_at_3 value: 53.723 - type: map_at_5 value: 57.118 - type: map_at_10 value: 59.394000000000005 - type: map_at_20 value: 60.339 - type: map_at_100 value: 60.739 - type: map_at_1000 value: 60.782000000000004 - type: recall_at_1 value: 39.263999999999996 - type: recall_at_3 value: 61.05500000000001 - type: recall_at_5 value: 69.774 - type: recall_at_10 value: 78.577 - type: recall_at_20 value: 85.435 - type: recall_at_100 value: 93.291 - type: recall_at_1000 value: 97.493 - type: precision_at_1 value: 60.818000000000005 - type: precision_at_3 value: 35.064 - type: precision_at_5 value: 24.815 - type: precision_at_10 value: 14.445 - type: precision_at_20 value: 8.049000000000001 - type: precision_at_100 value: 1.7819999999999998 - type: precision_at_1000 value: 0.187 - type: mrr_at_1 value: 60.8183 - type: mrr_at_3 value: 68.7516 - type: mrr_at_5 value: 70.1678 - type: mrr_at_10 value: 70.85040000000001 - type: mrr_at_20 value: 71.1314 - type: mrr_at_100 value: 71.2271 - type: mrr_at_1000 value: 71.2334 - type: nauc_ndcg_at_1_max value: 39.623000000000005 - type: nauc_ndcg_at_1_std value: -0.6057 - type: nauc_ndcg_at_1_diff1 value: 50.2688 - type: nauc_ndcg_at_3_max value: 36.2982 - type: nauc_ndcg_at_3_std value: -0.4931 - type: nauc_ndcg_at_3_diff1 value: 41.5229 - type: nauc_ndcg_at_5_max value: 37.1813 - type: nauc_ndcg_at_5_std value: -1.1114000000000002 - type: nauc_ndcg_at_5_diff1 value: 41.429700000000004 - type: nauc_ndcg_at_10_max value: 39.3656 - type: nauc_ndcg_at_10_std value: 0.2202 - type: nauc_ndcg_at_10_diff1 value: 41.4453 - type: nauc_ndcg_at_20_max value: 40.186 - type: nauc_ndcg_at_20_std value: 2.8166 - type: nauc_ndcg_at_20_diff1 value: 41.0657 - type: nauc_ndcg_at_100_max value: 40.2423 - type: nauc_ndcg_at_100_std value: 4.5445 - type: nauc_ndcg_at_100_diff1 value: 42.1274 - type: nauc_ndcg_at_1000_max value: 39.821200000000005 - type: nauc_ndcg_at_1000_std value: 3.71 - type: nauc_ndcg_at_1000_diff1 value: 42.2532 - type: nauc_map_at_1_max value: 25.539 - type: nauc_map_at_1_std value: -7.6318 - type: nauc_map_at_1_diff1 value: 47.2875 - type: nauc_map_at_3_max value: 33.5096 - type: nauc_map_at_3_std value: -3.4685 - type: nauc_map_at_3_diff1 value: 41.2351 - type: nauc_map_at_5_max value: 35.0144 - type: 
nauc_map_at_5_std value: -2.9198999999999997 - type: nauc_map_at_5_diff1 value: 40.892 - type: nauc_map_at_10_max value: 36.4497 - type: nauc_map_at_10_std value: -1.8148999999999997 - type: nauc_map_at_10_diff1 value: 40.823100000000004 - type: nauc_map_at_20_max value: 36.863 - type: nauc_map_at_20_std value: -0.7572 - type: nauc_map_at_20_diff1 value: 40.6285 - type: nauc_map_at_100_max value: 36.882 - type: nauc_map_at_100_std value: -0.40850000000000003 - type: nauc_map_at_100_diff1 value: 40.844500000000004 - type: nauc_map_at_1000_max value: 36.8736 - type: nauc_map_at_1000_std value: -0.4359 - type: nauc_map_at_1000_diff1 value: 40.8569 - type: nauc_recall_at_1_max value: 25.539 - type: nauc_recall_at_1_std value: -7.6318 - type: nauc_recall_at_1_diff1 value: 47.2875 - type: nauc_recall_at_3_max value: 32.7716 - type: nauc_recall_at_3_std value: -1.6856 - type: nauc_recall_at_3_diff1 value: 36.4533 - type: nauc_recall_at_5_max value: 33.5681 - type: nauc_recall_at_5_std value: -2.4453 - type: nauc_recall_at_5_diff1 value: 33.8472 - type: nauc_recall_at_10_max value: 39.5319 - type: nauc_recall_at_10_std value: 0.6228 - type: nauc_recall_at_10_diff1 value: 31.935200000000002 - type: nauc_recall_at_20_max value: 44.3495 - type: nauc_recall_at_20_std value: 12.5445 - type: nauc_recall_at_20_diff1 value: 27.6315 - type: nauc_recall_at_100_max value: 53.924499999999995 - type: nauc_recall_at_100_std value: 44.5927 - type: nauc_recall_at_100_diff1 value: 32.2776 - type: nauc_recall_at_1000_max value: 59.7088 - type: nauc_recall_at_1000_std value: 61.6974 - type: nauc_recall_at_1000_diff1 value: 28.367700000000003 - type: nauc_precision_at_1_max value: 39.623000000000005 - type: nauc_precision_at_1_std value: -0.6057 - type: nauc_precision_at_1_diff1 value: 50.2688 - type: nauc_precision_at_3_max value: 29.5187 - type: nauc_precision_at_3_std value: 11.1305 - type: nauc_precision_at_3_diff1 value: 11.674 - type: nauc_precision_at_5_max value: 25.5889 - type: nauc_precision_at_5_std value: 13.4716 - type: nauc_precision_at_5_diff1 value: 3.2894 - type: nauc_precision_at_10_max value: 21.2446 - type: nauc_precision_at_10_std value: 15.7787 - type: nauc_precision_at_10_diff1 value: -4.0968 - type: nauc_precision_at_20_max value: 15.9944 - type: nauc_precision_at_20_std value: 22.4212 - type: nauc_precision_at_20_diff1 value: -11.3771 - type: nauc_precision_at_100_max value: 8.592600000000001 - type: nauc_precision_at_100_std value: 26.4342 - type: nauc_precision_at_100_diff1 value: -15.402 - type: nauc_precision_at_1000_max value: 2.8388 - type: nauc_precision_at_1000_std value: 23.2317 - type: nauc_precision_at_1000_diff1 value: -19.1173 - type: nauc_mrr_at_1_max value: 39.623000000000005 - type: nauc_mrr_at_1_std value: -0.6057 - type: nauc_mrr_at_1_diff1 value: 50.2688 - type: nauc_mrr_at_3_max value: 41.694199999999995 - type: nauc_mrr_at_3_std value: 2.5751 - type: nauc_mrr_at_3_diff1 value: 48.6111 - type: nauc_mrr_at_5_max value: 41.5674 - type: nauc_mrr_at_5_std value: 2.7312 - type: nauc_mrr_at_5_diff1 value: 48.6988 - type: nauc_mrr_at_10_max value: 41.7364 - type: nauc_mrr_at_10_std value: 2.5787 - type: nauc_mrr_at_10_diff1 value: 48.5842 - type: nauc_mrr_at_20_max value: 41.7509 - type: nauc_mrr_at_20_std value: 2.6837 - type: nauc_mrr_at_20_diff1 value: 48.7196 - type: nauc_mrr_at_100_max value: 41.6895 - type: nauc_mrr_at_100_std value: 2.6545 - type: nauc_mrr_at_100_diff1 value: 48.7483 - type: nauc_mrr_at_1000_max value: 41.6849 - type: nauc_mrr_at_1000_std value: 2.6379 - 
type: nauc_mrr_at_1000_diff1 value: 48.753600000000006 - type: main_score value: 67.46 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (fr) type: miracl/mmteb-miracl config: fr split: dev revision: main metrics: - type: ndcg_at_1 value: 39.65 - type: ndcg_at_3 value: 39.843 - type: ndcg_at_5 value: 44.416 - type: ndcg_at_10 value: 49.891000000000005 - type: ndcg_at_20 value: 53.163000000000004 - type: ndcg_at_100 value: 56.492 - type: ndcg_at_1000 value: 57.837 - type: map_at_1 value: 22.644000000000002 - type: map_at_3 value: 33.021 - type: map_at_5 value: 36.958 - type: map_at_10 value: 39.967999999999996 - type: map_at_20 value: 41.298 - type: map_at_100 value: 42.03 - type: map_at_1000 value: 42.119 - type: recall_at_1 value: 22.644000000000002 - type: recall_at_3 value: 39.798 - type: recall_at_5 value: 51.001 - type: recall_at_10 value: 65.169 - type: recall_at_20 value: 75.33800000000001 - type: recall_at_100 value: 89.786 - type: recall_at_1000 value: 98.08099999999999 - type: precision_at_1 value: 39.65 - type: precision_at_3 value: 25.656000000000002 - type: precision_at_5 value: 20.175 - type: precision_at_10 value: 13.120000000000001 - type: precision_at_20 value: 7.7410000000000005 - type: precision_at_100 value: 1.883 - type: precision_at_1000 value: 0.208 - type: mrr_at_1 value: 39.6501 - type: mrr_at_3 value: 48.7366 - type: mrr_at_5 value: 50.9961 - type: mrr_at_10 value: 52.659 - type: mrr_at_20 value: 53.0856 - type: mrr_at_100 value: 53.273199999999996 - type: mrr_at_1000 value: 53.2931 - type: nauc_ndcg_at_1_max value: 29.1135 - type: nauc_ndcg_at_1_std value: 13.9561 - type: nauc_ndcg_at_1_diff1 value: 28.410400000000003 - type: nauc_ndcg_at_3_max value: 29.0117 - type: nauc_ndcg_at_3_std value: 15.655 - type: nauc_ndcg_at_3_diff1 value: 19.7043 - type: nauc_ndcg_at_5_max value: 31.3257 - type: nauc_ndcg_at_5_std value: 17.4096 - type: nauc_ndcg_at_5_diff1 value: 20.5295 - type: nauc_ndcg_at_10_max value: 33.244 - type: nauc_ndcg_at_10_std value: 18.8436 - type: nauc_ndcg_at_10_diff1 value: 17.9986 - type: nauc_ndcg_at_20_max value: 35.0697 - type: nauc_ndcg_at_20_std value: 19.84 - type: nauc_ndcg_at_20_diff1 value: 19.611600000000003 - type: nauc_ndcg_at_100_max value: 34.7837 - type: nauc_ndcg_at_100_std value: 22.2762 - type: nauc_ndcg_at_100_diff1 value: 19.3138 - type: nauc_ndcg_at_1000_max value: 34.4487 - type: nauc_ndcg_at_1000_std value: 20.8402 - type: nauc_ndcg_at_1000_diff1 value: 20.2691 - type: nauc_map_at_1_max value: 20.247200000000003 - type: nauc_map_at_1_std value: 8.8046 - type: nauc_map_at_1_diff1 value: 27.227600000000002 - type: nauc_map_at_3_max value: 26.7076 - type: nauc_map_at_3_std value: 13.7464 - type: nauc_map_at_3_diff1 value: 21.1266 - type: nauc_map_at_5_max value: 28.777399999999997 - type: nauc_map_at_5_std value: 15.348400000000002 - type: nauc_map_at_5_diff1 value: 21.4282 - type: nauc_map_at_10_max value: 29.907600000000002 - type: nauc_map_at_10_std value: 16.3636 - type: nauc_map_at_10_diff1 value: 20.1957 - type: nauc_map_at_20_max value: 30.864399999999996 - type: nauc_map_at_20_std value: 16.936999999999998 - type: nauc_map_at_20_diff1 value: 20.8871 - type: nauc_map_at_100_max value: 30.998900000000003 - type: nauc_map_at_100_std value: 17.673 - type: nauc_map_at_100_diff1 value: 20.7773 - type: nauc_map_at_1000_max value: 31.0185 - type: nauc_map_at_1000_std value: 17.6212 - type: nauc_map_at_1000_diff1 value: 20.846700000000002 - type: nauc_recall_at_1_max value: 20.247200000000003 - type: 
nauc_recall_at_1_std value: 8.8046 - type: nauc_recall_at_1_diff1 value: 27.227600000000002 - type: nauc_recall_at_3_max value: 25.074600000000004 - type: nauc_recall_at_3_std value: 14.0657 - type: nauc_recall_at_3_diff1 value: 14.7258 - type: nauc_recall_at_5_max value: 29.442899999999998 - type: nauc_recall_at_5_std value: 16.2404 - type: nauc_recall_at_5_diff1 value: 15.4134 - type: nauc_recall_at_10_max value: 33.5052 - type: nauc_recall_at_10_std value: 19.417 - type: nauc_recall_at_10_diff1 value: 7.933700000000001 - type: nauc_recall_at_20_max value: 40.2402 - type: nauc_recall_at_20_std value: 22.7218 - type: nauc_recall_at_20_diff1 value: 11.777600000000001 - type: nauc_recall_at_100_max value: 44.4613 - type: nauc_recall_at_100_std value: 52.5751 - type: nauc_recall_at_100_diff1 value: 5.1827 - type: nauc_recall_at_1000_max value: 80.4059 - type: nauc_recall_at_1000_std value: 82.2582 - type: nauc_recall_at_1000_diff1 value: 37.9332 - type: nauc_precision_at_1_max value: 29.1135 - type: nauc_precision_at_1_std value: 13.9561 - type: nauc_precision_at_1_diff1 value: 28.410400000000003 - type: nauc_precision_at_3_max value: 32.4031 - type: nauc_precision_at_3_std value: 21.222099999999998 - type: nauc_precision_at_3_diff1 value: 9.2426 - type: nauc_precision_at_5_max value: 31.372600000000002 - type: nauc_precision_at_5_std value: 22.4259 - type: nauc_precision_at_5_diff1 value: 7.199 - type: nauc_precision_at_10_max value: 29.5298 - type: nauc_precision_at_10_std value: 22.183 - type: nauc_precision_at_10_diff1 value: -1.2202 - type: nauc_precision_at_20_max value: 28.1874 - type: nauc_precision_at_20_std value: 21.7393 - type: nauc_precision_at_20_diff1 value: 0.2774 - type: nauc_precision_at_100_max value: 18.2122 - type: nauc_precision_at_100_std value: 21.566 - type: nauc_precision_at_100_diff1 value: -5.8792 - type: nauc_precision_at_1000_max value: 11.3258 - type: nauc_precision_at_1000_std value: 12.261700000000001 - type: nauc_precision_at_1000_diff1 value: -5.8514 - type: nauc_mrr_at_1_max value: 29.1135 - type: nauc_mrr_at_1_std value: 13.9561 - type: nauc_mrr_at_1_diff1 value: 28.410400000000003 - type: nauc_mrr_at_3_max value: 30.904999999999998 - type: nauc_mrr_at_3_std value: 16.5695 - type: nauc_mrr_at_3_diff1 value: 22.555 - type: nauc_mrr_at_5_max value: 32.408 - type: nauc_mrr_at_5_std value: 17.7334 - type: nauc_mrr_at_5_diff1 value: 22.912399999999998 - type: nauc_mrr_at_10_max value: 33.069500000000005 - type: nauc_mrr_at_10_std value: 17.8731 - type: nauc_mrr_at_10_diff1 value: 22.270300000000002 - type: nauc_mrr_at_20_max value: 33.062000000000005 - type: nauc_mrr_at_20_std value: 17.8293 - type: nauc_mrr_at_20_diff1 value: 22.5118 - type: nauc_mrr_at_100_max value: 32.9394 - type: nauc_mrr_at_100_std value: 17.7815 - type: nauc_mrr_at_100_diff1 value: 22.676199999999998 - type: nauc_mrr_at_1000_max value: 32.9188 - type: nauc_mrr_at_1000_std value: 17.7435 - type: nauc_mrr_at_1000_diff1 value: 22.6855 - type: main_score value: 49.891000000000005 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (hi) type: miracl/mmteb-miracl config: hi split: dev revision: main metrics: - type: ndcg_at_1 value: 36.857 - type: ndcg_at_3 value: 39.469 - type: ndcg_at_5 value: 41.839999999999996 - type: ndcg_at_10 value: 46.141 - type: ndcg_at_20 value: 49.384 - type: ndcg_at_100 value: 52.565 - type: ndcg_at_1000 value: 54.318999999999996 - type: map_at_1 value: 20.185 - type: map_at_3 value: 30.9 - type: map_at_5 value: 34.311 - type: map_at_10 value: 37.074 - 
type: map_at_20 value: 38.493 - type: map_at_100 value: 39.174 - type: map_at_1000 value: 39.269 - type: recall_at_1 value: 20.185 - type: recall_at_3 value: 38.993 - type: recall_at_5 value: 47.881 - type: recall_at_10 value: 59.474000000000004 - type: recall_at_20 value: 69.437 - type: recall_at_100 value: 83.38499999999999 - type: recall_at_1000 value: 94.813 - type: precision_at_1 value: 36.857 - type: precision_at_3 value: 26.19 - type: precision_at_5 value: 19.829 - type: precision_at_10 value: 12.543000000000001 - type: precision_at_20 value: 7.542999999999999 - type: precision_at_100 value: 1.8030000000000002 - type: precision_at_1000 value: 0.20500000000000002 - type: mrr_at_1 value: 36.857099999999996 - type: mrr_at_3 value: 46.5238 - type: mrr_at_5 value: 47.9952 - type: mrr_at_10 value: 49.331399999999995 - type: mrr_at_20 value: 49.8255 - type: mrr_at_100 value: 50.0575 - type: mrr_at_1000 value: 50.097 - type: nauc_ndcg_at_1_max value: 42.226200000000006 - type: nauc_ndcg_at_1_std value: 4.0359 - type: nauc_ndcg_at_1_diff1 value: 41.728500000000004 - type: nauc_ndcg_at_3_max value: 37.5731 - type: nauc_ndcg_at_3_std value: 7.4824 - type: nauc_ndcg_at_3_diff1 value: 25.607499999999998 - type: nauc_ndcg_at_5_max value: 36.1243 - type: nauc_ndcg_at_5_std value: 6.7822 - type: nauc_ndcg_at_5_diff1 value: 26.4955 - type: nauc_ndcg_at_10_max value: 38.8673 - type: nauc_ndcg_at_10_std value: 9.925699999999999 - type: nauc_ndcg_at_10_diff1 value: 25.262400000000003 - type: nauc_ndcg_at_20_max value: 41.564099999999996 - type: nauc_ndcg_at_20_std value: 12.4619 - type: nauc_ndcg_at_20_diff1 value: 26.902900000000002 - type: nauc_ndcg_at_100_max value: 42.2534 - type: nauc_ndcg_at_100_std value: 12.1461 - type: nauc_ndcg_at_100_diff1 value: 27.721600000000002 - type: nauc_ndcg_at_1000_max value: 42.3689 - type: nauc_ndcg_at_1000_std value: 11.9947 - type: nauc_ndcg_at_1000_diff1 value: 28.6224 - type: nauc_map_at_1_max value: 23.4774 - type: nauc_map_at_1_std value: -1.6596 - type: nauc_map_at_1_diff1 value: 32.9091 - type: nauc_map_at_3_max value: 29.2888 - type: nauc_map_at_3_std value: 2.8310999999999997 - type: nauc_map_at_3_diff1 value: 25.7556 - type: nauc_map_at_5_max value: 32.013200000000005 - type: nauc_map_at_5_std value: 3.8372 - type: nauc_map_at_5_diff1 value: 26.3662 - type: nauc_map_at_10_max value: 34.6644 - type: nauc_map_at_10_std value: 5.9211 - type: nauc_map_at_10_diff1 value: 25.737700000000004 - type: nauc_map_at_20_max value: 36.5315 - type: nauc_map_at_20_std value: 7.657500000000001 - type: nauc_map_at_20_diff1 value: 26.2519 - type: nauc_map_at_100_max value: 36.7956 - type: nauc_map_at_100_std value: 7.6282000000000005 - type: nauc_map_at_100_diff1 value: 26.5173 - type: nauc_map_at_1000_max value: 36.822500000000005 - type: nauc_map_at_1000_std value: 7.641100000000001 - type: nauc_map_at_1000_diff1 value: 26.5875 - type: nauc_recall_at_1_max value: 23.4774 - type: nauc_recall_at_1_std value: -1.6596 - type: nauc_recall_at_1_diff1 value: 32.9091 - type: nauc_recall_at_3_max value: 23.9443 - type: nauc_recall_at_3_std value: 7.0466 - type: nauc_recall_at_3_diff1 value: 15.045 - type: nauc_recall_at_5_max value: 27.515 - type: nauc_recall_at_5_std value: 7.8471 - type: nauc_recall_at_5_diff1 value: 16.0936 - type: nauc_recall_at_10_max value: 32.9675 - type: nauc_recall_at_10_std value: 15.6248 - type: nauc_recall_at_10_diff1 value: 11.8783 - type: nauc_recall_at_20_max value: 40.6864 - type: nauc_recall_at_20_std value: 23.9995 - type: 
nauc_recall_at_20_diff1 value: 16.9561 - type: nauc_recall_at_100_max value: 47.5027 - type: nauc_recall_at_100_std value: 30.6021 - type: nauc_recall_at_100_diff1 value: 17.3955 - type: nauc_recall_at_1000_max value: 66.6978 - type: nauc_recall_at_1000_std value: 62.0413 - type: nauc_recall_at_1000_diff1 value: 27.5068 - type: nauc_precision_at_1_max value: 42.226200000000006 - type: nauc_precision_at_1_std value: 4.0359 - type: nauc_precision_at_1_diff1 value: 41.728500000000004 - type: nauc_precision_at_3_max value: 44.7816 - type: nauc_precision_at_3_std value: 15.473300000000002 - type: nauc_precision_at_3_diff1 value: 17.0949 - type: nauc_precision_at_5_max value: 44.6483 - type: nauc_precision_at_5_std value: 14.8981 - type: nauc_precision_at_5_diff1 value: 17.1841 - type: nauc_precision_at_10_max value: 45.796 - type: nauc_precision_at_10_std value: 21.046300000000002 - type: nauc_precision_at_10_diff1 value: 10.9757 - type: nauc_precision_at_20_max value: 45.0264 - type: nauc_precision_at_20_std value: 24.8162 - type: nauc_precision_at_20_diff1 value: 10.624699999999999 - type: nauc_precision_at_100_max value: 39.8456 - type: nauc_precision_at_100_std value: 21.0487 - type: nauc_precision_at_100_diff1 value: 8.372 - type: nauc_precision_at_1000_max value: 34.7517 - type: nauc_precision_at_1000_std value: 18.3825 - type: nauc_precision_at_1000_diff1 value: 7.969900000000001 - type: nauc_mrr_at_1_max value: 42.226200000000006 - type: nauc_mrr_at_1_std value: 4.0359 - type: nauc_mrr_at_1_diff1 value: 41.728500000000004 - type: nauc_mrr_at_3_max value: 42.1134 - type: nauc_mrr_at_3_std value: 7.674799999999999 - type: nauc_mrr_at_3_diff1 value: 34.1447 - type: nauc_mrr_at_5_max value: 42.668800000000005 - type: nauc_mrr_at_5_std value: 7.3921 - type: nauc_mrr_at_5_diff1 value: 34.6011 - type: nauc_mrr_at_10_max value: 43.473099999999995 - type: nauc_mrr_at_10_std value: 8.0841 - type: nauc_mrr_at_10_diff1 value: 34.679500000000004 - type: nauc_mrr_at_20_max value: 43.3626 - type: nauc_mrr_at_20_std value: 7.7897 - type: nauc_mrr_at_20_diff1 value: 35.0828 - type: nauc_mrr_at_100_max value: 43.287 - type: nauc_mrr_at_100_std value: 7.7234 - type: nauc_mrr_at_100_diff1 value: 35.169200000000004 - type: nauc_mrr_at_1000_max value: 43.2954 - type: nauc_mrr_at_1000_std value: 7.7224 - type: nauc_mrr_at_1000_diff1 value: 35.1808 - type: main_score value: 46.141 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (id) type: miracl/mmteb-miracl config: id split: dev revision: main metrics: - type: ndcg_at_1 value: 46.354 - type: ndcg_at_3 value: 42.538 - type: ndcg_at_5 value: 43.717 - type: ndcg_at_10 value: 47.229 - type: ndcg_at_20 value: 50.605999999999995 - type: ndcg_at_100 value: 55.25 - type: ndcg_at_1000 value: 57.647999999999996 - type: map_at_1 value: 20.787 - type: map_at_3 value: 30.721999999999998 - type: map_at_5 value: 34.096 - type: map_at_10 value: 36.994 - type: map_at_20 value: 38.622 - type: map_at_100 value: 39.872 - type: map_at_1000 value: 40.056000000000004 - type: recall_at_1 value: 20.787 - type: recall_at_3 value: 36.229 - type: recall_at_5 value: 44.437 - type: recall_at_10 value: 54.771 - type: recall_at_20 value: 63.842 - type: recall_at_100 value: 80.689 - type: recall_at_1000 value: 94.03200000000001 - type: precision_at_1 value: 46.354 - type: precision_at_3 value: 30.625000000000004 - type: precision_at_5 value: 23.708000000000002 - type: precision_at_10 value: 15.719 - type: precision_at_20 value: 9.589 - type: precision_at_100 value: 
2.5700000000000003 - type: precision_at_1000 value: 0.302 - type: mrr_at_1 value: 46.3542 - type: mrr_at_3 value: 54.6875 - type: mrr_at_5 value: 56.5521 - type: mrr_at_10 value: 57.6894 - type: mrr_at_20 value: 58.05630000000001 - type: mrr_at_100 value: 58.217 - type: mrr_at_1000 value: 58.2387 - type: nauc_ndcg_at_1_max value: 27.987000000000002 - type: nauc_ndcg_at_1_std value: 7.784000000000001 - type: nauc_ndcg_at_1_diff1 value: 29.116799999999998 - type: nauc_ndcg_at_3_max value: 25.316899999999997 - type: nauc_ndcg_at_3_std value: 3.3255 - type: nauc_ndcg_at_3_diff1 value: 25.4685 - type: nauc_ndcg_at_5_max value: 26.1614 - type: nauc_ndcg_at_5_std value: 0.8946000000000001 - type: nauc_ndcg_at_5_diff1 value: 25.269799999999996 - type: nauc_ndcg_at_10_max value: 26.898 - type: nauc_ndcg_at_10_std value: 0.505 - type: nauc_ndcg_at_10_diff1 value: 25.0664 - type: nauc_ndcg_at_20_max value: 28.384900000000002 - type: nauc_ndcg_at_20_std value: 3.0328 - type: nauc_ndcg_at_20_diff1 value: 25.011 - type: nauc_ndcg_at_100_max value: 29.4682 - type: nauc_ndcg_at_100_std value: 8.5929 - type: nauc_ndcg_at_100_diff1 value: 23.0951 - type: nauc_ndcg_at_1000_max value: 29.384900000000002 - type: nauc_ndcg_at_1000_std value: 8.7787 - type: nauc_ndcg_at_1000_diff1 value: 23.454900000000002 - type: nauc_map_at_1_max value: 17.6022 - type: nauc_map_at_1_std value: -3.9352 - type: nauc_map_at_1_diff1 value: 31.478 - type: nauc_map_at_3_max value: 22.4116 - type: nauc_map_at_3_std value: -3.0375 - type: nauc_map_at_3_diff1 value: 28.6608 - type: nauc_map_at_5_max value: 23.4486 - type: nauc_map_at_5_std value: -3.7261 - type: nauc_map_at_5_diff1 value: 27.2458 - type: nauc_map_at_10_max value: 24.4413 - type: nauc_map_at_10_std value: -2.4634 - type: nauc_map_at_10_diff1 value: 26.3372 - type: nauc_map_at_20_max value: 25.1924 - type: nauc_map_at_20_std value: -1.0928 - type: nauc_map_at_20_diff1 value: 26.028299999999998 - type: nauc_map_at_100_max value: 25.7081 - type: nauc_map_at_100_std value: 0.6245999999999999 - type: nauc_map_at_100_diff1 value: 25.599 - type: nauc_map_at_1000_max value: 25.714100000000002 - type: nauc_map_at_1000_std value: 0.7106 - type: nauc_map_at_1000_diff1 value: 25.609700000000004 - type: nauc_recall_at_1_max value: 17.6022 - type: nauc_recall_at_1_std value: -3.9352 - type: nauc_recall_at_1_diff1 value: 31.478 - type: nauc_recall_at_3_max value: 20.314799999999998 - type: nauc_recall_at_3_std value: -4.1603 - type: nauc_recall_at_3_diff1 value: 26.1438 - type: nauc_recall_at_5_max value: 22.866500000000002 - type: nauc_recall_at_5_std value: -4.755 - type: nauc_recall_at_5_diff1 value: 22.1412 - type: nauc_recall_at_10_max value: 22.900000000000002 - type: nauc_recall_at_10_std value: -3.9179 - type: nauc_recall_at_10_diff1 value: 19.3005 - type: nauc_recall_at_20_max value: 26.3519 - type: nauc_recall_at_20_std value: 1.1686 - type: nauc_recall_at_20_diff1 value: 18.94 - type: nauc_recall_at_100_max value: 30.2413 - type: nauc_recall_at_100_std value: 24.4636 - type: nauc_recall_at_100_diff1 value: 6.5627 - type: nauc_recall_at_1000_max value: 43.778 - type: nauc_recall_at_1000_std value: 48.835699999999996 - type: nauc_recall_at_1000_diff1 value: -1.5112 - type: nauc_precision_at_1_max value: 27.987000000000002 - type: nauc_precision_at_1_std value: 7.784000000000001 - type: nauc_precision_at_1_diff1 value: 29.116799999999998 - type: nauc_precision_at_3_max value: 24.6393 - type: nauc_precision_at_3_std value: 7.932599999999999 - type: nauc_precision_at_3_diff1 
value: 11.9215 - type: nauc_precision_at_5_max value: 23.0426 - type: nauc_precision_at_5_std value: 8.9273 - type: nauc_precision_at_5_diff1 value: 5.0737 - type: nauc_precision_at_10_max value: 18.0093 - type: nauc_precision_at_10_std value: 13.093 - type: nauc_precision_at_10_diff1 value: -1.5028 - type: nauc_precision_at_20_max value: 16.1061 - type: nauc_precision_at_20_std value: 18.3582 - type: nauc_precision_at_20_diff1 value: -4.3066 - type: nauc_precision_at_100_max value: 10.9945 - type: nauc_precision_at_100_std value: 28.2804 - type: nauc_precision_at_100_diff1 value: -11.6381 - type: nauc_precision_at_1000_max value: 4.9859 - type: nauc_precision_at_1000_std value: 26.3117 - type: nauc_precision_at_1000_diff1 value: -13.819300000000002 - type: nauc_mrr_at_1_max value: 27.987000000000002 - type: nauc_mrr_at_1_std value: 7.784000000000001 - type: nauc_mrr_at_1_diff1 value: 29.116799999999998 - type: nauc_mrr_at_3_max value: 28.635899999999996 - type: nauc_mrr_at_3_std value: 8.309700000000001 - type: nauc_mrr_at_3_diff1 value: 27.976499999999998 - type: nauc_mrr_at_5_max value: 29.8296 - type: nauc_mrr_at_5_std value: 9.4775 - type: nauc_mrr_at_5_diff1 value: 26.685799999999997 - type: nauc_mrr_at_10_max value: 29.4522 - type: nauc_mrr_at_10_std value: 9.1613 - type: nauc_mrr_at_10_diff1 value: 26.933600000000002 - type: nauc_mrr_at_20_max value: 29.5446 - type: nauc_mrr_at_20_std value: 9.3451 - type: nauc_mrr_at_20_diff1 value: 27.074900000000003 - type: nauc_mrr_at_100_max value: 29.4977 - type: nauc_mrr_at_100_std value: 9.4252 - type: nauc_mrr_at_100_diff1 value: 27.0534 - type: nauc_mrr_at_1000_max value: 29.499599999999997 - type: nauc_mrr_at_1000_std value: 9.4193 - type: nauc_mrr_at_1000_diff1 value: 27.054000000000002 - type: main_score value: 47.229 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (ja) type: miracl/mmteb-miracl config: ja split: dev revision: main metrics: - type: ndcg_at_1 value: 56.279 - type: ndcg_at_3 value: 56.226 - type: ndcg_at_5 value: 58.660000000000004 - type: ndcg_at_10 value: 62.81 - type: ndcg_at_20 value: 65.21000000000001 - type: ndcg_at_100 value: 67.757 - type: ndcg_at_1000 value: 68.667 - type: map_at_1 value: 36.647999999999996 - type: map_at_3 value: 48.154 - type: map_at_5 value: 51.336999999999996 - type: map_at_10 value: 53.998000000000005 - type: map_at_20 value: 55.074 - type: map_at_100 value: 55.701 - type: map_at_1000 value: 55.767 - type: recall_at_1 value: 36.647999999999996 - type: recall_at_3 value: 55.845 - type: recall_at_5 value: 63.854 - type: recall_at_10 value: 74.96000000000001 - type: recall_at_20 value: 82.326 - type: recall_at_100 value: 92.461 - type: recall_at_1000 value: 97.827 - type: precision_at_1 value: 56.279 - type: precision_at_3 value: 31.86 - type: precision_at_5 value: 22.884 - type: precision_at_10 value: 14.058000000000002 - type: precision_at_20 value: 7.965 - type: precision_at_100 value: 1.883 - type: precision_at_1000 value: 0.203 - type: mrr_at_1 value: 56.27910000000001 - type: mrr_at_3 value: 64.7868 - type: mrr_at_5 value: 65.9496 - type: mrr_at_10 value: 67.0763 - type: mrr_at_20 value: 67.3531 - type: mrr_at_100 value: 67.48920000000001 - type: mrr_at_1000 value: 67.5016 - type: nauc_ndcg_at_1_max value: 34.801300000000005 - type: nauc_ndcg_at_1_std value: 3.6539 - type: nauc_ndcg_at_1_diff1 value: 42.9912 - type: nauc_ndcg_at_3_max value: 27.3758 - type: nauc_ndcg_at_3_std value: -5.6399 - type: nauc_ndcg_at_3_diff1 value: 35.0235 - type: nauc_ndcg_at_5_max value: 26.5087 
- type: nauc_ndcg_at_5_std value: -7.2121 - type: nauc_ndcg_at_5_diff1 value: 34.3684 - type: nauc_ndcg_at_10_max value: 27.756199999999996 - type: nauc_ndcg_at_10_std value: -6.9499 - type: nauc_ndcg_at_10_diff1 value: 34.9472 - type: nauc_ndcg_at_20_max value: 30.6925 - type: nauc_ndcg_at_20_std value: -3.7859 - type: nauc_ndcg_at_20_diff1 value: 35.833 - type: nauc_ndcg_at_100_max value: 31.6641 - type: nauc_ndcg_at_100_std value: -1.1897 - type: nauc_ndcg_at_100_diff1 value: 36.218 - type: nauc_ndcg_at_1000_max value: 31.5623 - type: nauc_ndcg_at_1000_std value: -1.2468 - type: nauc_ndcg_at_1000_diff1 value: 36.4007 - type: nauc_map_at_1_max value: 13.1087 - type: nauc_map_at_1_std value: -13.6324 - type: nauc_map_at_1_diff1 value: 36.5411 - type: nauc_map_at_3_max value: 19.108900000000002 - type: nauc_map_at_3_std value: -12.8558 - type: nauc_map_at_3_diff1 value: 33.797 - type: nauc_map_at_5_max value: 20.935100000000002 - type: nauc_map_at_5_std value: -11.6525 - type: nauc_map_at_5_diff1 value: 33.392500000000005 - type: nauc_map_at_10_max value: 22.9758 - type: nauc_map_at_10_std value: -10.3728 - type: nauc_map_at_10_diff1 value: 33.8681 - type: nauc_map_at_20_max value: 24.357100000000003 - type: nauc_map_at_20_std value: -8.9932 - type: nauc_map_at_20_diff1 value: 34.2437 - type: nauc_map_at_100_max value: 24.622700000000002 - type: nauc_map_at_100_std value: -8.3079 - type: nauc_map_at_100_diff1 value: 34.3227 - type: nauc_map_at_1000_max value: 24.6436 - type: nauc_map_at_1000_std value: -8.280999999999999 - type: nauc_map_at_1000_diff1 value: 34.3499 - type: nauc_recall_at_1_max value: 13.1087 - type: nauc_recall_at_1_std value: -13.6324 - type: nauc_recall_at_1_diff1 value: 36.5411 - type: nauc_recall_at_3_max value: 17.369899999999998 - type: nauc_recall_at_3_std value: -14.6564 - type: nauc_recall_at_3_diff1 value: 29.4825 - type: nauc_recall_at_5_max value: 18.2446 - type: nauc_recall_at_5_std value: -13.422400000000001 - type: nauc_recall_at_5_diff1 value: 26.5515 - type: nauc_recall_at_10_max value: 18.6431 - type: nauc_recall_at_10_std value: -13.3386 - type: nauc_recall_at_10_diff1 value: 25.001299999999997 - type: nauc_recall_at_20_max value: 28.248099999999997 - type: nauc_recall_at_20_std value: -2.9409 - type: nauc_recall_at_20_diff1 value: 26.283800000000003 - type: nauc_recall_at_100_max value: 38.6213 - type: nauc_recall_at_100_std value: 20.5175 - type: nauc_recall_at_100_diff1 value: 23.8743 - type: nauc_recall_at_1000_max value: 54.1945 - type: nauc_recall_at_1000_std value: 48.3776 - type: nauc_recall_at_1000_diff1 value: 21.786 - type: nauc_precision_at_1_max value: 34.801300000000005 - type: nauc_precision_at_1_std value: 3.6539 - type: nauc_precision_at_1_diff1 value: 42.9912 - type: nauc_precision_at_3_max value: 36.7085 - type: nauc_precision_at_3_std value: 13.653799999999999 - type: nauc_precision_at_3_diff1 value: 16.8438 - type: nauc_precision_at_5_max value: 33.541199999999996 - type: nauc_precision_at_5_std value: 17.418400000000002 - type: nauc_precision_at_5_diff1 value: 8.5281 - type: nauc_precision_at_10_max value: 32.448100000000004 - type: nauc_precision_at_10_std value: 22.8249 - type: nauc_precision_at_10_diff1 value: 2.5392 - type: nauc_precision_at_20_max value: 32.423 - type: nauc_precision_at_20_std value: 29.353800000000003 - type: nauc_precision_at_20_diff1 value: 0.1455 - type: nauc_precision_at_100_max value: 25.0045 - type: nauc_precision_at_100_std value: 34.6492 - type: nauc_precision_at_100_diff1 value: -5.5314000000000005 - 
type: nauc_precision_at_1000_max value: 21.319499999999998 - type: nauc_precision_at_1000_std value: 33.3312 - type: nauc_precision_at_1000_diff1 value: -7.0243 - type: nauc_mrr_at_1_max value: 34.801300000000005 - type: nauc_mrr_at_1_std value: 3.6539 - type: nauc_mrr_at_1_diff1 value: 42.9912 - type: nauc_mrr_at_3_max value: 39.8179 - type: nauc_mrr_at_3_std value: 4.4769000000000005 - type: nauc_mrr_at_3_diff1 value: 42.4358 - type: nauc_mrr_at_5_max value: 39.6822 - type: nauc_mrr_at_5_std value: 4.7865 - type: nauc_mrr_at_5_diff1 value: 41.9923 - type: nauc_mrr_at_10_max value: 39.2963 - type: nauc_mrr_at_10_std value: 4.8511 - type: nauc_mrr_at_10_diff1 value: 41.994 - type: nauc_mrr_at_20_max value: 39.395799999999994 - type: nauc_mrr_at_20_std value: 4.9907 - type: nauc_mrr_at_20_diff1 value: 42.1806 - type: nauc_mrr_at_100_max value: 39.3251 - type: nauc_mrr_at_100_std value: 4.948 - type: nauc_mrr_at_100_diff1 value: 42.1769 - type: nauc_mrr_at_1000_max value: 39.3153 - type: nauc_mrr_at_1000_std value: 4.9384999999999994 - type: nauc_mrr_at_1000_diff1 value: 42.1768 - type: main_score value: 62.81 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (ko) type: miracl/mmteb-miracl config: ko split: dev revision: main metrics: - type: ndcg_at_1 value: 52.581999999999994 - type: ndcg_at_3 value: 53.73 - type: ndcg_at_5 value: 55.886 - type: ndcg_at_10 value: 59.216 - type: ndcg_at_20 value: 62.427 - type: ndcg_at_100 value: 65.093 - type: ndcg_at_1000 value: 66.204 - type: map_at_1 value: 30.520999999999997 - type: map_at_3 value: 42.601 - type: map_at_5 value: 46.516000000000005 - type: map_at_10 value: 49.61 - type: map_at_20 value: 51.359 - type: map_at_100 value: 52.171 - type: map_at_1000 value: 52.249 - type: recall_at_1 value: 30.520999999999997 - type: recall_at_3 value: 51.5 - type: recall_at_5 value: 60.709999999999994 - type: recall_at_10 value: 71.15899999999999 - type: recall_at_20 value: 80.209 - type: recall_at_100 value: 90.203 - type: recall_at_1000 value: 96.714 - type: precision_at_1 value: 52.581999999999994 - type: precision_at_3 value: 33.019999999999996 - type: precision_at_5 value: 25.446 - type: precision_at_10 value: 16.244 - type: precision_at_20 value: 9.695 - type: precision_at_100 value: 2.286 - type: precision_at_1000 value: 0.248 - type: mrr_at_1 value: 52.5822 - type: mrr_at_3 value: 61.9718 - type: mrr_at_5 value: 63.450700000000005 - type: mrr_at_10 value: 64.50479999999999 - type: mrr_at_20 value: 64.7745 - type: mrr_at_100 value: 64.86840000000001 - type: mrr_at_1000 value: 64.8792 - type: nauc_ndcg_at_1_max value: 57.2789 - type: nauc_ndcg_at_1_std value: 34.9863 - type: nauc_ndcg_at_1_diff1 value: 44.0111 - type: nauc_ndcg_at_3_max value: 34.18 - type: nauc_ndcg_at_3_std value: 11.1503 - type: nauc_ndcg_at_3_diff1 value: 40.339999999999996 - type: nauc_ndcg_at_5_max value: 34.4364 - type: nauc_ndcg_at_5_std value: 8.7133 - type: nauc_ndcg_at_5_diff1 value: 43.3464 - type: nauc_ndcg_at_10_max value: 35.990899999999996 - type: nauc_ndcg_at_10_std value: 10.886700000000001 - type: nauc_ndcg_at_10_diff1 value: 43.3519 - type: nauc_ndcg_at_20_max value: 40.259499999999996 - type: nauc_ndcg_at_20_std value: 16.305600000000002 - type: nauc_ndcg_at_20_diff1 value: 43.526900000000005 - type: nauc_ndcg_at_100_max value: 44.4663 - type: nauc_ndcg_at_100_std value: 21.5157 - type: nauc_ndcg_at_100_diff1 value: 43.269999999999996 - type: nauc_ndcg_at_1000_max value: 44.5037 - type: nauc_ndcg_at_1000_std value: 21.6384 - type: nauc_ndcg_at_1000_diff1 
value: 43.5169 - type: nauc_map_at_1_max value: 9.6775 - type: nauc_map_at_1_std value: -7.5287999999999995 - type: nauc_map_at_1_diff1 value: 56.714200000000005 - type: nauc_map_at_3_max value: 14.175199999999998 - type: nauc_map_at_3_std value: -9.251800000000001 - type: nauc_map_at_3_diff1 value: 47.239 - type: nauc_map_at_5_max value: 20.4059 - type: nauc_map_at_5_std value: -3.9799 - type: nauc_map_at_5_diff1 value: 46.5588 - type: nauc_map_at_10_max value: 26.7796 - type: nauc_map_at_10_std value: 2.3718 - type: nauc_map_at_10_diff1 value: 45.5976 - type: nauc_map_at_20_max value: 30.291400000000003 - type: nauc_map_at_20_std value: 6.3573 - type: nauc_map_at_20_diff1 value: 45.5914 - type: nauc_map_at_100_max value: 32.0062 - type: nauc_map_at_100_std value: 8.2968 - type: nauc_map_at_100_diff1 value: 45.6306 - type: nauc_map_at_1000_max value: 32.0482 - type: nauc_map_at_1000_std value: 8.3688 - type: nauc_map_at_1000_diff1 value: 45.6447 - type: nauc_recall_at_1_max value: 9.6775 - type: nauc_recall_at_1_std value: -7.5287999999999995 - type: nauc_recall_at_1_diff1 value: 56.714200000000005 - type: nauc_recall_at_3_max value: 4.7592 - type: nauc_recall_at_3_std value: -17.7268 - type: nauc_recall_at_3_diff1 value: 36.593599999999995 - type: nauc_recall_at_5_max value: 11.0166 - type: nauc_recall_at_5_std value: -14.832799999999999 - type: nauc_recall_at_5_diff1 value: 36.6471 - type: nauc_recall_at_10_max value: 20.272299999999998 - type: nauc_recall_at_10_std value: -3.9745000000000004 - type: nauc_recall_at_10_diff1 value: 34.875699999999995 - type: nauc_recall_at_20_max value: 27.0707 - type: nauc_recall_at_20_std value: 5.8709 - type: nauc_recall_at_20_diff1 value: 34.921600000000005 - type: nauc_recall_at_100_max value: 48.045100000000005 - type: nauc_recall_at_100_std value: 32.3099 - type: nauc_recall_at_100_diff1 value: 30.127 - type: nauc_recall_at_1000_max value: 60.827299999999994 - type: nauc_recall_at_1000_std value: 49.6791 - type: nauc_recall_at_1000_diff1 value: 32.2816 - type: nauc_precision_at_1_max value: 57.2789 - type: nauc_precision_at_1_std value: 34.9863 - type: nauc_precision_at_1_diff1 value: 44.0111 - type: nauc_precision_at_3_max value: 55.550900000000006 - type: nauc_precision_at_3_std value: 39.1605 - type: nauc_precision_at_3_diff1 value: 2.1411 - type: nauc_precision_at_5_max value: 60.1216 - type: nauc_precision_at_5_std value: 49.1925 - type: nauc_precision_at_5_diff1 value: -4.2296 - type: nauc_precision_at_10_max value: 63.53339999999999 - type: nauc_precision_at_10_std value: 57.2366 - type: nauc_precision_at_10_diff1 value: -9.1914 - type: nauc_precision_at_20_max value: 63.2997 - type: nauc_precision_at_20_std value: 62.778 - type: nauc_precision_at_20_diff1 value: -11.4618 - type: nauc_precision_at_100_max value: 61.345000000000006 - type: nauc_precision_at_100_std value: 66.3033 - type: nauc_precision_at_100_diff1 value: -14.8779 - type: nauc_precision_at_1000_max value: 56.28300000000001 - type: nauc_precision_at_1000_std value: 62.91290000000001 - type: nauc_precision_at_1000_diff1 value: -16.6149 - type: nauc_mrr_at_1_max value: 57.2789 - type: nauc_mrr_at_1_std value: 34.9863 - type: nauc_mrr_at_1_diff1 value: 44.0111 - type: nauc_mrr_at_3_max value: 57.678200000000004 - type: nauc_mrr_at_3_std value: 33.5744 - type: nauc_mrr_at_3_diff1 value: 39.5643 - type: nauc_mrr_at_5_max value: 58.668600000000005 - type: nauc_mrr_at_5_std value: 33.5118 - type: nauc_mrr_at_5_diff1 value: 40.888200000000005 - type: nauc_mrr_at_10_max value: 58.4754 
- type: nauc_mrr_at_10_std value: 33.7964 - type: nauc_mrr_at_10_diff1 value: 41.314 - type: nauc_mrr_at_20_max value: 58.434 - type: nauc_mrr_at_20_std value: 33.903 - type: nauc_mrr_at_20_diff1 value: 41.217999999999996 - type: nauc_mrr_at_100_max value: 58.4576 - type: nauc_mrr_at_100_std value: 33.9478 - type: nauc_mrr_at_100_diff1 value: 41.172599999999996 - type: nauc_mrr_at_1000_max value: 58.444399999999995 - type: nauc_mrr_at_1000_std value: 33.9292 - type: nauc_mrr_at_1000_diff1 value: 41.166199999999996 - type: main_score value: 59.216 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (ru) type: miracl/mmteb-miracl config: ru split: dev revision: main metrics: - type: ndcg_at_1 value: 47.524 - type: ndcg_at_3 value: 46.812 - type: ndcg_at_5 value: 48.442 - type: ndcg_at_10 value: 52.349000000000004 - type: ndcg_at_20 value: 55.669000000000004 - type: ndcg_at_100 value: 59.724999999999994 - type: ndcg_at_1000 value: 61.312999999999995 - type: map_at_1 value: 24.337 - type: map_at_3 value: 35.765 - type: map_at_5 value: 39.153 - type: map_at_10 value: 42.225 - type: map_at_20 value: 43.782 - type: map_at_100 value: 44.887 - type: map_at_1000 value: 45.013 - type: recall_at_1 value: 24.337 - type: recall_at_3 value: 42.927 - type: recall_at_5 value: 51.258 - type: recall_at_10 value: 62.437 - type: recall_at_20 value: 71.411 - type: recall_at_100 value: 86.489 - type: recall_at_1000 value: 95.26599999999999 - type: precision_at_1 value: 47.524 - type: precision_at_3 value: 31.948999999999998 - type: precision_at_5 value: 24.121000000000002 - type: precision_at_10 value: 15.534999999999998 - type: precision_at_20 value: 9.408999999999999 - type: precision_at_100 value: 2.407 - type: precision_at_1000 value: 0.271 - type: mrr_at_1 value: 47.524 - type: mrr_at_3 value: 57.6012 - type: mrr_at_5 value: 59.130700000000004 - type: mrr_at_10 value: 60.1824 - type: mrr_at_20 value: 60.507200000000005 - type: mrr_at_100 value: 60.6675 - type: mrr_at_1000 value: 60.6789 - type: nauc_ndcg_at_1_max value: 32.3091 - type: nauc_ndcg_at_1_std value: 10.915700000000001 - type: nauc_ndcg_at_1_diff1 value: 35.0477 - type: nauc_ndcg_at_3_max value: 30.5579 - type: nauc_ndcg_at_3_std value: 9.9651 - type: nauc_ndcg_at_3_diff1 value: 28.537200000000002 - type: nauc_ndcg_at_5_max value: 30.7637 - type: nauc_ndcg_at_5_std value: 9.7618 - type: nauc_ndcg_at_5_diff1 value: 28.225699999999996 - type: nauc_ndcg_at_10_max value: 32.0146 - type: nauc_ndcg_at_10_std value: 9.681099999999999 - type: nauc_ndcg_at_10_diff1 value: 27.6866 - type: nauc_ndcg_at_20_max value: 34.7846 - type: nauc_ndcg_at_20_std value: 13.270599999999998 - type: nauc_ndcg_at_20_diff1 value: 27.8097 - type: nauc_ndcg_at_100_max value: 37.1031 - type: nauc_ndcg_at_100_std value: 16.512 - type: nauc_ndcg_at_100_diff1 value: 28.294200000000004 - type: nauc_ndcg_at_1000_max value: 36.5248 - type: nauc_ndcg_at_1000_std value: 16.1206 - type: nauc_ndcg_at_1000_diff1 value: 28.6308 - type: nauc_map_at_1_max value: 17.363300000000002 - type: nauc_map_at_1_std value: -3.3156 - type: nauc_map_at_1_diff1 value: 33.9402 - type: nauc_map_at_3_max value: 23.0235 - type: nauc_map_at_3_std value: 1.2713999999999999 - type: nauc_map_at_3_diff1 value: 28.946499999999997 - type: nauc_map_at_5_max value: 25.8014 - type: nauc_map_at_5_std value: 3.8541 - type: nauc_map_at_5_diff1 value: 28.526 - type: nauc_map_at_10_max value: 27.6617 - type: nauc_map_at_10_std value: 5.2938 - type: nauc_map_at_10_diff1 value: 28.122700000000002 - type: 
nauc_map_at_20_max value: 29.071399999999997 - type: nauc_map_at_20_std value: 7.005 - type: nauc_map_at_20_diff1 value: 28.075 - type: nauc_map_at_100_max value: 29.9533 - type: nauc_map_at_100_std value: 8.0838 - type: nauc_map_at_100_diff1 value: 28.2424 - type: nauc_map_at_1000_max value: 29.936200000000003 - type: nauc_map_at_1000_std value: 8.0967 - type: nauc_map_at_1000_diff1 value: 28.259 - type: nauc_recall_at_1_max value: 17.363300000000002 - type: nauc_recall_at_1_std value: -3.3156 - type: nauc_recall_at_1_diff1 value: 33.9402 - type: nauc_recall_at_3_max value: 20.7272 - type: nauc_recall_at_3_std value: 1.9171 - type: nauc_recall_at_3_diff1 value: 23.505300000000002 - type: nauc_recall_at_5_max value: 24.55 - type: nauc_recall_at_5_std value: 6.1491999999999996 - type: nauc_recall_at_5_diff1 value: 21.1769 - type: nauc_recall_at_10_max value: 26.6134 - type: nauc_recall_at_10_std value: 7.3684 - type: nauc_recall_at_10_diff1 value: 18.0016 - type: nauc_recall_at_20_max value: 33.744 - type: nauc_recall_at_20_std value: 17.2573 - type: nauc_recall_at_20_diff1 value: 17.3872 - type: nauc_recall_at_100_max value: 49.5745 - type: nauc_recall_at_100_std value: 39.4003 - type: nauc_recall_at_100_diff1 value: 16.1814 - type: nauc_recall_at_1000_max value: 62.5842 - type: nauc_recall_at_1000_std value: 64.7392 - type: nauc_recall_at_1000_diff1 value: 16.9464 - type: nauc_precision_at_1_max value: 32.3091 - type: nauc_precision_at_1_std value: 10.915700000000001 - type: nauc_precision_at_1_diff1 value: 35.0477 - type: nauc_precision_at_3_max value: 34.9888 - type: nauc_precision_at_3_std value: 22.009600000000002 - type: nauc_precision_at_3_diff1 value: 13.4801 - type: nauc_precision_at_5_max value: 34.1539 - type: nauc_precision_at_5_std value: 25.2388 - type: nauc_precision_at_5_diff1 value: 8.622 - type: nauc_precision_at_10_max value: 31.194 - type: nauc_precision_at_10_std value: 25.397100000000002 - type: nauc_precision_at_10_diff1 value: 3.4173 - type: nauc_precision_at_20_max value: 29.3116 - type: nauc_precision_at_20_std value: 28.8229 - type: nauc_precision_at_20_diff1 value: -0.4374 - type: nauc_precision_at_100_max value: 23.853099999999998 - type: nauc_precision_at_100_std value: 29.942800000000002 - type: nauc_precision_at_100_diff1 value: -3.9575 - type: nauc_precision_at_1000_max value: 16.5958 - type: nauc_precision_at_1000_std value: 25.208599999999997 - type: nauc_precision_at_1000_diff1 value: -6.1125 - type: nauc_mrr_at_1_max value: 32.3091 - type: nauc_mrr_at_1_std value: 10.915700000000001 - type: nauc_mrr_at_1_diff1 value: 35.0477 - type: nauc_mrr_at_3_max value: 36.9469 - type: nauc_mrr_at_3_std value: 15.4767 - type: nauc_mrr_at_3_diff1 value: 33.3922 - type: nauc_mrr_at_5_max value: 37.7043 - type: nauc_mrr_at_5_std value: 16.2089 - type: nauc_mrr_at_5_diff1 value: 33.3182 - type: nauc_mrr_at_10_max value: 37.5403 - type: nauc_mrr_at_10_std value: 16.229599999999998 - type: nauc_mrr_at_10_diff1 value: 33.2431 - type: nauc_mrr_at_20_max value: 37.4812 - type: nauc_mrr_at_20_std value: 16.278100000000002 - type: nauc_mrr_at_20_diff1 value: 33.3127 - type: nauc_mrr_at_100_max value: 37.43 - type: nauc_mrr_at_100_std value: 16.2077 - type: nauc_mrr_at_100_diff1 value: 33.3439 - type: nauc_mrr_at_1000_max value: 37.4133 - type: nauc_mrr_at_1000_std value: 16.1859 - type: nauc_mrr_at_1000_diff1 value: 33.353300000000004 - type: main_score value: 52.349000000000004 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (sw) type: miracl/mmteb-miracl config: 
sw split: dev revision: main metrics: - type: ndcg_at_1 value: 51.66 - type: ndcg_at_3 value: 54.827999999999996 - type: ndcg_at_5 value: 57.382 - type: ndcg_at_10 value: 61.271 - type: ndcg_at_20 value: 63.64300000000001 - type: ndcg_at_100 value: 66.09899999999999 - type: ndcg_at_1000 value: 66.867 - type: map_at_1 value: 35.276999999999994 - type: map_at_3 value: 48.260999999999996 - type: map_at_5 value: 51.029 - type: map_at_10 value: 53.405 - type: map_at_20 value: 54.298 - type: map_at_100 value: 54.836 - type: map_at_1000 value: 54.887 - type: recall_at_1 value: 35.276999999999994 - type: recall_at_3 value: 56.739 - type: recall_at_5 value: 64.21 - type: recall_at_10 value: 74.368 - type: recall_at_20 value: 81.888 - type: recall_at_100 value: 92.26100000000001 - type: recall_at_1000 value: 97.109 - type: precision_at_1 value: 51.66 - type: precision_at_3 value: 30.843999999999998 - type: precision_at_5 value: 21.743000000000002 - type: precision_at_10 value: 12.988 - type: precision_at_20 value: 7.364999999999999 - type: precision_at_100 value: 1.714 - type: precision_at_1000 value: 0.184 - type: mrr_at_1 value: 51.6598 - type: mrr_at_3 value: 60.338899999999995 - type: mrr_at_5 value: 61.7808 - type: mrr_at_10 value: 62.751599999999996 - type: mrr_at_20 value: 63.1412 - type: mrr_at_100 value: 63.309099999999994 - type: mrr_at_1000 value: 63.317299999999996 - type: nauc_ndcg_at_1_max value: 33.6073 - type: nauc_ndcg_at_1_std value: 6.1046000000000005 - type: nauc_ndcg_at_1_diff1 value: 41.1955 - type: nauc_ndcg_at_3_max value: 31.268400000000003 - type: nauc_ndcg_at_3_std value: -2.9395000000000002 - type: nauc_ndcg_at_3_diff1 value: 35.6186 - type: nauc_ndcg_at_5_max value: 32.3145 - type: nauc_ndcg_at_5_std value: -0.7283999999999999 - type: nauc_ndcg_at_5_diff1 value: 37.7602 - type: nauc_ndcg_at_10_max value: 35.1426 - type: nauc_ndcg_at_10_std value: -0.13829999999999998 - type: nauc_ndcg_at_10_diff1 value: 36.8929 - type: nauc_ndcg_at_20_max value: 35.4227 - type: nauc_ndcg_at_20_std value: 0.8394999999999999 - type: nauc_ndcg_at_20_diff1 value: 36.9758 - type: nauc_ndcg_at_100_max value: 36.9415 - type: nauc_ndcg_at_100_std value: 5.9117999999999995 - type: nauc_ndcg_at_100_diff1 value: 37.0021 - type: nauc_ndcg_at_1000_max value: 37.0195 - type: nauc_ndcg_at_1000_std value: 5.5642 - type: nauc_ndcg_at_1000_diff1 value: 37.1389 - type: nauc_map_at_1_max value: 14.893600000000001 - type: nauc_map_at_1_std value: -6.9723 - type: nauc_map_at_1_diff1 value: 47.328399999999995 - type: nauc_map_at_3_max value: 25.1304 - type: nauc_map_at_3_std value: -5.5777 - type: nauc_map_at_3_diff1 value: 39.5728 - type: nauc_map_at_5_max value: 28.206599999999998 - type: nauc_map_at_5_std value: -3.2870000000000004 - type: nauc_map_at_5_diff1 value: 39.868500000000004 - type: nauc_map_at_10_max value: 30.520999999999997 - type: nauc_map_at_10_std value: -2.539 - type: nauc_map_at_10_diff1 value: 39.1287 - type: nauc_map_at_20_max value: 30.712899999999998 - type: nauc_map_at_20_std value: -2.0093 - type: nauc_map_at_20_diff1 value: 39.0357 - type: nauc_map_at_100_max value: 31.0687 - type: nauc_map_at_100_std value: -1.0538 - type: nauc_map_at_100_diff1 value: 38.9851 - type: nauc_map_at_1000_max value: 31.0939 - type: nauc_map_at_1000_std value: -1.0348 - type: nauc_map_at_1000_diff1 value: 38.9719 - type: nauc_recall_at_1_max value: 14.893600000000001 - type: nauc_recall_at_1_std value: -6.9723 - type: nauc_recall_at_1_diff1 value: 47.328399999999995 - type: nauc_recall_at_3_max value: 
25.0525 - type: nauc_recall_at_3_std value: -9.808300000000001 - type: nauc_recall_at_3_diff1 value: 32.9087 - type: nauc_recall_at_5_max value: 28.8065 - type: nauc_recall_at_5_std value: -4.5512999999999995 - type: nauc_recall_at_5_diff1 value: 32.9308 - type: nauc_recall_at_10_max value: 34.9121 - type: nauc_recall_at_10_std value: -5.8499 - type: nauc_recall_at_10_diff1 value: 29.791 - type: nauc_recall_at_20_max value: 35.6729 - type: nauc_recall_at_20_std value: -4.3512 - type: nauc_recall_at_20_diff1 value: 29.087600000000002 - type: nauc_recall_at_100_max value: 53.5866 - type: nauc_recall_at_100_std value: 49.692 - type: nauc_recall_at_100_diff1 value: 28.9725 - type: nauc_recall_at_1000_max value: 80.23949999999999 - type: nauc_recall_at_1000_std value: 86.7359 - type: nauc_recall_at_1000_diff1 value: 37.333 - type: nauc_precision_at_1_max value: 33.6073 - type: nauc_precision_at_1_std value: 6.1046000000000005 - type: nauc_precision_at_1_diff1 value: 41.1955 - type: nauc_precision_at_3_max value: 40.2515 - type: nauc_precision_at_3_std value: 12.1973 - type: nauc_precision_at_3_diff1 value: 3.9177999999999997 - type: nauc_precision_at_5_max value: 41.7312 - type: nauc_precision_at_5_std value: 17.921400000000002 - type: nauc_precision_at_5_diff1 value: -0.2405 - type: nauc_precision_at_10_max value: 39.9025 - type: nauc_precision_at_10_std value: 18.9909 - type: nauc_precision_at_10_diff1 value: -8.5406 - type: nauc_precision_at_20_max value: 34.1753 - type: nauc_precision_at_20_std value: 21.9853 - type: nauc_precision_at_20_diff1 value: -13.966700000000001 - type: nauc_precision_at_100_max value: 30.461 - type: nauc_precision_at_100_std value: 34.063900000000004 - type: nauc_precision_at_100_diff1 value: -21.1252 - type: nauc_precision_at_1000_max value: 26.5512 - type: nauc_precision_at_1000_std value: 30.7066 - type: nauc_precision_at_1000_diff1 value: -22.2902 - type: nauc_mrr_at_1_max value: 33.6073 - type: nauc_mrr_at_1_std value: 6.1046000000000005 - type: nauc_mrr_at_1_diff1 value: 41.1955 - type: nauc_mrr_at_3_max value: 37.6571 - type: nauc_mrr_at_3_std value: 5.2793 - type: nauc_mrr_at_3_diff1 value: 36.5302 - type: nauc_mrr_at_5_max value: 38.6239 - type: nauc_mrr_at_5_std value: 7.762700000000001 - type: nauc_mrr_at_5_diff1 value: 36.525 - type: nauc_mrr_at_10_max value: 38.4608 - type: nauc_mrr_at_10_std value: 7.131 - type: nauc_mrr_at_10_diff1 value: 36.4653 - type: nauc_mrr_at_20_max value: 38.2783 - type: nauc_mrr_at_20_std value: 6.9415000000000004 - type: nauc_mrr_at_20_diff1 value: 36.5089 - type: nauc_mrr_at_100_max value: 38.337199999999996 - type: nauc_mrr_at_100_std value: 7.2228 - type: nauc_mrr_at_100_diff1 value: 36.6891 - type: nauc_mrr_at_1000_max value: 38.327600000000004 - type: nauc_mrr_at_1000_std value: 7.206300000000001 - type: nauc_mrr_at_1000_diff1 value: 36.696400000000004 - type: main_score value: 61.271 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (te) type: miracl/mmteb-miracl config: te split: dev revision: main metrics: - type: ndcg_at_1 value: 63.647 - type: ndcg_at_3 value: 75.98700000000001 - type: ndcg_at_5 value: 77.86999999999999 - type: ndcg_at_10 value: 79.149 - type: ndcg_at_20 value: 79.50399999999999 - type: ndcg_at_100 value: 80.199 - type: ndcg_at_1000 value: 80.393 - type: map_at_1 value: 62.963 - type: map_at_3 value: 72.94699999999999 - type: map_at_5 value: 74.042 - type: map_at_10 value: 74.612 - type: map_at_20 value: 74.727 - type: map_at_100 value: 74.831 - type: map_at_1000 value: 74.839 - type: 
recall_at_1 value: 62.963 - type: recall_at_3 value: 84.15899999999999 - type: recall_at_5 value: 88.627 - type: recall_at_10 value: 92.411 - type: recall_at_20 value: 93.74 - type: recall_at_100 value: 97.363 - type: recall_at_1000 value: 98.833 - type: precision_at_1 value: 63.647 - type: precision_at_3 value: 28.622999999999998 - type: precision_at_5 value: 18.163999999999998 - type: precision_at_10 value: 9.481 - type: precision_at_20 value: 4.819 - type: precision_at_100 value: 1.001 - type: precision_at_1000 value: 0.10200000000000001 - type: mrr_at_1 value: 63.647299999999994 - type: mrr_at_3 value: 73.49029999999999 - type: mrr_at_5 value: 74.4626 - type: mrr_at_10 value: 74.98280000000001 - type: mrr_at_20 value: 75.0719 - type: mrr_at_100 value: 75.1695 - type: mrr_at_1000 value: 75.1769 - type: nauc_ndcg_at_1_max value: 33.3063 - type: nauc_ndcg_at_1_std value: -27.609699999999997 - type: nauc_ndcg_at_1_diff1 value: 64.8293 - type: nauc_ndcg_at_3_max value: 42.4738 - type: nauc_ndcg_at_3_std value: -23.8921 - type: nauc_ndcg_at_3_diff1 value: 56.43749999999999 - type: nauc_ndcg_at_5_max value: 43.132 - type: nauc_ndcg_at_5_std value: -23.2181 - type: nauc_ndcg_at_5_diff1 value: 55.722899999999996 - type: nauc_ndcg_at_10_max value: 43.036 - type: nauc_ndcg_at_10_std value: -22.880300000000002 - type: nauc_ndcg_at_10_diff1 value: 56.22279999999999 - type: nauc_ndcg_at_20_max value: 43.1538 - type: nauc_ndcg_at_20_std value: -22.7674 - type: nauc_ndcg_at_20_diff1 value: 56.4893 - type: nauc_ndcg_at_100_max value: 42.0908 - type: nauc_ndcg_at_100_std value: -22.3071 - type: nauc_ndcg_at_100_diff1 value: 57.5928 - type: nauc_ndcg_at_1000_max value: 41.6223 - type: nauc_ndcg_at_1000_std value: -22.747600000000002 - type: nauc_ndcg_at_1000_diff1 value: 57.6603 - type: nauc_map_at_1_max value: 31.9355 - type: nauc_map_at_1_std value: -29.4362 - type: nauc_map_at_1_diff1 value: 64.9802 - type: nauc_map_at_3_max value: 39.3304 - type: nauc_map_at_3_std value: -25.819 - type: nauc_map_at_3_diff1 value: 58.8664 - type: nauc_map_at_5_max value: 39.659800000000004 - type: nauc_map_at_5_std value: -25.3619 - type: nauc_map_at_5_diff1 value: 58.57449999999999 - type: nauc_map_at_10_max value: 39.6121 - type: nauc_map_at_10_std value: -25.2399 - type: nauc_map_at_10_diff1 value: 58.8083 - type: nauc_map_at_20_max value: 39.6958 - type: nauc_map_at_20_std value: -25.116 - type: nauc_map_at_20_diff1 value: 58.8995 - type: nauc_map_at_100_max value: 39.5617 - type: nauc_map_at_100_std value: -25.0319 - type: nauc_map_at_100_diff1 value: 59.053599999999996 - type: nauc_map_at_1000_max value: 39.5469 - type: nauc_map_at_1000_std value: -25.0473 - type: nauc_map_at_1000_diff1 value: 59.0556 - type: nauc_recall_at_1_max value: 31.9355 - type: nauc_recall_at_1_std value: -29.4362 - type: nauc_recall_at_1_diff1 value: 64.9802 - type: nauc_recall_at_3_max value: 54.57149999999999 - type: nauc_recall_at_3_std value: -17.9671 - type: nauc_recall_at_3_diff1 value: 45.4961 - type: nauc_recall_at_5_max value: 61.2002 - type: nauc_recall_at_5_std value: -13.9075 - type: nauc_recall_at_5_diff1 value: 39.1115 - type: nauc_recall_at_10_max value: 68.2226 - type: nauc_recall_at_10_std value: -7.230200000000001 - type: nauc_recall_at_10_diff1 value: 34.9241 - type: nauc_recall_at_20_max value: 74.08019999999999 - type: nauc_recall_at_20_std value: -4.4287 - type: nauc_recall_at_20_diff1 value: 33.4441 - type: nauc_recall_at_100_max value: 80.2462 - type: nauc_recall_at_100_std value: 30.9842 - type: 
nauc_recall_at_100_diff1 value: 38.0659 - type: nauc_recall_at_1000_max value: 77.5197 - type: nauc_recall_at_1000_std value: 51.5945 - type: nauc_recall_at_1000_diff1 value: 22.9724 - type: nauc_precision_at_1_max value: 33.3063 - type: nauc_precision_at_1_std value: -27.609699999999997 - type: nauc_precision_at_1_diff1 value: 64.8293 - type: nauc_precision_at_3_max value: 56.837199999999996 - type: nauc_precision_at_3_std value: -7.5578 - type: nauc_precision_at_3_diff1 value: 36.4516 - type: nauc_precision_at_5_max value: 57.3511 - type: nauc_precision_at_5_std value: 2.889 - type: nauc_precision_at_5_diff1 value: 23.0276 - type: nauc_precision_at_10_max value: 56.852999999999994 - type: nauc_precision_at_10_std value: 13.305900000000001 - type: nauc_precision_at_10_diff1 value: 12.1547 - type: nauc_precision_at_20_max value: 55.735299999999995 - type: nauc_precision_at_20_std value: 20.3483 - type: nauc_precision_at_20_diff1 value: 6.6423 - type: nauc_precision_at_100_max value: 43.358999999999995 - type: nauc_precision_at_100_std value: 44.4213 - type: nauc_precision_at_100_diff1 value: -5.556500000000001 - type: nauc_precision_at_1000_max value: 27.974 - type: nauc_precision_at_1000_std value: 47.254400000000004 - type: nauc_precision_at_1000_diff1 value: -21.8157 - type: nauc_mrr_at_1_max value: 33.3063 - type: nauc_mrr_at_1_std value: -27.609699999999997 - type: nauc_mrr_at_1_diff1 value: 64.8293 - type: nauc_mrr_at_3_max value: 40.129 - type: nauc_mrr_at_3_std value: -24.0152 - type: nauc_mrr_at_3_diff1 value: 58.9134 - type: nauc_mrr_at_5_max value: 40.1054 - type: nauc_mrr_at_5_std value: -24.0554 - type: nauc_mrr_at_5_diff1 value: 58.71920000000001 - type: nauc_mrr_at_10_max value: 40.0067 - type: nauc_mrr_at_10_std value: -23.9912 - type: nauc_mrr_at_10_diff1 value: 58.964099999999995 - type: nauc_mrr_at_20_max value: 39.9983 - type: nauc_mrr_at_20_std value: -24.0277 - type: nauc_mrr_at_20_diff1 value: 59.0425 - type: nauc_mrr_at_100_max value: 39.8766 - type: nauc_mrr_at_100_std value: -23.9296 - type: nauc_mrr_at_100_diff1 value: 59.1824 - type: nauc_mrr_at_1000_max value: 39.861799999999995 - type: nauc_mrr_at_1000_std value: -23.9468 - type: nauc_mrr_at_1000_diff1 value: 59.1847 - type: main_score value: 79.149 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (th) type: miracl/mmteb-miracl config: th split: dev revision: main metrics: - type: ndcg_at_1 value: 66.712 - type: ndcg_at_3 value: 67.393 - type: ndcg_at_5 value: 70.20100000000001 - type: ndcg_at_10 value: 73.324 - type: ndcg_at_20 value: 75.24300000000001 - type: ndcg_at_100 value: 76.633 - type: ndcg_at_1000 value: 77.119 - type: map_at_1 value: 47.105999999999995 - type: map_at_3 value: 60.67700000000001 - type: map_at_5 value: 63.81099999999999 - type: map_at_10 value: 65.998 - type: map_at_20 value: 66.914 - type: map_at_100 value: 67.258 - type: map_at_1000 value: 67.293 - type: recall_at_1 value: 47.105999999999995 - type: recall_at_3 value: 68.45599999999999 - type: recall_at_5 value: 75.91499999999999 - type: recall_at_10 value: 84.294 - type: recall_at_20 value: 90.08500000000001 - type: recall_at_100 value: 95.949 - type: recall_at_1000 value: 98.874 - type: precision_at_1 value: 66.712 - type: precision_at_3 value: 36.016 - type: precision_at_5 value: 25.157 - type: precision_at_10 value: 14.516000000000002 - type: precision_at_20 value: 7.994999999999999 - type: precision_at_100 value: 1.738 - type: precision_at_1000 value: 0.181 - type: mrr_at_1 value: 66.71209999999999 - type: mrr_at_3 
value: 74.3747 - type: mrr_at_5 value: 75.3297 - type: mrr_at_10 value: 75.9858 - type: mrr_at_20 value: 76.1819 - type: mrr_at_100 value: 76.2551 - type: mrr_at_1000 value: 76.2587 - type: nauc_ndcg_at_1_max value: 43.199799999999996 - type: nauc_ndcg_at_1_std value: 8.6242 - type: nauc_ndcg_at_1_diff1 value: 49.3688 - type: nauc_ndcg_at_3_max value: 37.9248 - type: nauc_ndcg_at_3_std value: -1.3769 - type: nauc_ndcg_at_3_diff1 value: 39.9588 - type: nauc_ndcg_at_5_max value: 38.4241 - type: nauc_ndcg_at_5_std value: -1.0533000000000001 - type: nauc_ndcg_at_5_diff1 value: 40.0453 - type: nauc_ndcg_at_10_max value: 40.4105 - type: nauc_ndcg_at_10_std value: 1.4455 - type: nauc_ndcg_at_10_diff1 value: 40.6256 - type: nauc_ndcg_at_20_max value: 41.1133 - type: nauc_ndcg_at_20_std value: 2.931 - type: nauc_ndcg_at_20_diff1 value: 40.920899999999996 - type: nauc_ndcg_at_100_max value: 41.6336 - type: nauc_ndcg_at_100_std value: 4.9768 - type: nauc_ndcg_at_100_diff1 value: 41.3658 - type: nauc_ndcg_at_1000_max value: 41.6223 - type: nauc_ndcg_at_1000_std value: 5.2031 - type: nauc_ndcg_at_1000_diff1 value: 41.4062 - type: nauc_map_at_1_max value: 20.7626 - type: nauc_map_at_1_std value: -8.0023 - type: nauc_map_at_1_diff1 value: 44.4569 - type: nauc_map_at_3_max value: 32.5175 - type: nauc_map_at_3_std value: -7.458099999999999 - type: nauc_map_at_3_diff1 value: 40.2164 - type: nauc_map_at_5_max value: 34.4803 - type: nauc_map_at_5_std value: -5.149 - type: nauc_map_at_5_diff1 value: 39.7814 - type: nauc_map_at_10_max value: 36.0112 - type: nauc_map_at_10_std value: -2.7143 - type: nauc_map_at_10_diff1 value: 40.231 - type: nauc_map_at_20_max value: 36.574200000000005 - type: nauc_map_at_20_std value: -1.718 - type: nauc_map_at_20_diff1 value: 40.278000000000006 - type: nauc_map_at_100_max value: 36.7445 - type: nauc_map_at_100_std value: -1.208 - type: nauc_map_at_100_diff1 value: 40.4046 - type: nauc_map_at_1000_max value: 36.770199999999996 - type: nauc_map_at_1000_std value: -1.1672 - type: nauc_map_at_1000_diff1 value: 40.409099999999995 - type: nauc_recall_at_1_max value: 20.7626 - type: nauc_recall_at_1_std value: -8.0023 - type: nauc_recall_at_1_diff1 value: 44.4569 - type: nauc_recall_at_3_max value: 31.2938 - type: nauc_recall_at_3_std value: -12.4723 - type: nauc_recall_at_3_diff1 value: 35.0524 - type: nauc_recall_at_5_max value: 34.4221 - type: nauc_recall_at_5_std value: -9.0849 - type: nauc_recall_at_5_diff1 value: 33.6966 - type: nauc_recall_at_10_max value: 40.1481 - type: nauc_recall_at_10_std value: -2.4007 - type: nauc_recall_at_10_diff1 value: 32.398700000000005 - type: nauc_recall_at_20_max value: 43.068400000000004 - type: nauc_recall_at_20_std value: 0.4869 - type: nauc_recall_at_20_diff1 value: 31.7169 - type: nauc_recall_at_100_max value: 54.1481 - type: nauc_recall_at_100_std value: 28.3243 - type: nauc_recall_at_100_diff1 value: 29.1055 - type: nauc_recall_at_1000_max value: 82.51389999999999 - type: nauc_recall_at_1000_std value: 88.3602 - type: nauc_recall_at_1000_diff1 value: 14.9201 - type: nauc_precision_at_1_max value: 43.199799999999996 - type: nauc_precision_at_1_std value: 8.6242 - type: nauc_precision_at_1_diff1 value: 49.3688 - type: nauc_precision_at_3_max value: 35.1732 - type: nauc_precision_at_3_std value: 16.3941 - type: nauc_precision_at_3_diff1 value: 4.4193999999999996 - type: nauc_precision_at_5_max value: 28.2059 - type: nauc_precision_at_5_std value: 22.4744 - type: nauc_precision_at_5_diff1 value: -4.0808 - type: nauc_precision_at_10_max value: 
22.7955 - type: nauc_precision_at_10_std value: 28.8744 - type: nauc_precision_at_10_diff1 value: -9.9309 - type: nauc_precision_at_20_max value: 17.2362 - type: nauc_precision_at_20_std value: 30.7132 - type: nauc_precision_at_20_diff1 value: -13.5708 - type: nauc_precision_at_100_max value: 13.3455 - type: nauc_precision_at_100_std value: 34.1715 - type: nauc_precision_at_100_diff1 value: -16.4298 - type: nauc_precision_at_1000_max value: 10.639700000000001 - type: nauc_precision_at_1000_std value: 33.1325 - type: nauc_precision_at_1000_diff1 value: -17.5938 - type: nauc_mrr_at_1_max value: 43.199799999999996 - type: nauc_mrr_at_1_std value: 8.6242 - type: nauc_mrr_at_1_diff1 value: 49.3688 - type: nauc_mrr_at_3_max value: 47.106500000000004 - type: nauc_mrr_at_3_std value: 10.3023 - type: nauc_mrr_at_3_diff1 value: 46.2565 - type: nauc_mrr_at_5_max value: 47.151900000000005 - type: nauc_mrr_at_5_std value: 11.2485 - type: nauc_mrr_at_5_diff1 value: 46.4519 - type: nauc_mrr_at_10_max value: 47.468700000000005 - type: nauc_mrr_at_10_std value: 11.5245 - type: nauc_mrr_at_10_diff1 value: 46.291399999999996 - type: nauc_mrr_at_20_max value: 47.3577 - type: nauc_mrr_at_20_std value: 11.3081 - type: nauc_mrr_at_20_diff1 value: 46.490700000000004 - type: nauc_mrr_at_100_max value: 47.3153 - type: nauc_mrr_at_100_std value: 11.2816 - type: nauc_mrr_at_100_diff1 value: 46.5288 - type: nauc_mrr_at_1000_max value: 47.308299999999996 - type: nauc_mrr_at_1000_std value: 11.2835 - type: nauc_mrr_at_1000_diff1 value: 46.5276 - type: main_score value: 73.324 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (yo) type: miracl/mmteb-miracl config: yo split: dev revision: main metrics: - type: ndcg_at_1 value: 49.58 - type: ndcg_at_3 value: 64.793 - type: ndcg_at_5 value: 66.709 - type: ndcg_at_10 value: 68.705 - type: ndcg_at_20 value: 69.8 - type: ndcg_at_100 value: 70.664 - type: ndcg_at_1000 value: 71.197 - type: map_at_1 value: 46.289 - type: map_at_3 value: 59.921 - type: map_at_5 value: 61.409000000000006 - type: map_at_10 value: 62.379 - type: map_at_20 value: 62.773 - type: map_at_100 value: 62.907000000000004 - type: map_at_1000 value: 62.922999999999995 - type: recall_at_1 value: 46.289 - type: recall_at_3 value: 75.07000000000001 - type: recall_at_5 value: 79.202 - type: recall_at_10 value: 85.154 - type: recall_at_20 value: 89.076 - type: recall_at_100 value: 93.557 - type: recall_at_1000 value: 97.479 - type: precision_at_1 value: 49.58 - type: precision_at_3 value: 28.571 - type: precision_at_5 value: 18.655 - type: precision_at_10 value: 10.084 - type: precision_at_20 value: 5.2940000000000005 - type: precision_at_100 value: 1.109 - type: precision_at_1000 value: 0.11800000000000001 - type: mrr_at_1 value: 49.5798 - type: mrr_at_3 value: 63.025200000000005 - type: mrr_at_5 value: 63.6134 - type: mrr_at_10 value: 64.2504 - type: mrr_at_20 value: 64.5152 - type: mrr_at_100 value: 64.6281 - type: mrr_at_1000 value: 64.63839999999999 - type: nauc_ndcg_at_1_max value: 18.5119 - type: nauc_ndcg_at_1_std value: -26.7799 - type: nauc_ndcg_at_1_diff1 value: 49.55 - type: nauc_ndcg_at_3_max value: 35.6833 - type: nauc_ndcg_at_3_std value: -19.023699999999998 - type: nauc_ndcg_at_3_diff1 value: 51.4553 - type: nauc_ndcg_at_5_max value: 34.252700000000004 - type: nauc_ndcg_at_5_std value: -16.9909 - type: nauc_ndcg_at_5_diff1 value: 50.034 - type: nauc_ndcg_at_10_max value: 35.115899999999996 - type: nauc_ndcg_at_10_std value: -15.454300000000002 - type: nauc_ndcg_at_10_diff1 value: 
51.13419999999999 - type: nauc_ndcg_at_20_max value: 36.3127 - type: nauc_ndcg_at_20_std value: -13.5123 - type: nauc_ndcg_at_20_diff1 value: 52.505100000000006 - type: nauc_ndcg_at_100_max value: 35.0788 - type: nauc_ndcg_at_100_std value: -15.118 - type: nauc_ndcg_at_100_diff1 value: 52.2994 - type: nauc_ndcg_at_1000_max value: 34.1448 - type: nauc_ndcg_at_1000_std value: -15.695300000000001 - type: nauc_ndcg_at_1000_diff1 value: 51.7561 - type: nauc_map_at_1_max value: 17.9766 - type: nauc_map_at_1_std value: -26.0689 - type: nauc_map_at_1_diff1 value: 51.3004 - type: nauc_map_at_3_max value: 30.426 - type: nauc_map_at_3_std value: -21.5618 - type: nauc_map_at_3_diff1 value: 51.9665 - type: nauc_map_at_5_max value: 30.3093 - type: nauc_map_at_5_std value: -19.1582 - type: nauc_map_at_5_diff1 value: 50.9919 - type: nauc_map_at_10_max value: 31.1197 - type: nauc_map_at_10_std value: -18.5626 - type: nauc_map_at_10_diff1 value: 51.3278 - type: nauc_map_at_20_max value: 31.3984 - type: nauc_map_at_20_std value: -17.8214 - type: nauc_map_at_20_diff1 value: 51.5951 - type: nauc_map_at_100_max value: 31.1974 - type: nauc_map_at_100_std value: -18.0483 - type: nauc_map_at_100_diff1 value: 51.51559999999999 - type: nauc_map_at_1000_max value: 31.167699999999996 - type: nauc_map_at_1000_std value: -18.076800000000002 - type: nauc_map_at_1000_diff1 value: 51.50130000000001 - type: nauc_recall_at_1_max value: 17.9766 - type: nauc_recall_at_1_std value: -26.0689 - type: nauc_recall_at_1_diff1 value: 51.3004 - type: nauc_recall_at_3_max value: 48.720200000000006 - type: nauc_recall_at_3_std value: -12.1143 - type: nauc_recall_at_3_diff1 value: 49.863800000000005 - type: nauc_recall_at_5_max value: 48.1997 - type: nauc_recall_at_5_std value: -5.8457 - type: nauc_recall_at_5_diff1 value: 46.062599999999996 - type: nauc_recall_at_10_max value: 56.5698 - type: nauc_recall_at_10_std value: 6.0906 - type: nauc_recall_at_10_diff1 value: 51.9053 - type: nauc_recall_at_20_max value: 73.61569999999999 - type: nauc_recall_at_20_std value: 25.8535 - type: nauc_recall_at_20_diff1 value: 64.7516 - type: nauc_recall_at_100_max value: 78.054 - type: nauc_recall_at_100_std value: 23.7984 - type: nauc_recall_at_100_diff1 value: 71.61999999999999 - type: nauc_recall_at_1000_max value: 92.5519 - type: nauc_recall_at_1000_std value: 59.609100000000005 - type: nauc_recall_at_1000_diff1 value: 78.6415 - type: nauc_precision_at_1_max value: 18.5119 - type: nauc_precision_at_1_std value: -26.7799 - type: nauc_precision_at_1_diff1 value: 49.55 - type: nauc_precision_at_3_max value: 45.402100000000004 - type: nauc_precision_at_3_std value: -5.331 - type: nauc_precision_at_3_diff1 value: 20.6481 - type: nauc_precision_at_5_max value: 33.7262 - type: nauc_precision_at_5_std value: 10.3483 - type: nauc_precision_at_5_diff1 value: 5.9393 - type: nauc_precision_at_10_max value: 35.3715 - type: nauc_precision_at_10_std value: 17.0809 - type: nauc_precision_at_10_diff1 value: 0.9325 - type: nauc_precision_at_20_max value: 35.2666 - type: nauc_precision_at_20_std value: 26.3214 - type: nauc_precision_at_20_diff1 value: -1.8064 - type: nauc_precision_at_100_max value: 29.0385 - type: nauc_precision_at_100_std value: 23.416500000000003 - type: nauc_precision_at_100_diff1 value: -10.83 - type: nauc_precision_at_1000_max value: 13.825299999999999 - type: nauc_precision_at_1000_std value: 16.7663 - type: nauc_precision_at_1000_diff1 value: -24.854200000000002 - type: nauc_mrr_at_1_max value: 18.5119 - type: nauc_mrr_at_1_std value: 
-26.7799 - type: nauc_mrr_at_1_diff1 value: 49.55 - type: nauc_mrr_at_3_max value: 29.916500000000003 - type: nauc_mrr_at_3_std value: -21.5719 - type: nauc_mrr_at_3_diff1 value: 50.2057 - type: nauc_mrr_at_5_max value: 28.929 - type: nauc_mrr_at_5_std value: -21.9015 - type: nauc_mrr_at_5_diff1 value: 49.6675 - type: nauc_mrr_at_10_max value: 28.6377 - type: nauc_mrr_at_10_std value: -21.4266 - type: nauc_mrr_at_10_diff1 value: 50.034800000000004 - type: nauc_mrr_at_20_max value: 28.7905 - type: nauc_mrr_at_20_std value: -21.192 - type: nauc_mrr_at_20_diff1 value: 50.3745 - type: nauc_mrr_at_100_max value: 28.5717 - type: nauc_mrr_at_100_std value: -21.3735 - type: nauc_mrr_at_100_diff1 value: 50.3333 - type: nauc_mrr_at_1000_max value: 28.5655 - type: nauc_mrr_at_1000_std value: -21.373 - type: nauc_mrr_at_1000_diff1 value: 50.3215 - type: main_score value: 68.705 - task: type: Retrieval dataset: name: MTEB MIRACLRetrieval (zh) type: miracl/mmteb-miracl config: zh split: dev revision: main metrics: - type: ndcg_at_1 value: 47.583 - type: ndcg_at_3 value: 45.839 - type: ndcg_at_5 value: 48.126999999999995 - type: ndcg_at_10 value: 52.553000000000004 - type: ndcg_at_20 value: 55.66799999999999 - type: ndcg_at_100 value: 60.0 - type: ndcg_at_1000 value: 61.415 - type: map_at_1 value: 24.488 - type: map_at_3 value: 36.202 - type: map_at_5 value: 39.771 - type: map_at_10 value: 42.725 - type: map_at_20 value: 44.163999999999994 - type: map_at_100 value: 45.269 - type: map_at_1000 value: 45.372 - type: recall_at_1 value: 24.488 - type: recall_at_3 value: 42.827 - type: recall_at_5 value: 52.081 - type: recall_at_10 value: 63.659 - type: recall_at_20 value: 72.652 - type: recall_at_100 value: 89.702 - type: recall_at_1000 value: 97.99600000000001 - type: precision_at_1 value: 47.583 - type: precision_at_3 value: 30.789 - type: precision_at_5 value: 23.206 - type: precision_at_10 value: 14.885000000000002 - type: precision_at_20 value: 8.803999999999998 - type: precision_at_100 value: 2.237 - type: precision_at_1000 value: 0.247 - type: mrr_at_1 value: 47.5827 - type: mrr_at_3 value: 56.4461 - type: mrr_at_5 value: 58.036500000000004 - type: mrr_at_10 value: 59.2419 - type: mrr_at_20 value: 59.5684 - type: mrr_at_100 value: 59.8496 - type: mrr_at_1000 value: 59.868500000000004 - type: nauc_ndcg_at_1_max value: 30.3153 - type: nauc_ndcg_at_1_std value: 16.1917 - type: nauc_ndcg_at_1_diff1 value: 33.1291 - type: nauc_ndcg_at_3_max value: 29.9473 - type: nauc_ndcg_at_3_std value: 9.9602 - type: nauc_ndcg_at_3_diff1 value: 26.354899999999997 - type: nauc_ndcg_at_5_max value: 27.5364 - type: nauc_ndcg_at_5_std value: 9.0106 - type: nauc_ndcg_at_5_diff1 value: 26.4299 - type: nauc_ndcg_at_10_max value: 30.1141 - type: nauc_ndcg_at_10_std value: 10.6319 - type: nauc_ndcg_at_10_diff1 value: 26.1015 - type: nauc_ndcg_at_20_max value: 31.864700000000003 - type: nauc_ndcg_at_20_std value: 14.376 - type: nauc_ndcg_at_20_diff1 value: 24.278 - type: nauc_ndcg_at_100_max value: 33.8328 - type: nauc_ndcg_at_100_std value: 17.1646 - type: nauc_ndcg_at_100_diff1 value: 24.7582 - type: nauc_ndcg_at_1000_max value: 33.0653 - type: nauc_ndcg_at_1000_std value: 15.717400000000001 - type: nauc_ndcg_at_1000_diff1 value: 25.708399999999997 - type: nauc_map_at_1_max value: 14.5636 - type: nauc_map_at_1_std value: -0.5065 - type: nauc_map_at_1_diff1 value: 37.5816 - type: nauc_map_at_3_max value: 21.752 - type: nauc_map_at_3_std value: 0.2942 - type: nauc_map_at_3_diff1 value: 29.662100000000002 - type: nauc_map_at_5_max 
value: 23.3994 - type: nauc_map_at_5_std value: 3.2369000000000003 - type: nauc_map_at_5_diff1 value: 28.479 - type: nauc_map_at_10_max value: 26.969500000000004 - type: nauc_map_at_10_std value: 6.4338999999999995 - type: nauc_map_at_10_diff1 value: 27.548000000000002 - type: nauc_map_at_20_max value: 28.2804 - type: nauc_map_at_20_std value: 8.3557 - type: nauc_map_at_20_diff1 value: 26.561600000000002 - type: nauc_map_at_100_max value: 28.979899999999997 - type: nauc_map_at_100_std value: 9.3446 - type: nauc_map_at_100_diff1 value: 26.539099999999998 - type: nauc_map_at_1000_max value: 28.9572 - type: nauc_map_at_1000_std value: 9.3017 - type: nauc_map_at_1000_diff1 value: 26.6029 - type: nauc_recall_at_1_max value: 14.5636 - type: nauc_recall_at_1_std value: -0.5065 - type: nauc_recall_at_1_diff1 value: 37.5816 - type: nauc_recall_at_3_max value: 19.8958 - type: nauc_recall_at_3_std value: -1.7080000000000002 - type: nauc_recall_at_3_diff1 value: 24.4885 - type: nauc_recall_at_5_max value: 18.8426 - type: nauc_recall_at_5_std value: 3.5769 - type: nauc_recall_at_5_diff1 value: 21.253700000000002 - type: nauc_recall_at_10_max value: 25.061299999999996 - type: nauc_recall_at_10_std value: 7.1753 - type: nauc_recall_at_10_diff1 value: 18.7378 - type: nauc_recall_at_20_max value: 28.6096 - type: nauc_recall_at_20_std value: 18.5789 - type: nauc_recall_at_20_diff1 value: 11.686 - type: nauc_recall_at_100_max value: 45.903 - type: nauc_recall_at_100_std value: 46.9916 - type: nauc_recall_at_100_diff1 value: 9.813600000000001 - type: nauc_recall_at_1000_max value: 62.512699999999995 - type: nauc_recall_at_1000_std value: 67.9442 - type: nauc_recall_at_1000_diff1 value: 34.3912 - type: nauc_precision_at_1_max value: 30.3153 - type: nauc_precision_at_1_std value: 16.1917 - type: nauc_precision_at_1_diff1 value: 33.1291 - type: nauc_precision_at_3_max value: 35.6697 - type: nauc_precision_at_3_std value: 18.0247 - type: nauc_precision_at_3_diff1 value: 7.0163 - type: nauc_precision_at_5_max value: 34.0555 - type: nauc_precision_at_5_std value: 23.5324 - type: nauc_precision_at_5_diff1 value: 0.44270000000000004 - type: nauc_precision_at_10_max value: 37.8515 - type: nauc_precision_at_10_std value: 31.657000000000004 - type: nauc_precision_at_10_diff1 value: -5.2642 - type: nauc_precision_at_20_max value: 36.025 - type: nauc_precision_at_20_std value: 35.236000000000004 - type: nauc_precision_at_20_diff1 value: -10.6916 - type: nauc_precision_at_100_max value: 29.678900000000002 - type: nauc_precision_at_100_std value: 35.2162 - type: nauc_precision_at_100_diff1 value: -13.7845 - type: nauc_precision_at_1000_max value: 22.2855 - type: nauc_precision_at_1000_std value: 27.221600000000002 - type: nauc_precision_at_1000_diff1 value: -13.4482 - type: nauc_mrr_at_1_max value: 30.3153 - type: nauc_mrr_at_1_std value: 16.1917 - type: nauc_mrr_at_1_diff1 value: 33.1291 - type: nauc_mrr_at_3_max value: 33.2966 - type: nauc_mrr_at_3_std value: 16.9755 - type: nauc_mrr_at_3_diff1 value: 29.814 - type: nauc_mrr_at_5_max value: 32.920300000000005 - type: nauc_mrr_at_5_std value: 17.832600000000003 - type: nauc_mrr_at_5_diff1 value: 29.683300000000003 - type: nauc_mrr_at_10_max value: 32.9394 - type: nauc_mrr_at_10_std value: 17.5036 - type: nauc_mrr_at_10_diff1 value: 29.6425 - type: nauc_mrr_at_20_max value: 32.852599999999995 - type: nauc_mrr_at_20_std value: 17.8307 - type: nauc_mrr_at_20_diff1 value: 29.4502 - type: nauc_mrr_at_100_max value: 32.9242 - type: nauc_mrr_at_100_std value: 17.7699 - type: 
nauc_mrr_at_100_diff1 value: 29.504399999999997 - type: nauc_mrr_at_1000_max value: 32.9303 - type: nauc_mrr_at_1000_std value: 17.7636 - type: nauc_mrr_at_1000_diff1 value: 29.526799999999998 - type: main_score value: 52.553000000000004 - task: type: Retrieval dataset: name: MTEB MSMARCO (default) type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: ndcg_at_1 value: 14.155000000000001 - type: ndcg_at_3 value: 22.499 - type: ndcg_at_5 value: 26.233 - type: ndcg_at_10 value: 29.866999999999997 - type: ndcg_at_20 value: 32.616 - type: ndcg_at_100 value: 36.301 - type: ndcg_at_1000 value: 38.318999999999996 - type: map_at_1 value: 13.793 - type: map_at_3 value: 20.237 - type: map_at_5 value: 22.32 - type: map_at_10 value: 23.829 - type: map_at_20 value: 24.596999999999998 - type: map_at_100 value: 25.117 - type: map_at_1000 value: 25.194 - type: recall_at_1 value: 13.793 - type: recall_at_3 value: 28.592000000000002 - type: recall_at_5 value: 37.556 - type: recall_at_10 value: 48.669000000000004 - type: recall_at_20 value: 59.379000000000005 - type: recall_at_100 value: 78.927 - type: recall_at_1000 value: 94.568 - type: precision_at_1 value: 14.155000000000001 - type: precision_at_3 value: 9.828000000000001 - type: precision_at_5 value: 7.785 - type: precision_at_10 value: 5.06 - type: precision_at_20 value: 3.097 - type: precision_at_100 value: 0.83 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 14.1547 - type: mrr_at_3 value: 20.7139 - type: mrr_at_5 value: 22.8028 - type: mrr_at_10 value: 24.3047 - type: mrr_at_20 value: 25.0548 - type: mrr_at_100 value: 25.552000000000003 - type: mrr_at_1000 value: 25.622 - type: nauc_ndcg_at_1_max value: 1.4238 - type: nauc_ndcg_at_1_std value: -13.091800000000001 - type: nauc_ndcg_at_1_diff1 value: 29.1051 - type: nauc_ndcg_at_3_max value: 2.6131 - type: nauc_ndcg_at_3_std value: -14.6122 - type: nauc_ndcg_at_3_diff1 value: 24.0988 - type: nauc_ndcg_at_5_max value: 2.3456 - type: nauc_ndcg_at_5_std value: -15.092500000000001 - type: nauc_ndcg_at_5_diff1 value: 23.5516 - type: nauc_ndcg_at_10_max value: 2.8182 - type: nauc_ndcg_at_10_std value: -14.623700000000001 - type: nauc_ndcg_at_10_diff1 value: 23.1711 - type: nauc_ndcg_at_20_max value: 3.5518 - type: nauc_ndcg_at_20_std value: -12.931500000000002 - type: nauc_ndcg_at_20_diff1 value: 23.1818 - type: nauc_ndcg_at_100_max value: 4.7755 - type: nauc_ndcg_at_100_std value: -9.851899999999999 - type: nauc_ndcg_at_100_diff1 value: 23.340700000000002 - type: nauc_ndcg_at_1000_max value: 4.5916 - type: nauc_ndcg_at_1000_std value: -10.4923 - type: nauc_ndcg_at_1000_diff1 value: 23.5174 - type: nauc_map_at_1_max value: 1.4764 - type: nauc_map_at_1_std value: -13.2414 - type: nauc_map_at_1_diff1 value: 29.1169 - type: nauc_map_at_3_max value: 2.3523 - type: nauc_map_at_3_std value: -14.453 - type: nauc_map_at_3_diff1 value: 25.0786 - type: nauc_map_at_5_max value: 2.1924 - type: nauc_map_at_5_std value: -14.7681 - type: nauc_map_at_5_diff1 value: 24.7695 - type: nauc_map_at_10_max value: 2.3542 - type: nauc_map_at_10_std value: -14.6287 - type: nauc_map_at_10_diff1 value: 24.6169 - type: nauc_map_at_20_max value: 2.5815 - type: nauc_map_at_20_std value: -14.141699999999998 - type: nauc_map_at_20_diff1 value: 24.6406 - type: nauc_map_at_100_max value: 2.7435 - type: nauc_map_at_100_std value: -13.7208 - type: nauc_map_at_100_diff1 value: 24.6504 - type: nauc_map_at_1000_max value: 2.7392 - type: nauc_map_at_1000_std value: -13.7302 - 
type: nauc_map_at_1000_diff1 value: 24.654300000000003 - type: nauc_recall_at_1_max value: 1.4764 - type: nauc_recall_at_1_std value: -13.2414 - type: nauc_recall_at_1_diff1 value: 29.1169 - type: nauc_recall_at_3_max value: 3.2174 - type: nauc_recall_at_3_std value: -15.143300000000002 - type: nauc_recall_at_3_diff1 value: 21.593899999999998 - type: nauc_recall_at_5_max value: 2.6845 - type: nauc_recall_at_5_std value: -15.9795 - type: nauc_recall_at_5_diff1 value: 20.567 - type: nauc_recall_at_10_max value: 3.913 - type: nauc_recall_at_10_std value: -14.566899999999999 - type: nauc_recall_at_10_diff1 value: 19.4393 - type: nauc_recall_at_20_max value: 6.5038 - type: nauc_recall_at_20_std value: -8.572799999999999 - type: nauc_recall_at_20_diff1 value: 19.0899 - type: nauc_recall_at_100_max value: 16.7968 - type: nauc_recall_at_100_std value: 15.837200000000001 - type: nauc_recall_at_100_diff1 value: 18.3296 - type: nauc_recall_at_1000_max value: 39.6225 - type: nauc_recall_at_1000_std value: 53.9736 - type: nauc_recall_at_1000_diff1 value: 12.565499999999998 - type: nauc_precision_at_1_max value: 1.4238 - type: nauc_precision_at_1_std value: -13.091800000000001 - type: nauc_precision_at_1_diff1 value: 29.1051 - type: nauc_precision_at_3_max value: 3.3477 - type: nauc_precision_at_3_std value: -14.8784 - type: nauc_precision_at_3_diff1 value: 21.8029 - type: nauc_precision_at_5_max value: 2.8493 - type: nauc_precision_at_5_std value: -15.767000000000001 - type: nauc_precision_at_5_diff1 value: 20.5677 - type: nauc_precision_at_10_max value: 4.2772 - type: nauc_precision_at_10_std value: -14.0627 - type: nauc_precision_at_10_diff1 value: 19.1205 - type: nauc_precision_at_20_max value: 7.135800000000001 - type: nauc_precision_at_20_std value: -7.5076 - type: nauc_precision_at_20_diff1 value: 18.0149 - type: nauc_precision_at_100_max value: 16.791 - type: nauc_precision_at_100_std value: 16.2346 - type: nauc_precision_at_100_diff1 value: 13.9316 - type: nauc_precision_at_1000_max value: 20.7529 - type: nauc_precision_at_1000_std value: 27.4859 - type: nauc_precision_at_1000_diff1 value: 3.9303 - type: nauc_mrr_at_1_max value: 1.4238 - type: nauc_mrr_at_1_std value: -13.091800000000001 - type: nauc_mrr_at_1_diff1 value: 29.1051 - type: nauc_mrr_at_3_max value: 2.3397 - type: nauc_mrr_at_3_std value: -14.1544 - type: nauc_mrr_at_3_diff1 value: 25.208799999999997 - type: nauc_mrr_at_5_max value: 2.1534 - type: nauc_mrr_at_5_std value: -14.4094 - type: nauc_mrr_at_5_diff1 value: 24.8258 - type: nauc_mrr_at_10_max value: 2.4274 - type: nauc_mrr_at_10_std value: -14.2121 - type: nauc_mrr_at_10_diff1 value: 24.6847 - type: nauc_mrr_at_20_max value: 2.6235999999999997 - type: nauc_mrr_at_20_std value: -13.736400000000001 - type: nauc_mrr_at_20_diff1 value: 24.6859 - type: nauc_mrr_at_100_max value: 2.7653 - type: nauc_mrr_at_100_std value: -13.358600000000001 - type: nauc_mrr_at_100_diff1 value: 24.7238 - type: nauc_mrr_at_1000_max value: 2.7588999999999997 - type: nauc_mrr_at_1000_std value: -13.373199999999999 - type: nauc_mrr_at_1000_diff1 value: 24.7274 - type: main_score value: 29.866999999999997 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 89.89970000000001 - type: f1 value: 89.6705 - type: f1_weighted value: 89.8682 - type: main_score value: 89.89970000000001 - task: type: Classification dataset: name: MTEB MTOPIntentClassification 
(en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 60.26899999999999 - type: f1 value: 40.8003 - type: f1_weighted value: 63.033899999999996 - type: main_score value: 60.26899999999999 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 63.9509 - type: f1 value: 60.7828 - type: f1_weighted value: 62.8 - type: main_score value: 63.9509 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 70.928 - type: f1 value: 69.4755 - type: f1_weighted value: 70.6366 - type: main_score value: 70.928 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P (default) type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 31.522 - type: v_measure_std value: 1.5528 - type: main_score value: 31.522 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S (default) type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 28.572599999999998 - type: v_measure_std value: 1.8154 - type: main_score value: 28.572599999999998 - task: type: Reranking dataset: name: MTEB MindSmallReranking (default) type: mteb/mind_small config: default split: test revision: 59042f120c80e8afa9cdbb224f67076cec0fc9a7 metrics: - type: map value: 30.5381 - type: mrr value: 31.574099999999998 - type: nAUC_map_max value: -19.592000000000002 - type: nAUC_map_std value: -3.0272 - type: nAUC_map_diff1 value: 14.0537 - type: nAUC_mrr_max value: -13.974900000000002 - type: nAUC_mrr_std value: -0.8847 - type: nAUC_mrr_diff1 value: 13.2721 - type: main_score value: 30.5381 - task: type: Retrieval dataset: name: MTEB NFCorpus (default) type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: ndcg_at_1 value: 38.080000000000005 - type: ndcg_at_3 value: 34.405 - type: ndcg_at_5 value: 32.019999999999996 - type: ndcg_at_10 value: 28.903000000000002 - type: ndcg_at_20 value: 26.693 - type: ndcg_at_100 value: 26.662999999999997 - type: ndcg_at_1000 value: 35.698 - type: map_at_1 value: 4.423 - type: map_at_3 value: 7.733 - type: map_at_5 value: 9.006 - type: map_at_10 value: 10.366 - type: map_at_20 value: 11.333 - type: map_at_100 value: 12.811 - type: map_at_1000 value: 14.066 - type: recall_at_1 value: 4.423 - type: recall_at_3 value: 8.908000000000001 - type: recall_at_5 value: 11.179 - type: recall_at_10 value: 14.280999999999999 - type: recall_at_20 value: 17.192 - type: recall_at_100 value: 27.685 - type: recall_at_1000 value: 59.108000000000004 - type: precision_at_1 value: 40.248 - type: precision_at_3 value: 33.127 - type: precision_at_5 value: 27.864 - type: precision_at_10 value: 21.053 - type: precision_at_20 value: 15.356 - type: precision_at_100 value: 6.709 - type: precision_at_1000 value: 1.9529999999999998 - type: mrr_at_1 value: 40.247699999999995 - type: mrr_at_3 value: 47.7812 - type: mrr_at_5 value: 48.8958 - type: mrr_at_10 value: 49.4034 - type: mrr_at_20 value: 49.8468 - type: mrr_at_100 value: 50.104800000000004 - type: mrr_at_1000 value: 50.1703 - type: 
nauc_ndcg_at_1_max value: 34.5735 - type: nauc_ndcg_at_1_std value: 15.1084 - type: nauc_ndcg_at_1_diff1 value: 37.779 - type: nauc_ndcg_at_3_max value: 38.8071 - type: nauc_ndcg_at_3_std value: 24.7697 - type: nauc_ndcg_at_3_diff1 value: 29.5807 - type: nauc_ndcg_at_5_max value: 39.128800000000005 - type: nauc_ndcg_at_5_std value: 26.398 - type: nauc_ndcg_at_5_diff1 value: 30.3835 - type: nauc_ndcg_at_10_max value: 37.7665 - type: nauc_ndcg_at_10_std value: 27.5455 - type: nauc_ndcg_at_10_diff1 value: 30.1575 - type: nauc_ndcg_at_20_max value: 36.3537 - type: nauc_ndcg_at_20_std value: 28.4047 - type: nauc_ndcg_at_20_diff1 value: 27.9553 - type: nauc_ndcg_at_100_max value: 39.0086 - type: nauc_ndcg_at_100_std value: 28.4221 - type: nauc_ndcg_at_100_diff1 value: 27.833799999999997 - type: nauc_ndcg_at_1000_max value: 44.7295 - type: nauc_ndcg_at_1000_std value: 35.369 - type: nauc_ndcg_at_1000_diff1 value: 29.4449 - type: nauc_map_at_1_max value: 12.645100000000001 - type: nauc_map_at_1_std value: -13.536999999999999 - type: nauc_map_at_1_diff1 value: 45.0881 - type: nauc_map_at_3_max value: 14.6862 - type: nauc_map_at_3_std value: -6.6259 - type: nauc_map_at_3_diff1 value: 34.2575 - type: nauc_map_at_5_max value: 18.6559 - type: nauc_map_at_5_std value: -2.8853 - type: nauc_map_at_5_diff1 value: 32.9187 - type: nauc_map_at_10_max value: 22.1906 - type: nauc_map_at_10_std value: 1.8654 - type: nauc_map_at_10_diff1 value: 31.3784 - type: nauc_map_at_20_max value: 24.696199999999997 - type: nauc_map_at_20_std value: 6.1949 - type: nauc_map_at_20_diff1 value: 30.9956 - type: nauc_map_at_100_max value: 27.2011 - type: nauc_map_at_100_std value: 12.3619 - type: nauc_map_at_100_diff1 value: 30.811500000000002 - type: nauc_map_at_1000_max value: 27.6972 - type: nauc_map_at_1000_std value: 15.845999999999998 - type: nauc_map_at_1000_diff1 value: 30.5315 - type: nauc_recall_at_1_max value: 12.645100000000001 - type: nauc_recall_at_1_std value: -13.536999999999999 - type: nauc_recall_at_1_diff1 value: 45.0881 - type: nauc_recall_at_3_max value: 14.2305 - type: nauc_recall_at_3_std value: -2.4143000000000003 - type: nauc_recall_at_3_diff1 value: 27.1661 - type: nauc_recall_at_5_max value: 20.62 - type: nauc_recall_at_5_std value: 3.1332 - type: nauc_recall_at_5_diff1 value: 26.7813 - type: nauc_recall_at_10_max value: 22.0278 - type: nauc_recall_at_10_std value: 4.587 - type: nauc_recall_at_10_diff1 value: 22.0275 - type: nauc_recall_at_20_max value: 23.4161 - type: nauc_recall_at_20_std value: 8.2901 - type: nauc_recall_at_20_diff1 value: 20.9799 - type: nauc_recall_at_100_max value: 24.5345 - type: nauc_recall_at_100_std value: 17.1618 - type: nauc_recall_at_100_diff1 value: 15.586500000000001 - type: nauc_recall_at_1000_max value: 22.3168 - type: nauc_recall_at_1000_std value: 22.6961 - type: nauc_recall_at_1000_diff1 value: 9.9602 - type: nauc_precision_at_1_max value: 36.549 - type: nauc_precision_at_1_std value: 16.6789 - type: nauc_precision_at_1_diff1 value: 35.6095 - type: nauc_precision_at_3_max value: 42.6539 - type: nauc_precision_at_3_std value: 33.0974 - type: nauc_precision_at_3_diff1 value: 21.9208 - type: nauc_precision_at_5_max value: 41.787800000000004 - type: nauc_precision_at_5_std value: 35.2286 - type: nauc_precision_at_5_diff1 value: 21.104899999999997 - type: nauc_precision_at_10_max value: 37.7473 - type: nauc_precision_at_10_std value: 39.887 - type: nauc_precision_at_10_diff1 value: 18.9082 - type: nauc_precision_at_20_max value: 32.0874 - type: nauc_precision_at_20_std 
value: 44.798100000000005 - type: nauc_precision_at_20_diff1 value: 12.953000000000001 - type: nauc_precision_at_100_max value: 19.108900000000002 - type: nauc_precision_at_100_std value: 44.49 - type: nauc_precision_at_100_diff1 value: 6.4374 - type: nauc_precision_at_1000_max value: 2.5292 - type: nauc_precision_at_1000_std value: 30.523400000000002 - type: nauc_precision_at_1000_diff1 value: -0.6787 - type: nauc_mrr_at_1_max value: 36.549 - type: nauc_mrr_at_1_std value: 16.6789 - type: nauc_mrr_at_1_diff1 value: 35.6095 - type: nauc_mrr_at_3_max value: 43.425599999999996 - type: nauc_mrr_at_3_std value: 28.8242 - type: nauc_mrr_at_3_diff1 value: 33.4411 - type: nauc_mrr_at_5_max value: 44.5717 - type: nauc_mrr_at_5_std value: 29.5765 - type: nauc_mrr_at_5_diff1 value: 34.463899999999995 - type: nauc_mrr_at_10_max value: 44.6062 - type: nauc_mrr_at_10_std value: 29.5773 - type: nauc_mrr_at_10_diff1 value: 34.5158 - type: nauc_mrr_at_20_max value: 44.6961 - type: nauc_mrr_at_20_std value: 29.5126 - type: nauc_mrr_at_20_diff1 value: 34.2436 - type: nauc_mrr_at_100_max value: 44.8207 - type: nauc_mrr_at_100_std value: 29.649700000000003 - type: nauc_mrr_at_100_diff1 value: 34.3576 - type: nauc_mrr_at_1000_max value: 44.7763 - type: nauc_mrr_at_1000_std value: 29.6044 - type: nauc_mrr_at_1000_diff1 value: 34.3718 - type: main_score value: 28.903000000000002 - task: type: Retrieval dataset: name: MTEB NQ (default) type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: ndcg_at_1 value: 34.589 - type: ndcg_at_3 value: 45.289 - type: ndcg_at_5 value: 49.919000000000004 - type: ndcg_at_10 value: 53.410000000000004 - type: ndcg_at_20 value: 55.786 - type: ndcg_at_100 value: 57.75599999999999 - type: ndcg_at_1000 value: 58.51499999999999 - type: map_at_1 value: 30.503999999999998 - type: map_at_3 value: 41.396 - type: map_at_5 value: 44.216 - type: map_at_10 value: 45.802 - type: map_at_20 value: 46.542 - type: map_at_100 value: 46.867999999999995 - type: map_at_1000 value: 46.903 - type: recall_at_1 value: 30.503999999999998 - type: recall_at_3 value: 53.244 - type: recall_at_5 value: 63.912 - type: recall_at_10 value: 74.06099999999999 - type: recall_at_20 value: 82.819 - type: recall_at_100 value: 92.51599999999999 - type: recall_at_1000 value: 98.156 - type: precision_at_1 value: 34.589 - type: precision_at_3 value: 20.693 - type: precision_at_5 value: 15.058 - type: precision_at_10 value: 8.818 - type: precision_at_20 value: 4.9799999999999995 - type: precision_at_100 value: 1.125 - type: precision_at_1000 value: 0.11900000000000001 - type: mrr_at_1 value: 34.617599999999996 - type: mrr_at_3 value: 44.7277 - type: mrr_at_5 value: 47.0408 - type: mrr_at_10 value: 48.335499999999996 - type: mrr_at_20 value: 48.8925 - type: mrr_at_100 value: 49.1307 - type: mrr_at_1000 value: 49.154199999999996 - type: nauc_ndcg_at_1_max value: 23.8893 - type: nauc_ndcg_at_1_std value: -3.0092 - type: nauc_ndcg_at_1_diff1 value: 36.789899999999996 - type: nauc_ndcg_at_3_max value: 26.161800000000003 - type: nauc_ndcg_at_3_std value: -3.6557 - type: nauc_ndcg_at_3_diff1 value: 31.381500000000003 - type: nauc_ndcg_at_5_max value: 28.4273 - type: nauc_ndcg_at_5_std value: -2.6271 - type: nauc_ndcg_at_5_diff1 value: 30.960700000000003 - type: nauc_ndcg_at_10_max value: 29.1744 - type: nauc_ndcg_at_10_std value: -0.9882 - type: nauc_ndcg_at_10_diff1 value: 30.9664 - type: nauc_ndcg_at_20_max value: 30.1188 - type: nauc_ndcg_at_20_std value: 0.6556000000000001 - 
type: nauc_ndcg_at_20_diff1 value: 30.8734 - type: nauc_ndcg_at_100_max value: 29.822 - type: nauc_ndcg_at_100_std value: 1.1388 - type: nauc_ndcg_at_100_diff1 value: 31.348300000000002 - type: nauc_ndcg_at_1000_max value: 29.1591 - type: nauc_ndcg_at_1000_std value: 0.22569999999999998 - type: nauc_ndcg_at_1000_diff1 value: 31.7286 - type: nauc_map_at_1_max value: 22.2587 - type: nauc_map_at_1_std value: -4.6109 - type: nauc_map_at_1_diff1 value: 37.0942 - type: nauc_map_at_3_max value: 25.3764 - type: nauc_map_at_3_std value: -4.1876 - type: nauc_map_at_3_diff1 value: 32.752700000000004 - type: nauc_map_at_5_max value: 26.6367 - type: nauc_map_at_5_std value: -3.6224 - type: nauc_map_at_5_diff1 value: 32.4957 - type: nauc_map_at_10_max value: 27.0304 - type: nauc_map_at_10_std value: -2.852 - type: nauc_map_at_10_diff1 value: 32.548899999999996 - type: nauc_map_at_20_max value: 27.2991 - type: nauc_map_at_20_std value: -2.3765 - type: nauc_map_at_20_diff1 value: 32.5216 - type: nauc_map_at_100_max value: 27.2665 - type: nauc_map_at_100_std value: -2.2849999999999997 - type: nauc_map_at_100_diff1 value: 32.5791 - type: nauc_map_at_1000_max value: 27.243499999999997 - type: nauc_map_at_1000_std value: -2.3154999999999997 - type: nauc_map_at_1000_diff1 value: 32.5925 - type: nauc_recall_at_1_max value: 22.2587 - type: nauc_recall_at_1_std value: -4.6109 - type: nauc_recall_at_1_diff1 value: 37.0942 - type: nauc_recall_at_3_max value: 27.0818 - type: nauc_recall_at_3_std value: -3.5904 - type: nauc_recall_at_3_diff1 value: 26.6279 - type: nauc_recall_at_5_max value: 32.6179 - type: nauc_recall_at_5_std value: -1.2186000000000001 - type: nauc_recall_at_5_diff1 value: 24.7151 - type: nauc_recall_at_10_max value: 36.105599999999995 - type: nauc_recall_at_10_std value: 4.5315 - type: nauc_recall_at_10_diff1 value: 23.4044 - type: nauc_recall_at_20_max value: 45.2605 - type: nauc_recall_at_20_std value: 17.092299999999998 - type: nauc_recall_at_20_diff1 value: 20.5304 - type: nauc_recall_at_100_max value: 57.85829999999999 - type: nauc_recall_at_100_std value: 42.517500000000005 - type: nauc_recall_at_100_diff1 value: 19.6591 - type: nauc_recall_at_1000_max value: 75.3601 - type: nauc_recall_at_1000_std value: 69.4265 - type: nauc_recall_at_1000_diff1 value: 29.8635 - type: nauc_precision_at_1_max value: 23.8893 - type: nauc_precision_at_1_std value: -3.0092 - type: nauc_precision_at_1_diff1 value: 36.789899999999996 - type: nauc_precision_at_3_max value: 27.1749 - type: nauc_precision_at_3_std value: -0.9776 - type: nauc_precision_at_3_diff1 value: 22.9551 - type: nauc_precision_at_5_max value: 28.6992 - type: nauc_precision_at_5_std value: 2.1732 - type: nauc_precision_at_5_diff1 value: 17.6422 - type: nauc_precision_at_10_max value: 27.2755 - type: nauc_precision_at_10_std value: 8.4934 - type: nauc_precision_at_10_diff1 value: 12.1581 - type: nauc_precision_at_20_max value: 26.858900000000002 - type: nauc_precision_at_20_std value: 15.7942 - type: nauc_precision_at_20_diff1 value: 5.8980999999999995 - type: nauc_precision_at_100_max value: 18.8392 - type: nauc_precision_at_100_std value: 19.7054 - type: nauc_precision_at_100_diff1 value: -0.8163 - type: nauc_precision_at_1000_max value: 9.8054 - type: nauc_precision_at_1000_std value: 14.4735 - type: nauc_precision_at_1000_diff1 value: -4.7447 - type: nauc_mrr_at_1_max value: 23.8759 - type: nauc_mrr_at_1_std value: -3.0908 - type: nauc_mrr_at_1_diff1 value: 36.7027 - type: nauc_mrr_at_3_max value: 25.9165 - type: nauc_mrr_at_3_std value: 
-2.3997 - type: nauc_mrr_at_3_diff1 value: 32.5473 - type: nauc_mrr_at_5_max value: 27.1119 - type: nauc_mrr_at_5_std value: -1.8426999999999998 - type: nauc_mrr_at_5_diff1 value: 32.4999 - type: nauc_mrr_at_10_max value: 27.2217 - type: nauc_mrr_at_10_std value: -1.3365 - type: nauc_mrr_at_10_diff1 value: 32.5293 - type: nauc_mrr_at_20_max value: 27.3157 - type: nauc_mrr_at_20_std value: -1.1132 - type: nauc_mrr_at_20_diff1 value: 32.554300000000005 - type: nauc_mrr_at_100_max value: 27.2621 - type: nauc_mrr_at_100_std value: -1.0897000000000001 - type: nauc_mrr_at_100_diff1 value: 32.6073 - type: nauc_mrr_at_1000_max value: 27.2409 - type: nauc_mrr_at_1000_std value: -1.1176 - type: nauc_mrr_at_1000_diff1 value: 32.6192 - type: main_score value: 53.410000000000004 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval (default) type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: ndcg_at_1 value: 79.64 - type: ndcg_at_3 value: 83.67599999999999 - type: ndcg_at_5 value: 85.52 - type: ndcg_at_10 value: 86.871 - type: ndcg_at_20 value: 87.59 - type: ndcg_at_100 value: 88.211 - type: ndcg_at_1000 value: 88.36 - type: map_at_1 value: 69.133 - type: map_at_3 value: 79.776 - type: map_at_5 value: 81.747 - type: map_at_10 value: 82.852 - type: map_at_20 value: 83.282 - type: map_at_100 value: 83.5 - type: map_at_1000 value: 83.519 - type: recall_at_1 value: 69.133 - type: recall_at_3 value: 85.526 - type: recall_at_5 value: 90.596 - type: recall_at_10 value: 94.613 - type: recall_at_20 value: 96.92699999999999 - type: recall_at_100 value: 99.24300000000001 - type: recall_at_1000 value: 99.96000000000001 - type: precision_at_1 value: 79.64 - type: precision_at_3 value: 36.516999999999996 - type: precision_at_5 value: 24.194 - type: precision_at_10 value: 13.203000000000001 - type: precision_at_20 value: 7.02 - type: precision_at_100 value: 1.514 - type: precision_at_1000 value: 0.156 - type: mrr_at_1 value: 79.60000000000001 - type: mrr_at_3 value: 84.955 - type: mrr_at_5 value: 85.74000000000001 - type: mrr_at_10 value: 86.0913 - type: mrr_at_20 value: 86.1768 - type: mrr_at_100 value: 86.2076 - type: mrr_at_1000 value: 86.2092 - type: nauc_ndcg_at_1_max value: 39.4509 - type: nauc_ndcg_at_1_std value: -30.6309 - type: nauc_ndcg_at_1_diff1 value: 76.5171 - type: nauc_ndcg_at_3_max value: 37.9586 - type: nauc_ndcg_at_3_std value: -35.8174 - type: nauc_ndcg_at_3_diff1 value: 74.5992 - type: nauc_ndcg_at_5_max value: 38.541799999999995 - type: nauc_ndcg_at_5_std value: -36.456300000000006 - type: nauc_ndcg_at_5_diff1 value: 75.0506 - type: nauc_ndcg_at_10_max value: 38.996199999999995 - type: nauc_ndcg_at_10_std value: -35.6649 - type: nauc_ndcg_at_10_diff1 value: 75.3601 - type: nauc_ndcg_at_20_max value: 39.1758 - type: nauc_ndcg_at_20_std value: -34.7636 - type: nauc_ndcg_at_20_diff1 value: 75.3846 - type: nauc_ndcg_at_100_max value: 39.6116 - type: nauc_ndcg_at_100_std value: -33.2361 - type: nauc_ndcg_at_100_diff1 value: 75.31 - type: nauc_ndcg_at_1000_max value: 39.6171 - type: nauc_ndcg_at_1000_std value: -33.1588 - type: nauc_ndcg_at_1000_diff1 value: 75.2929 - type: nauc_map_at_1_max value: 28.8061 - type: nauc_map_at_1_std value: -33.7016 - type: nauc_map_at_1_diff1 value: 78.7612 - type: nauc_map_at_3_max value: 35.2541 - type: nauc_map_at_3_std value: -37.741400000000006 - type: nauc_map_at_3_diff1 value: 75.8173 - type: nauc_map_at_5_max value: 36.822500000000005 - type: nauc_map_at_5_std value: -37.710300000000004 - type: 
nauc_map_at_5_diff1 value: 75.7355 - type: nauc_map_at_10_max value: 37.5769 - type: nauc_map_at_10_std value: -36.5907 - type: nauc_map_at_10_diff1 value: 75.60040000000001 - type: nauc_map_at_20_max value: 37.8409 - type: nauc_map_at_20_std value: -35.7977 - type: nauc_map_at_20_diff1 value: 75.4885 - type: nauc_map_at_100_max value: 38.0097 - type: nauc_map_at_100_std value: -35.1815 - type: nauc_map_at_100_diff1 value: 75.4349 - type: nauc_map_at_1000_max value: 38.0191 - type: nauc_map_at_1000_std value: -35.1434 - type: nauc_map_at_1000_diff1 value: 75.4325 - type: nauc_recall_at_1_max value: 28.8061 - type: nauc_recall_at_1_std value: -33.7016 - type: nauc_recall_at_1_diff1 value: 78.7612 - type: nauc_recall_at_3_max value: 32.889 - type: nauc_recall_at_3_std value: -41.323100000000004 - type: nauc_recall_at_3_diff1 value: 71.73570000000001 - type: nauc_recall_at_5_max value: 34.6917 - type: nauc_recall_at_5_std value: -44.5216 - type: nauc_recall_at_5_diff1 value: 70.42540000000001 - type: nauc_recall_at_10_max value: 36.0356 - type: nauc_recall_at_10_std value: -45.073 - type: nauc_recall_at_10_diff1 value: 70.1776 - type: nauc_recall_at_20_max value: 35.714800000000004 - type: nauc_recall_at_20_std value: -44.0962 - type: nauc_recall_at_20_diff1 value: 71.23620000000001 - type: nauc_recall_at_100_max value: 43.105199999999996 - type: nauc_recall_at_100_std value: -18.800900000000002 - type: nauc_recall_at_100_diff1 value: 70.7888 - type: nauc_recall_at_1000_max value: 64.4844 - type: nauc_recall_at_1000_std value: 41.486200000000004 - type: nauc_recall_at_1000_diff1 value: 69.0643 - type: nauc_precision_at_1_max value: 39.4509 - type: nauc_precision_at_1_std value: -30.6309 - type: nauc_precision_at_1_diff1 value: 76.5171 - type: nauc_precision_at_3_max value: 12.514800000000001 - type: nauc_precision_at_3_std value: 3.2272000000000003 - type: nauc_precision_at_3_diff1 value: -11.8298 - type: nauc_precision_at_5_max value: 6.0901 - type: nauc_precision_at_5_std value: 12.6778 - type: nauc_precision_at_5_diff1 value: -26.570300000000003 - type: nauc_precision_at_10_max value: 0.9773999999999999 - type: nauc_precision_at_10_std value: 21.1764 - type: nauc_precision_at_10_diff1 value: -35.2909 - type: nauc_precision_at_20_max value: -2.2387 - type: nauc_precision_at_20_std value: 26.571099999999998 - type: nauc_precision_at_20_diff1 value: -39.0582 - type: nauc_precision_at_100_max value: -4.9125000000000005 - type: nauc_precision_at_100_std value: 31.9907 - type: nauc_precision_at_100_diff1 value: -41.5916 - type: nauc_precision_at_1000_max value: -6.0841 - type: nauc_precision_at_1000_std value: 32.8504 - type: nauc_precision_at_1000_diff1 value: -42.25 - type: nauc_mrr_at_1_max value: 39.285599999999995 - type: nauc_mrr_at_1_std value: -30.799100000000003 - type: nauc_mrr_at_1_diff1 value: 76.6113 - type: nauc_mrr_at_3_max value: 40.7492 - type: nauc_mrr_at_3_std value: -31.933699999999998 - type: nauc_mrr_at_3_diff1 value: 75.593 - type: nauc_mrr_at_5_max value: 40.87 - type: nauc_mrr_at_5_std value: -31.9333 - type: nauc_mrr_at_5_diff1 value: 75.7331 - type: nauc_mrr_at_10_max value: 40.7704 - type: nauc_mrr_at_10_std value: -31.839699999999997 - type: nauc_mrr_at_10_diff1 value: 75.8249 - type: nauc_mrr_at_20_max value: 40.7107 - type: nauc_mrr_at_20_std value: -31.7701 - type: nauc_mrr_at_20_diff1 value: 75.8463 - type: nauc_mrr_at_100_max value: 40.6937 - type: nauc_mrr_at_100_std value: -31.735999999999997 - type: nauc_mrr_at_100_diff1 value: 75.84309999999999 - type: 
nauc_mrr_at_1000_max value: 40.691 - type: nauc_mrr_at_1000_std value: -31.7368 - type: nauc_mrr_at_1000_diff1 value: 75.84349999999999 - type: main_score value: 86.871 - task: type: Clustering dataset: name: MTEB RedditClustering (default) type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 45.8568 - type: v_measure_std value: 5.685 - type: main_score value: 45.8568 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P (default) type: mteb/reddit-clustering-p2p config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: v_measure value: 54.9896 - type: v_measure_std value: 12.0517 - type: main_score value: 54.9896 - task: type: Retrieval dataset: name: MTEB SCIDOCS (default) type: mteb/scidocs config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: ndcg_at_1 value: 20.599999999999998 - type: ndcg_at_3 value: 17.214 - type: ndcg_at_5 value: 14.93 - type: ndcg_at_10 value: 17.721 - type: ndcg_at_20 value: 20.619 - type: ndcg_at_100 value: 25.46 - type: ndcg_at_1000 value: 30.846 - type: map_at_1 value: 4.175 - type: map_at_3 value: 7.611 - type: map_at_5 value: 8.955 - type: map_at_10 value: 10.360999999999999 - type: map_at_20 value: 11.414 - type: map_at_100 value: 12.3 - type: map_at_1000 value: 12.595999999999998 - type: recall_at_1 value: 4.175 - type: recall_at_3 value: 9.868 - type: recall_at_5 value: 13.303 - type: recall_at_10 value: 18.397 - type: recall_at_20 value: 25.162000000000003 - type: recall_at_100 value: 40.99 - type: recall_at_1000 value: 67.322 - type: precision_at_1 value: 20.599999999999998 - type: precision_at_3 value: 16.2 - type: precision_at_5 value: 13.120000000000001 - type: precision_at_10 value: 9.06 - type: precision_at_20 value: 6.1899999999999995 - type: precision_at_100 value: 2.017 - type: precision_at_1000 value: 0.331 - type: mrr_at_1 value: 20.599999999999998 - type: mrr_at_3 value: 28.1833 - type: mrr_at_5 value: 30.043300000000002 - type: mrr_at_10 value: 31.1391 - type: mrr_at_20 value: 31.9095 - type: mrr_at_100 value: 32.3914 - type: mrr_at_1000 value: 32.4509 - type: nauc_ndcg_at_1_max value: 26.9024 - type: nauc_ndcg_at_1_std value: 4.1442 - type: nauc_ndcg_at_1_diff1 value: 25.9169 - type: nauc_ndcg_at_3_max value: 33.2338 - type: nauc_ndcg_at_3_std value: 7.0103 - type: nauc_ndcg_at_3_diff1 value: 24.8464 - type: nauc_ndcg_at_5_max value: 33.833999999999996 - type: nauc_ndcg_at_5_std value: 8.515 - type: nauc_ndcg_at_5_diff1 value: 22.7135 - type: nauc_ndcg_at_10_max value: 34.6873 - type: nauc_ndcg_at_10_std value: 12.3294 - type: nauc_ndcg_at_10_diff1 value: 20.4198 - type: nauc_ndcg_at_20_max value: 36.889 - type: nauc_ndcg_at_20_std value: 15.5519 - type: nauc_ndcg_at_20_diff1 value: 20.7428 - type: nauc_ndcg_at_100_max value: 39.0403 - type: nauc_ndcg_at_100_std value: 20.2488 - type: nauc_ndcg_at_100_diff1 value: 20.572 - type: nauc_ndcg_at_1000_max value: 38.7458 - type: nauc_ndcg_at_1000_std value: 21.7088 - type: nauc_ndcg_at_1000_diff1 value: 20.5603 - type: nauc_map_at_1_max value: 27.091199999999997 - type: nauc_map_at_1_std value: 4.3355999999999995 - type: nauc_map_at_1_diff1 value: 25.7587 - type: nauc_map_at_3_max value: 33.602900000000005 - type: nauc_map_at_3_std value: 5.8709 - type: nauc_map_at_3_diff1 value: 25.5351 - type: nauc_map_at_5_max value: 34.414 - type: nauc_map_at_5_std value: 6.914199999999999 - type: nauc_map_at_5_diff1 value: 23.7741 
- type: nauc_map_at_10_max value: 35.1586 - type: nauc_map_at_10_std value: 10.078800000000001 - type: nauc_map_at_10_diff1 value: 21.628600000000002 - type: nauc_map_at_20_max value: 36.7719 - type: nauc_map_at_20_std value: 12.1807 - type: nauc_map_at_20_diff1 value: 22.0201 - type: nauc_map_at_100_max value: 37.5971 - type: nauc_map_at_100_std value: 13.828299999999999 - type: nauc_map_at_100_diff1 value: 21.8011 - type: nauc_map_at_1000_max value: 37.6524 - type: nauc_map_at_1000_std value: 14.0603 - type: nauc_map_at_1000_diff1 value: 21.87 - type: nauc_recall_at_1_max value: 27.091199999999997 - type: nauc_recall_at_1_std value: 4.3355999999999995 - type: nauc_recall_at_1_diff1 value: 25.7587 - type: nauc_recall_at_3_max value: 35.0346 - type: nauc_recall_at_3_std value: 7.6722 - type: nauc_recall_at_3_diff1 value: 23.8398 - type: nauc_recall_at_5_max value: 34.7429 - type: nauc_recall_at_5_std value: 9.8479 - type: nauc_recall_at_5_diff1 value: 19.9693 - type: nauc_recall_at_10_max value: 34.1188 - type: nauc_recall_at_10_std value: 16.0443 - type: nauc_recall_at_10_diff1 value: 14.844399999999998 - type: nauc_recall_at_20_max value: 36.9825 - type: nauc_recall_at_20_std value: 21.5553 - type: nauc_recall_at_20_diff1 value: 15.4056 - type: nauc_recall_at_100_max value: 37.238 - type: nauc_recall_at_100_std value: 30.425400000000003 - type: nauc_recall_at_100_diff1 value: 12.839 - type: nauc_recall_at_1000_max value: 30.188599999999997 - type: nauc_recall_at_1000_std value: 34.7768 - type: nauc_recall_at_1000_diff1 value: 8.337 - type: nauc_precision_at_1_max value: 26.9024 - type: nauc_precision_at_1_std value: 4.1442 - type: nauc_precision_at_1_diff1 value: 25.9169 - type: nauc_precision_at_3_max value: 35.3949 - type: nauc_precision_at_3_std value: 7.818300000000001 - type: nauc_precision_at_3_diff1 value: 24.4077 - type: nauc_precision_at_5_max value: 35.0653 - type: nauc_precision_at_5_std value: 10.1252 - type: nauc_precision_at_5_diff1 value: 20.4485 - type: nauc_precision_at_10_max value: 34.5799 - type: nauc_precision_at_10_std value: 16.2893 - type: nauc_precision_at_10_diff1 value: 15.337600000000002 - type: nauc_precision_at_20_max value: 37.47 - type: nauc_precision_at_20_std value: 21.7447 - type: nauc_precision_at_20_diff1 value: 15.644 - type: nauc_precision_at_100_max value: 37.8956 - type: nauc_precision_at_100_std value: 30.6388 - type: nauc_precision_at_100_diff1 value: 13.5011 - type: nauc_precision_at_1000_max value: 30.456699999999998 - type: nauc_precision_at_1000_std value: 34.3528 - type: nauc_precision_at_1000_diff1 value: 8.963899999999999 - type: nauc_mrr_at_1_max value: 26.9024 - type: nauc_mrr_at_1_std value: 4.1442 - type: nauc_mrr_at_1_diff1 value: 25.9169 - type: nauc_mrr_at_3_max value: 30.214999999999996 - type: nauc_mrr_at_3_std value: 7.4483 - type: nauc_mrr_at_3_diff1 value: 23.7169 - type: nauc_mrr_at_5_max value: 30.1892 - type: nauc_mrr_at_5_std value: 8.319 - type: nauc_mrr_at_5_diff1 value: 23.4187 - type: nauc_mrr_at_10_max value: 30.5879 - type: nauc_mrr_at_10_std value: 8.9701 - type: nauc_mrr_at_10_diff1 value: 23.4357 - type: nauc_mrr_at_20_max value: 30.579800000000002 - type: nauc_mrr_at_20_std value: 9.3186 - type: nauc_mrr_at_20_diff1 value: 23.2358 - type: nauc_mrr_at_100_max value: 30.660500000000003 - type: nauc_mrr_at_100_std value: 9.404 - type: nauc_mrr_at_100_diff1 value: 23.3937 - type: nauc_mrr_at_1000_max value: 30.6315 - type: nauc_mrr_at_1000_std value: 9.363299999999999 - type: nauc_mrr_at_1000_diff1 value: 
23.392599999999998 - type: main_score value: 17.721 - task: type: STS dataset: name: MTEB SICK-R (default) type: mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: pearson value: 75.5378 - type: spearman value: 68.7448 - type: cosine_pearson value: 75.5378 - type: cosine_spearman value: 68.7448 - type: manhattan_pearson value: 72.905 - type: manhattan_spearman value: 68.9036 - type: euclidean_pearson value: 72.7586 - type: euclidean_spearman value: 68.7448 - type: main_score value: 68.7448 - task: type: STS dataset: name: MTEB STS12 (default) type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: pearson value: 81.6341 - type: spearman value: 75.1911 - type: cosine_pearson value: 81.6341 - type: cosine_spearman value: 75.1911 - type: manhattan_pearson value: 78.4046 - type: manhattan_spearman value: 75.1706 - type: euclidean_pearson value: 78.3649 - type: euclidean_spearman value: 75.1934 - type: main_score value: 75.1911 - task: type: STS dataset: name: MTEB STS13 (default) type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: pearson value: 76.4378 - type: spearman value: 77.3053 - type: cosine_pearson value: 76.4378 - type: cosine_spearman value: 77.3053 - type: manhattan_pearson value: 77.1958 - type: manhattan_spearman value: 77.2543 - type: euclidean_pearson value: 77.2317 - type: euclidean_spearman value: 77.3053 - type: main_score value: 77.3053 - task: type: STS dataset: name: MTEB STS14 (default) type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: pearson value: 78.4342 - type: spearman value: 74.9479 - type: cosine_pearson value: 78.4342 - type: cosine_spearman value: 74.9479 - type: manhattan_pearson value: 77.12219999999999 - type: manhattan_spearman value: 74.924 - type: euclidean_pearson value: 77.14800000000001 - type: euclidean_spearman value: 74.94800000000001 - type: main_score value: 74.9479 - task: type: STS dataset: name: MTEB STS15 (default) type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: pearson value: 85.1908 - type: spearman value: 86.0174 - type: cosine_pearson value: 85.1908 - type: cosine_spearman value: 86.0174 - type: manhattan_pearson value: 85.4436 - type: manhattan_spearman value: 86.0332 - type: euclidean_pearson value: 85.4339 - type: euclidean_spearman value: 86.0174 - type: main_score value: 86.0174 - task: type: STS dataset: name: MTEB STS16 (default) type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: pearson value: 80.5421 - type: spearman value: 81.9568 - type: cosine_pearson value: 80.5421 - type: cosine_spearman value: 81.9568 - type: manhattan_pearson value: 81.1013 - type: manhattan_spearman value: 81.8165 - type: euclidean_pearson value: 81.24510000000001 - type: euclidean_spearman value: 81.9568 - type: main_score value: 81.9568 - task: type: STS dataset: name: MTEB STS17 (en-tr) type: mteb/sts17-crosslingual-sts config: en-tr split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 48.2717 - type: spearman value: 44.642900000000004 - type: cosine_pearson value: 48.2717 - type: cosine_spearman value: 44.642900000000004 - type: manhattan_pearson value: 50.314400000000006 - type: manhattan_spearman value: 44.982299999999995 - 
type: euclidean_pearson value: 50.1685 - type: euclidean_spearman value: 44.642900000000004 - type: main_score value: 44.642900000000004 - task: type: STS dataset: name: MTEB STS17 (it-en) type: mteb/sts17-crosslingual-sts config: it-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 67.8601 - type: spearman value: 68.2763 - type: cosine_pearson value: 67.8601 - type: cosine_spearman value: 68.2763 - type: manhattan_pearson value: 68.1563 - type: manhattan_spearman value: 68.4724 - type: euclidean_pearson value: 68.1026 - type: euclidean_spearman value: 68.2763 - type: main_score value: 68.2763 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 78.05539999999999 - type: spearman value: 78.5929 - type: cosine_pearson value: 78.05539999999999 - type: cosine_spearman value: 78.5929 - type: manhattan_pearson value: 78.408 - type: manhattan_spearman value: 78.8622 - type: euclidean_pearson value: 78.1413 - type: euclidean_spearman value: 78.5929 - type: main_score value: 78.5929 - task: type: STS dataset: name: MTEB STS17 (en-ar) type: mteb/sts17-crosslingual-sts config: en-ar split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 59.4349 - type: spearman value: 59.838800000000006 - type: cosine_pearson value: 59.4349 - type: cosine_spearman value: 59.838800000000006 - type: manhattan_pearson value: 60.7565 - type: manhattan_spearman value: 60.5824 - type: euclidean_pearson value: 60.247099999999996 - type: euclidean_spearman value: 59.838800000000006 - type: main_score value: 59.838800000000006 - task: type: STS dataset: name: MTEB STS17 (fr-en) type: mteb/sts17-crosslingual-sts config: fr-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 73.84039999999999 - type: spearman value: 74.2498 - type: cosine_pearson value: 73.84039999999999 - type: cosine_spearman value: 74.2498 - type: manhattan_pearson value: 74.6784 - type: manhattan_spearman value: 74.4608 - type: euclidean_pearson value: 74.5596 - type: euclidean_spearman value: 74.2498 - type: main_score value: 74.2498 - task: type: STS dataset: name: MTEB STS17 (nl-en) type: mteb/sts17-crosslingual-sts config: nl-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 67.9218 - type: spearman value: 68.0418 - type: cosine_pearson value: 67.9218 - type: cosine_spearman value: 68.0418 - type: manhattan_pearson value: 68.51 - type: manhattan_spearman value: 68.1968 - type: euclidean_pearson value: 68.343 - type: euclidean_spearman value: 68.0418 - type: main_score value: 68.0418 - task: type: STS dataset: name: MTEB STS17 (es-en) type: mteb/sts17-crosslingual-sts config: es-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 70.381 - type: spearman value: 69.5729 - type: cosine_pearson value: 70.381 - type: cosine_spearman value: 69.5729 - type: manhattan_pearson value: 70.8688 - type: manhattan_spearman value: 69.4406 - type: euclidean_pearson value: 71.0267 - type: euclidean_spearman value: 69.5729 - type: main_score value: 69.5729 - task: type: STS dataset: name: MTEB STS17 (en-de) type: mteb/sts17-crosslingual-sts config: en-de split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 70.0196 - type: spearman value: 69.7175 - type: 
cosine_pearson value: 70.0196 - type: cosine_spearman value: 69.7175 - type: manhattan_pearson value: 71.40990000000001 - type: manhattan_spearman value: 70.1461 - type: euclidean_pearson value: 70.88799999999999 - type: euclidean_spearman value: 69.7175 - type: main_score value: 69.7175 - task: type: STS dataset: name: MTEB STS22 (de-en) type: mteb/sts22-crosslingual-sts config: de-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 65.7536 - type: spearman value: 60.04429999999999 - type: cosine_pearson value: 65.7536 - type: cosine_spearman value: 60.04429999999999 - type: manhattan_pearson value: 68.58579999999999 - type: manhattan_spearman value: 60.3699 - type: euclidean_pearson value: 68.3761 - type: euclidean_spearman value: 60.04429999999999 - type: main_score value: 60.04429999999999 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 68.997 - type: spearman value: 68.1508 - type: cosine_pearson value: 68.997 - type: cosine_spearman value: 68.1508 - type: manhattan_pearson value: 68.9229 - type: manhattan_spearman value: 68.0124 - type: euclidean_pearson value: 69.0519 - type: euclidean_spearman value: 68.1508 - type: main_score value: 68.1508 - task: type: STS dataset: name: MTEB STS22 (es-en) type: mteb/sts22-crosslingual-sts config: es-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 80.2006 - type: spearman value: 80.4702 - type: cosine_pearson value: 80.2006 - type: cosine_spearman value: 80.4702 - type: manhattan_pearson value: 80.81009999999999 - type: manhattan_spearman value: 80.6037 - type: euclidean_pearson value: 80.66290000000001 - type: euclidean_spearman value: 80.4702 - type: main_score value: 80.4702 - task: type: STS dataset: name: MTEB STS22 (zh-en) type: mteb/sts22-crosslingual-sts config: zh-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 74.0885 - type: spearman value: 72.4574 - type: cosine_pearson value: 74.0885 - type: cosine_spearman value: 72.4574 - type: manhattan_pearson value: 75.25659999999999 - type: manhattan_spearman value: 71.9695 - type: euclidean_pearson value: 75.4999 - type: euclidean_spearman value: 72.4574 - type: main_score value: 72.4574 - task: type: STS dataset: name: MTEB STS22 (pl-en) type: mteb/sts22-crosslingual-sts config: pl-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 74.1794 - type: spearman value: 70.6749 - type: cosine_pearson value: 74.1794 - type: cosine_spearman value: 70.6749 - type: manhattan_pearson value: 74.3245 - type: manhattan_spearman value: 71.2375 - type: euclidean_pearson value: 73.221 - type: euclidean_spearman value: 70.6749 - type: main_score value: 70.6749 - task: type: STS dataset: name: MTEB STSBenchmark (default) type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: pearson value: 76.7328 - type: spearman value: 78.4076 - type: cosine_pearson value: 76.7328 - type: cosine_spearman value: 78.4076 - type: manhattan_pearson value: 78.24950000000001 - type: manhattan_spearman value: 78.23400000000001 - type: euclidean_pearson value: 78.3628 - type: euclidean_spearman value: 78.4076 - type: main_score value: 78.4076 - task: type: Reranking dataset: name: MTEB SciDocsRR (default) type: mteb/scidocs-reranking 
config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 79.6097 - type: mrr value: 94.12939999999999 - type: nAUC_map_max value: 58.7937 - type: nAUC_map_std value: 69.6785 - type: nAUC_map_diff1 value: 7.4891 - type: nAUC_mrr_max value: 84.7821 - type: nAUC_mrr_std value: 77.6636 - type: nAUC_mrr_diff1 value: 49.763600000000004 - type: main_score value: 79.6097 - task: type: Retrieval dataset: name: MTEB SciFact (default) type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: ndcg_at_1 value: 54.0 - type: ndcg_at_3 value: 60.851 - type: ndcg_at_5 value: 63.410999999999994 - type: ndcg_at_10 value: 65.847 - type: ndcg_at_20 value: 66.937 - type: ndcg_at_100 value: 68.262 - type: ndcg_at_1000 value: 69.341 - type: map_at_1 value: 51.093999999999994 - type: map_at_3 value: 58.044 - type: map_at_5 value: 59.702999999999996 - type: map_at_10 value: 60.885999999999996 - type: map_at_20 value: 61.266 - type: map_at_100 value: 61.482000000000006 - type: map_at_1000 value: 61.519 - type: recall_at_1 value: 51.093999999999994 - type: recall_at_3 value: 66.128 - type: recall_at_5 value: 72.456 - type: recall_at_10 value: 79.3 - type: recall_at_20 value: 83.2 - type: recall_at_100 value: 90.0 - type: recall_at_1000 value: 98.667 - type: precision_at_1 value: 54.0 - type: precision_at_3 value: 23.778 - type: precision_at_5 value: 15.933 - type: precision_at_10 value: 8.967 - type: precision_at_20 value: 4.75 - type: precision_at_100 value: 1.03 - type: precision_at_1000 value: 0.11199999999999999 - type: mrr_at_1 value: 54.0 - type: mrr_at_3 value: 60.3889 - type: mrr_at_5 value: 61.7556 - type: mrr_at_10 value: 62.5984 - type: mrr_at_20 value: 62.85039999999999 - type: mrr_at_100 value: 63.0155 - type: mrr_at_1000 value: 63.052699999999994 - type: nauc_ndcg_at_1_max value: 56.6373 - type: nauc_ndcg_at_1_std value: 2.1765 - type: nauc_ndcg_at_1_diff1 value: 71.14829999999999 - type: nauc_ndcg_at_3_max value: 53.7965 - type: nauc_ndcg_at_3_std value: -3.4057999999999997 - type: nauc_ndcg_at_3_diff1 value: 63.712199999999996 - type: nauc_ndcg_at_5_max value: 56.96059999999999 - type: nauc_ndcg_at_5_std value: 1.4794 - type: nauc_ndcg_at_5_diff1 value: 64.65419999999999 - type: nauc_ndcg_at_10_max value: 59.4154 - type: nauc_ndcg_at_10_std value: 5.2752 - type: nauc_ndcg_at_10_diff1 value: 64.3098 - type: nauc_ndcg_at_20_max value: 59.7717 - type: nauc_ndcg_at_20_std value: 6.2032 - type: nauc_ndcg_at_20_diff1 value: 64.18599999999999 - type: nauc_ndcg_at_100_max value: 59.2146 - type: nauc_ndcg_at_100_std value: 6.0138 - type: nauc_ndcg_at_100_diff1 value: 64.0895 - type: nauc_ndcg_at_1000_max value: 58.5714 - type: nauc_ndcg_at_1000_std value: 4.8872 - type: nauc_ndcg_at_1000_diff1 value: 64.66969999999999 - type: nauc_map_at_1_max value: 51.2417 - type: nauc_map_at_1_std value: -5.42 - type: nauc_map_at_1_diff1 value: 70.0616 - type: nauc_map_at_3_max value: 51.9587 - type: nauc_map_at_3_std value: -5.3035 - type: nauc_map_at_3_diff1 value: 65.282 - type: nauc_map_at_5_max value: 54.1516 - type: nauc_map_at_5_std value: -2.2858 - type: nauc_map_at_5_diff1 value: 65.86659999999999 - type: nauc_map_at_10_max value: 55.5412 - type: nauc_map_at_10_std value: -0.34299999999999997 - type: nauc_map_at_10_diff1 value: 65.89620000000001 - type: nauc_map_at_20_max value: 55.7967 - type: nauc_map_at_20_std value: 0.13799999999999998 - type: nauc_map_at_20_diff1 value: 65.8685 - type: nauc_map_at_100_max 
value: 55.74550000000001 - type: nauc_map_at_100_std value: 0.211 - type: nauc_map_at_100_diff1 value: 65.8557 - type: nauc_map_at_1000_max value: 55.728 - type: nauc_map_at_1000_std value: 0.1875 - type: nauc_map_at_1000_diff1 value: 65.8748 - type: nauc_recall_at_1_max value: 51.2417 - type: nauc_recall_at_1_std value: -5.42 - type: nauc_recall_at_1_diff1 value: 70.0616 - type: nauc_recall_at_3_max value: 52.4327 - type: nauc_recall_at_3_std value: -6.7153 - type: nauc_recall_at_3_diff1 value: 57.111999999999995 - type: nauc_recall_at_5_max value: 60.5827 - type: nauc_recall_at_5_std value: 7.1365 - type: nauc_recall_at_5_diff1 value: 58.3449 - type: nauc_recall_at_10_max value: 70.24770000000001 - type: nauc_recall_at_10_std value: 22.0896 - type: nauc_recall_at_10_diff1 value: 55.7264 - type: nauc_recall_at_20_max value: 73.483 - type: nauc_recall_at_20_std value: 29.653299999999998 - type: nauc_recall_at_20_diff1 value: 53.54750000000001 - type: nauc_recall_at_100_max value: 74.0321 - type: nauc_recall_at_100_std value: 37.491400000000006 - type: nauc_recall_at_100_diff1 value: 47.3918 - type: nauc_recall_at_1000_max value: 69.5378 - type: nauc_recall_at_1000_std value: 60.5042 - type: nauc_recall_at_1000_diff1 value: 19.5028 - type: nauc_precision_at_1_max value: 56.6373 - type: nauc_precision_at_1_std value: 2.1765 - type: nauc_precision_at_1_diff1 value: 71.14829999999999 - type: nauc_precision_at_3_max value: 51.811099999999996 - type: nauc_precision_at_3_std value: 8.4319 - type: nauc_precision_at_3_diff1 value: 48.545500000000004 - type: nauc_precision_at_5_max value: 55.4685 - type: nauc_precision_at_5_std value: 26.387 - type: nauc_precision_at_5_diff1 value: 39.6201 - type: nauc_precision_at_10_max value: 53.2436 - type: nauc_precision_at_10_std value: 41.6957 - type: nauc_precision_at_10_diff1 value: 24.6115 - type: nauc_precision_at_20_max value: 48.353699999999996 - type: nauc_precision_at_20_std value: 47.253 - type: nauc_precision_at_20_diff1 value: 15.687599999999998 - type: nauc_precision_at_100_max value: 36.771100000000004 - type: nauc_precision_at_100_std value: 48.1335 - type: nauc_precision_at_100_diff1 value: 2.6454 - type: nauc_precision_at_1000_max value: 23.0391 - type: nauc_precision_at_1000_std value: 53.26499999999999 - type: nauc_precision_at_1000_diff1 value: -15.0974 - type: nauc_mrr_at_1_max value: 56.6373 - type: nauc_mrr_at_1_std value: 2.1765 - type: nauc_mrr_at_1_diff1 value: 71.14829999999999 - type: nauc_mrr_at_3_max value: 57.6843 - type: nauc_mrr_at_3_std value: 2.4692 - type: nauc_mrr_at_3_diff1 value: 66.10340000000001 - type: nauc_mrr_at_5_max value: 59.2453 - type: nauc_mrr_at_5_std value: 5.1308 - type: nauc_mrr_at_5_diff1 value: 66.7377 - type: nauc_mrr_at_10_max value: 59.5575 - type: nauc_mrr_at_10_std value: 5.7778 - type: nauc_mrr_at_10_diff1 value: 66.36149999999999 - type: nauc_mrr_at_20_max value: 59.466300000000004 - type: nauc_mrr_at_20_std value: 5.6867 - type: nauc_mrr_at_20_diff1 value: 66.37100000000001 - type: nauc_mrr_at_100_max value: 59.404999999999994 - type: nauc_mrr_at_100_std value: 5.6528 - type: nauc_mrr_at_100_diff1 value: 66.41040000000001 - type: nauc_mrr_at_1000_max value: 59.3919 - type: nauc_mrr_at_1000_std value: 5.6358 - type: nauc_mrr_at_1000_diff1 value: 66.43050000000001 - type: main_score value: 65.847 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions (default) type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: 
d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: similarity_accuracy value: 99.7386 - type: similarity_accuracy_threshold value: 84.1442 - type: similarity_f1 value: 86.41980000000001 - type: similarity_f1_threshold value: 84.1442 - type: similarity_precision value: 88.98310000000001 - type: similarity_recall value: 84.0 - type: similarity_ap value: 93.50309999999999 - type: cosine_accuracy value: 99.7386 - type: cosine_accuracy_threshold value: 84.1442 - type: cosine_f1 value: 86.41980000000001 - type: cosine_f1_threshold value: 84.1442 - type: cosine_precision value: 88.98310000000001 - type: cosine_recall value: 84.0 - type: cosine_ap value: 93.50309999999999 - type: manhattan_accuracy value: 99.7406 - type: manhattan_accuracy_threshold value: 1243.0971 - type: manhattan_f1 value: 86.5641 - type: manhattan_f1_threshold value: 1243.0971 - type: manhattan_precision value: 88.8421 - type: manhattan_recall value: 84.39999999999999 - type: manhattan_ap value: 93.50840000000001 - type: euclidean_accuracy value: 99.7386 - type: euclidean_accuracy_threshold value: 56.313 - type: euclidean_f1 value: 86.41980000000001 - type: euclidean_f1_threshold value: 56.313 - type: euclidean_precision value: 88.98310000000001 - type: euclidean_recall value: 84.0 - type: euclidean_ap value: 93.50309999999999 - type: dot_accuracy value: 99.7386 - type: dot_accuracy_threshold value: 84.1442 - type: dot_f1 value: 86.41980000000001 - type: dot_f1_threshold value: 84.1442 - type: dot_precision value: 88.98310000000001 - type: dot_recall value: 84.0 - type: dot_ap value: 93.50309999999999 - type: max_accuracy value: 99.7406 - type: max_f1 value: 86.5641 - type: max_precision value: 88.98310000000001 - type: max_recall value: 84.39999999999999 - type: max_ap value: 93.50840000000001 - type: main_score value: 93.50840000000001 - task: type: Clustering dataset: name: MTEB StackExchangeClustering (default) type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 55.9311 - type: v_measure_std value: 5.0881 - type: main_score value: 55.9311 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P (default) type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 32.9298 - type: v_measure_std value: 1.7169 - type: main_score value: 32.9298 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions (default) type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 51.7759 - type: mrr value: 52.7456 - type: nAUC_map_max value: 15.138499999999999 - type: nAUC_map_std value: 9.876999999999999 - type: nAUC_map_diff1 value: 37.8337 - type: nAUC_mrr_max value: 16.128600000000002 - type: nAUC_mrr_std value: 10.4175 - type: nAUC_mrr_diff1 value: 37.3753 - type: main_score value: 51.7759 - task: type: Retrieval dataset: name: MTEB StackOverflowQA (default) type: CoIR-Retrieval/stackoverflow-qa config: default split: test revision: db8f169f3894c14a00251061f957b2063eef2bd5 metrics: - type: ndcg_at_1 value: 68.205 - type: ndcg_at_3 value: 75.473 - type: ndcg_at_5 value: 77.118 - type: ndcg_at_10 value: 78.45 - type: ndcg_at_20 value: 79.181 - type: ndcg_at_100 value: 80.259 - type: ndcg_at_1000 value: 80.518 - type: map_at_1 value: 68.205 - type: map_at_3 value: 73.763 - type: map_at_5 value: 74.68299999999999 - type: 
map_at_10 value: 75.234 - type: map_at_20 value: 75.43900000000001 - type: map_at_100 value: 75.59 - type: map_at_1000 value: 75.599 - type: recall_at_1 value: 68.205 - type: recall_at_3 value: 80.391 - type: recall_at_5 value: 84.353 - type: recall_at_10 value: 88.465 - type: recall_at_20 value: 91.32400000000001 - type: recall_at_100 value: 97.09100000000001 - type: recall_at_1000 value: 99.14699999999999 - type: precision_at_1 value: 68.205 - type: precision_at_3 value: 26.796999999999997 - type: precision_at_5 value: 16.871 - type: precision_at_10 value: 8.847 - type: precision_at_20 value: 4.566 - type: precision_at_100 value: 0.971 - type: precision_at_1000 value: 0.099 - type: mrr_at_1 value: 68.2046 - type: mrr_at_3 value: 73.763 - type: mrr_at_5 value: 74.6832 - type: mrr_at_10 value: 75.23440000000001 - type: mrr_at_20 value: 75.4389 - type: mrr_at_100 value: 75.5901 - type: mrr_at_1000 value: 75.59909999999999 - type: nauc_ndcg_at_1_max value: 70.0997 - type: nauc_ndcg_at_1_std value: -6.6174 - type: nauc_ndcg_at_1_diff1 value: 80.8018 - type: nauc_ndcg_at_3_max value: 71.8713 - type: nauc_ndcg_at_3_std value: -5.7584 - type: nauc_ndcg_at_3_diff1 value: 76.6152 - type: nauc_ndcg_at_5_max value: 71.7906 - type: nauc_ndcg_at_5_std value: -5.6573 - type: nauc_ndcg_at_5_diff1 value: 76.6923 - type: nauc_ndcg_at_10_max value: 71.4058 - type: nauc_ndcg_at_10_std value: -4.8043000000000005 - type: nauc_ndcg_at_10_diff1 value: 76.4267 - type: nauc_ndcg_at_20_max value: 71.5511 - type: nauc_ndcg_at_20_std value: -4.8308 - type: nauc_ndcg_at_20_diff1 value: 76.49669999999999 - type: nauc_ndcg_at_100_max value: 71.5604 - type: nauc_ndcg_at_100_std value: -4.8645000000000005 - type: nauc_ndcg_at_100_diff1 value: 77.022 - type: nauc_ndcg_at_1000_max value: 71.4953 - type: nauc_ndcg_at_1000_std value: -4.8631 - type: nauc_ndcg_at_1000_diff1 value: 77.1952 - type: nauc_map_at_1_max value: 70.0997 - type: nauc_map_at_1_std value: -6.6174 - type: nauc_map_at_1_diff1 value: 80.8018 - type: nauc_map_at_3_max value: 71.46329999999999 - type: nauc_map_at_3_std value: -5.9901 - type: nauc_map_at_3_diff1 value: 77.7281 - type: nauc_map_at_5_max value: 71.4046 - type: nauc_map_at_5_std value: -5.9794 - type: nauc_map_at_5_diff1 value: 77.8163 - type: nauc_map_at_10_max value: 71.2618 - type: nauc_map_at_10_std value: -5.702999999999999 - type: nauc_map_at_10_diff1 value: 77.73780000000001 - type: nauc_map_at_20_max value: 71.30330000000001 - type: nauc_map_at_20_std value: -5.691 - type: nauc_map_at_20_diff1 value: 77.7683 - type: nauc_map_at_100_max value: 71.3035 - type: nauc_map_at_100_std value: -5.680000000000001 - type: nauc_map_at_100_diff1 value: 77.8324 - type: nauc_map_at_1000_max value: 71.3013 - type: nauc_map_at_1000_std value: -5.6772 - type: nauc_map_at_1000_diff1 value: 77.837 - type: nauc_recall_at_1_max value: 70.0997 - type: nauc_recall_at_1_std value: -6.6174 - type: nauc_recall_at_1_diff1 value: 80.8018 - type: nauc_recall_at_3_max value: 73.3015 - type: nauc_recall_at_3_std value: -4.9247 - type: nauc_recall_at_3_diff1 value: 72.6201 - type: nauc_recall_at_5_max value: 73.3818 - type: nauc_recall_at_5_std value: -4.196 - type: nauc_recall_at_5_diff1 value: 71.8984 - type: nauc_recall_at_10_max value: 71.8002 - type: nauc_recall_at_10_std value: 1.0328 - type: nauc_recall_at_10_diff1 value: 69.0552 - type: nauc_recall_at_20_max value: 72.9934 - type: nauc_recall_at_20_std value: 2.0923000000000003 - type: nauc_recall_at_20_diff1 value: 67.3481 - type: nauc_recall_at_100_max value: 
76.0971 - type: nauc_recall_at_100_std value: 12.4217 - type: nauc_recall_at_100_diff1 value: 66.6112 - type: nauc_recall_at_1000_max value: 76.7462 - type: nauc_recall_at_1000_std value: 50.754200000000004 - type: nauc_recall_at_1000_diff1 value: 69.8675 - type: nauc_precision_at_1_max value: 70.0997 - type: nauc_precision_at_1_std value: -6.6174 - type: nauc_precision_at_1_diff1 value: 80.8018 - type: nauc_precision_at_3_max value: 73.3015 - type: nauc_precision_at_3_std value: -4.9247 - type: nauc_precision_at_3_diff1 value: 72.6201 - type: nauc_precision_at_5_max value: 73.3818 - type: nauc_precision_at_5_std value: -4.196 - type: nauc_precision_at_5_diff1 value: 71.8984 - type: nauc_precision_at_10_max value: 71.8002 - type: nauc_precision_at_10_std value: 1.0328 - type: nauc_precision_at_10_diff1 value: 69.0552 - type: nauc_precision_at_20_max value: 72.9934 - type: nauc_precision_at_20_std value: 2.0923000000000003 - type: nauc_precision_at_20_diff1 value: 67.3481 - type: nauc_precision_at_100_max value: 76.0971 - type: nauc_precision_at_100_std value: 12.4217 - type: nauc_precision_at_100_diff1 value: 66.6112 - type: nauc_precision_at_1000_max value: 76.7462 - type: nauc_precision_at_1000_std value: 50.754200000000004 - type: nauc_precision_at_1000_diff1 value: 69.8675 - type: nauc_mrr_at_1_max value: 70.0997 - type: nauc_mrr_at_1_std value: -6.6174 - type: nauc_mrr_at_1_diff1 value: 80.8018 - type: nauc_mrr_at_3_max value: 71.46329999999999 - type: nauc_mrr_at_3_std value: -5.9901 - type: nauc_mrr_at_3_diff1 value: 77.7281 - type: nauc_mrr_at_5_max value: 71.4046 - type: nauc_mrr_at_5_std value: -5.9794 - type: nauc_mrr_at_5_diff1 value: 77.8163 - type: nauc_mrr_at_10_max value: 71.2618 - type: nauc_mrr_at_10_std value: -5.702999999999999 - type: nauc_mrr_at_10_diff1 value: 77.73780000000001 - type: nauc_mrr_at_20_max value: 71.30330000000001 - type: nauc_mrr_at_20_std value: -5.691 - type: nauc_mrr_at_20_diff1 value: 77.7683 - type: nauc_mrr_at_100_max value: 71.3035 - type: nauc_mrr_at_100_std value: -5.680000000000001 - type: nauc_mrr_at_100_diff1 value: 77.8324 - type: nauc_mrr_at_1000_max value: 71.3013 - type: nauc_mrr_at_1000_std value: -5.6772 - type: nauc_mrr_at_1000_diff1 value: 77.837 - type: main_score value: 78.45 - task: type: Summarization dataset: name: MTEB SummEval (default) type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: pearson value: 31.7097 - type: spearman value: 32.0256 - type: cosine_spearman value: 32.0256 - type: cosine_pearson value: 31.7097 - type: dot_spearman value: 32.0256 - type: dot_pearson value: 31.7097 - type: main_score value: 32.0256 - task: type: Retrieval dataset: name: MTEB SyntheticText2SQL (default) type: CoIR-Retrieval/synthetic-text2sql config: default split: test revision: 686b87296c3a0191b5d9415a00526c62db9fce09 metrics: - type: ndcg_at_1 value: 3.5549999999999997 - type: ndcg_at_3 value: 41.534 - type: ndcg_at_5 value: 44.847 - type: ndcg_at_10 value: 47.344 - type: ndcg_at_20 value: 48.826 - type: ndcg_at_100 value: 50.442 - type: ndcg_at_1000 value: 50.937 - type: map_at_1 value: 3.5549999999999997 - type: map_at_3 value: 33.083 - type: map_at_5 value: 34.928 - type: map_at_10 value: 35.964 - type: map_at_20 value: 36.376 - type: map_at_100 value: 36.61 - type: map_at_1000 value: 36.63 - type: recall_at_1 value: 3.5549999999999997 - type: recall_at_3 value: 65.63 - type: recall_at_5 value: 73.646 - type: recall_at_10 value: 81.337 - type: recall_at_20 value: 
87.165 - type: recall_at_100 value: 95.71 - type: recall_at_1000 value: 99.556 - type: precision_at_1 value: 3.5549999999999997 - type: precision_at_3 value: 21.877 - type: precision_at_5 value: 14.729000000000001 - type: precision_at_10 value: 8.134 - type: precision_at_20 value: 4.358 - type: precision_at_100 value: 0.9570000000000001 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 31.721100000000003 - type: mrr_at_3 value: 48.6754 - type: mrr_at_5 value: 50.3093 - type: mrr_at_10 value: 51.2454 - type: mrr_at_20 value: 51.629999999999995 - type: mrr_at_100 value: 51.8552 - type: mrr_at_1000 value: 51.8747 - type: nauc_ndcg_at_1_max value: 6.543 - type: nauc_ndcg_at_1_std value: -11.0614 - type: nauc_ndcg_at_1_diff1 value: 77.4191 - type: nauc_ndcg_at_3_max value: 35.9842 - type: nauc_ndcg_at_3_std value: -16.258200000000002 - type: nauc_ndcg_at_3_diff1 value: -62.2219 - type: nauc_ndcg_at_5_max value: 35.0885 - type: nauc_ndcg_at_5_std value: -14.935699999999999 - type: nauc_ndcg_at_5_diff1 value: -58.3931 - type: nauc_ndcg_at_10_max value: 33.7926 - type: nauc_ndcg_at_10_std value: -14.2862 - type: nauc_ndcg_at_10_diff1 value: -55.5325 - type: nauc_ndcg_at_20_max value: 33.631899999999995 - type: nauc_ndcg_at_20_std value: -14.061499999999999 - type: nauc_ndcg_at_20_diff1 value: -53.7148 - type: nauc_ndcg_at_100_max value: 32.736900000000006 - type: nauc_ndcg_at_100_std value: -13.7486 - type: nauc_ndcg_at_100_diff1 value: -52.0744 - type: nauc_ndcg_at_1000_max value: 32.941500000000005 - type: nauc_ndcg_at_1000_std value: -14.186099999999998 - type: nauc_ndcg_at_1000_diff1 value: -51.6402 - type: nauc_map_at_1_max value: 6.543 - type: nauc_map_at_1_std value: -11.0614 - type: nauc_map_at_1_diff1 value: 77.4191 - type: nauc_map_at_3_max value: 33.901399999999995 - type: nauc_map_at_3_std value: -15.789 - type: nauc_map_at_3_diff1 value: -53.5257 - type: nauc_map_at_5_max value: 33.1725 - type: nauc_map_at_5_std value: -14.948400000000001 - type: nauc_map_at_5_diff1 value: -50.5361 - type: nauc_map_at_10_max value: 32.5273 - type: nauc_map_at_10_std value: -14.648 - type: nauc_map_at_10_diff1 value: -48.928 - type: nauc_map_at_20_max value: 32.4474 - type: nauc_map_at_20_std value: -14.6155 - type: nauc_map_at_20_diff1 value: -48.2673 - type: nauc_map_at_100_max value: 32.2692 - type: nauc_map_at_100_std value: -14.5789 - type: nauc_map_at_100_diff1 value: -47.9677 - type: nauc_map_at_1000_max value: 32.2805 - type: nauc_map_at_1000_std value: -14.594999999999999 - type: nauc_map_at_1000_diff1 value: -47.944700000000005 - type: nauc_recall_at_1_max value: 6.543 - type: nauc_recall_at_1_std value: -11.0614 - type: nauc_recall_at_1_diff1 value: 77.4191 - type: nauc_recall_at_3_max value: 39.704899999999995 - type: nauc_recall_at_3_std value: -17.1274 - type: nauc_recall_at_3_diff1 value: -77.3937 - type: nauc_recall_at_5_max value: 38.8786 - type: nauc_recall_at_5_std value: -14.7304 - type: nauc_recall_at_5_diff1 value: -73.366 - type: nauc_recall_at_10_max value: 36.2642 - type: nauc_recall_at_10_std value: -12.828800000000001 - type: nauc_recall_at_10_diff1 value: -69.7955 - type: nauc_recall_at_20_max value: 36.5493 - type: nauc_recall_at_20_std value: -10.9359 - type: nauc_recall_at_20_diff1 value: -66.8099 - type: nauc_recall_at_100_max value: 29.1291 - type: nauc_recall_at_100_std value: 0.3365 - type: nauc_recall_at_100_diff1 value: -63.8938 - type: nauc_recall_at_1000_max value: 37.589800000000004 - type: nauc_recall_at_1000_std value: 17.3579 - type: 
nauc_recall_at_1000_diff1 value: -68.429 - type: nauc_precision_at_1_max value: 6.543 - type: nauc_precision_at_1_std value: -11.0614 - type: nauc_precision_at_1_diff1 value: 77.4191 - type: nauc_precision_at_3_max value: 39.704899999999995 - type: nauc_precision_at_3_std value: -17.1274 - type: nauc_precision_at_3_diff1 value: -77.3937 - type: nauc_precision_at_5_max value: 38.8786 - type: nauc_precision_at_5_std value: -14.7304 - type: nauc_precision_at_5_diff1 value: -73.366 - type: nauc_precision_at_10_max value: 36.2642 - type: nauc_precision_at_10_std value: -12.828800000000001 - type: nauc_precision_at_10_diff1 value: -69.7955 - type: nauc_precision_at_20_max value: 36.5493 - type: nauc_precision_at_20_std value: -10.9359 - type: nauc_precision_at_20_diff1 value: -66.8099 - type: nauc_precision_at_100_max value: 29.1291 - type: nauc_precision_at_100_std value: 0.3365 - type: nauc_precision_at_100_diff1 value: -63.8938 - type: nauc_precision_at_1000_max value: 37.589800000000004 - type: nauc_precision_at_1000_std value: 17.3579 - type: nauc_precision_at_1000_diff1 value: -68.429 - type: nauc_mrr_at_1_max value: 18.7616 - type: nauc_mrr_at_1_std value: -9.332600000000001 - type: nauc_mrr_at_1_diff1 value: -38.775 - type: nauc_mrr_at_3_max value: 27.9627 - type: nauc_mrr_at_3_std value: -12.1163 - type: nauc_mrr_at_3_diff1 value: -56.172900000000006 - type: nauc_mrr_at_5_max value: 27.385900000000003 - type: nauc_mrr_at_5_std value: -11.7823 - type: nauc_mrr_at_5_diff1 value: -55.085300000000004 - type: nauc_mrr_at_10_max value: 26.9297 - type: nauc_mrr_at_10_std value: -11.5899 - type: nauc_mrr_at_10_diff1 value: -54.352900000000005 - type: nauc_mrr_at_20_max value: 26.8231 - type: nauc_mrr_at_20_std value: -11.5438 - type: nauc_mrr_at_20_diff1 value: -54.101 - type: nauc_mrr_at_100_max value: 26.6888 - type: nauc_mrr_at_100_std value: -11.5184 - type: nauc_mrr_at_100_diff1 value: -53.9839 - type: nauc_mrr_at_1000_max value: 26.691399999999998 - type: nauc_mrr_at_1000_std value: -11.5244 - type: nauc_mrr_at_1000_diff1 value: -53.976 - type: main_score value: 47.344 - task: type: Retrieval dataset: name: MTEB TRECCOVID (default) type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: ndcg_at_1 value: 70.0 - type: ndcg_at_3 value: 70.877 - type: ndcg_at_5 value: 70.735 - type: ndcg_at_10 value: 68.573 - type: ndcg_at_20 value: 65.635 - type: ndcg_at_100 value: 53.501 - type: ndcg_at_1000 value: 49.288 - type: map_at_1 value: 0.207 - type: map_at_3 value: 0.551 - type: map_at_5 value: 0.8909999999999999 - type: map_at_10 value: 1.635 - type: map_at_20 value: 2.952 - type: map_at_100 value: 9.713 - type: map_at_1000 value: 24.064 - type: recall_at_1 value: 0.207 - type: recall_at_3 value: 0.602 - type: recall_at_5 value: 0.992 - type: recall_at_10 value: 1.9009999999999998 - type: recall_at_20 value: 3.5709999999999997 - type: recall_at_100 value: 13.297999999999998 - type: recall_at_1000 value: 47.067 - type: precision_at_1 value: 80.0 - type: precision_at_3 value: 76.667 - type: precision_at_5 value: 76.4 - type: precision_at_10 value: 73.2 - type: precision_at_20 value: 70.1 - type: precision_at_100 value: 55.04 - type: precision_at_1000 value: 22.046 - type: mrr_at_1 value: 80.0 - type: mrr_at_3 value: 88.66669999999999 - type: mrr_at_5 value: 89.16669999999999 - type: mrr_at_10 value: 89.16669999999999 - type: mrr_at_20 value: 89.16669999999999 - type: mrr_at_100 value: 89.16669999999999 - type: mrr_at_1000 value: 
89.16669999999999 - type: nauc_ndcg_at_1_max value: 9.0505 - type: nauc_ndcg_at_1_std value: 17.7341 - type: nauc_ndcg_at_1_diff1 value: -17.272399999999998 - type: nauc_ndcg_at_3_max value: 27.3702 - type: nauc_ndcg_at_3_std value: 43.432500000000005 - type: nauc_ndcg_at_3_diff1 value: -5.716600000000001 - type: nauc_ndcg_at_5_max value: 24.6447 - type: nauc_ndcg_at_5_std value: 48.0114 - type: nauc_ndcg_at_5_diff1 value: -7.0447999999999995 - type: nauc_ndcg_at_10_max value: 31.5589 - type: nauc_ndcg_at_10_std value: 60.242 - type: nauc_ndcg_at_10_diff1 value: -4.827 - type: nauc_ndcg_at_20_max value: 39.195600000000006 - type: nauc_ndcg_at_20_std value: 67.9313 - type: nauc_ndcg_at_20_diff1 value: -10.0317 - type: nauc_ndcg_at_100_max value: 43.8896 - type: nauc_ndcg_at_100_std value: 76.6623 - type: nauc_ndcg_at_100_diff1 value: -14.7694 - type: nauc_ndcg_at_1000_max value: 46.935 - type: nauc_ndcg_at_1000_std value: 79.9247 - type: nauc_ndcg_at_1000_diff1 value: -12.9885 - type: nauc_map_at_1_max value: 5.587899999999999 - type: nauc_map_at_1_std value: -6.5333000000000006 - type: nauc_map_at_1_diff1 value: 7.8414 - type: nauc_map_at_3_max value: 14.21 - type: nauc_map_at_3_std value: 7.9614 - type: nauc_map_at_3_diff1 value: 11.9467 - type: nauc_map_at_5_max value: 14.514299999999999 - type: nauc_map_at_5_std value: 10.6974 - type: nauc_map_at_5_diff1 value: 11.732800000000001 - type: nauc_map_at_10_max value: 17.5629 - type: nauc_map_at_10_std value: 21.4707 - type: nauc_map_at_10_diff1 value: 10.9138 - type: nauc_map_at_20_max value: 23.891399999999997 - type: nauc_map_at_20_std value: 32.5254 - type: nauc_map_at_20_diff1 value: 5.6072999999999995 - type: nauc_map_at_100_max value: 37.247 - type: nauc_map_at_100_std value: 66.2197 - type: nauc_map_at_100_diff1 value: -6.0896 - type: nauc_map_at_1000_max value: 51.590599999999995 - type: nauc_map_at_1000_std value: 83.3358 - type: nauc_map_at_1000_diff1 value: -18.7689 - type: nauc_recall_at_1_max value: 5.587899999999999 - type: nauc_recall_at_1_std value: -6.5333000000000006 - type: nauc_recall_at_1_diff1 value: 7.8414 - type: nauc_recall_at_3_max value: 10.6036 - type: nauc_recall_at_3_std value: 8.7269 - type: nauc_recall_at_3_diff1 value: 13.296 - type: nauc_recall_at_5_max value: 9.3121 - type: nauc_recall_at_5_std value: 9.9978 - type: nauc_recall_at_5_diff1 value: 12.5994 - type: nauc_recall_at_10_max value: 10.0265 - type: nauc_recall_at_10_std value: 16.8073 - type: nauc_recall_at_10_diff1 value: 10.8776 - type: nauc_recall_at_20_max value: 16.3788 - type: nauc_recall_at_20_std value: 23.7003 - type: nauc_recall_at_20_diff1 value: 7.832 - type: nauc_recall_at_100_max value: 25.289 - type: nauc_recall_at_100_std value: 51.6757 - type: nauc_recall_at_100_diff1 value: 0.4044 - type: nauc_recall_at_1000_max value: 42.1531 - type: nauc_recall_at_1000_std value: 72.10419999999999 - type: nauc_recall_at_1000_diff1 value: -12.410499999999999 - type: nauc_precision_at_1_max value: 31.203799999999998 - type: nauc_precision_at_1_std value: 23.1918 - type: nauc_precision_at_1_diff1 value: -32.057900000000004 - type: nauc_precision_at_3_max value: 40.368300000000005 - type: nauc_precision_at_3_std value: 50.225699999999996 - type: nauc_precision_at_3_diff1 value: -2.2047 - type: nauc_precision_at_5_max value: 29.592200000000002 - type: nauc_precision_at_5_std value: 49.6822 - type: nauc_precision_at_5_diff1 value: -4.1202000000000005 - type: nauc_precision_at_10_max value: 41.876400000000004 - type: nauc_precision_at_10_std value: 
67.3955 - type: nauc_precision_at_10_diff1 value: 1.8023 - type: nauc_precision_at_20_max value: 49.011500000000005 - type: nauc_precision_at_20_std value: 72.0322 - type: nauc_precision_at_20_diff1 value: -8.0818 - type: nauc_precision_at_100_max value: 49.385200000000005 - type: nauc_precision_at_100_std value: 79.20660000000001 - type: nauc_precision_at_100_diff1 value: -12.9969 - type: nauc_precision_at_1000_max value: 41.5596 - type: nauc_precision_at_1000_std value: 51.89470000000001 - type: nauc_precision_at_1000_diff1 value: -24.5507 - type: nauc_mrr_at_1_max value: 31.203799999999998 - type: nauc_mrr_at_1_std value: 23.1918 - type: nauc_mrr_at_1_diff1 value: -32.057900000000004 - type: nauc_mrr_at_3_max value: 37.7018 - type: nauc_mrr_at_3_std value: 31.9141 - type: nauc_mrr_at_3_diff1 value: -22.4835 - type: nauc_mrr_at_5_max value: 35.284 - type: nauc_mrr_at_5_std value: 28.569899999999997 - type: nauc_mrr_at_5_diff1 value: -26.309700000000003 - type: nauc_mrr_at_10_max value: 35.284 - type: nauc_mrr_at_10_std value: 28.569899999999997 - type: nauc_mrr_at_10_diff1 value: -26.309700000000003 - type: nauc_mrr_at_20_max value: 35.284 - type: nauc_mrr_at_20_std value: 28.569899999999997 - type: nauc_mrr_at_20_diff1 value: -26.309700000000003 - type: nauc_mrr_at_100_max value: 35.284 - type: nauc_mrr_at_100_std value: 28.569899999999997 - type: nauc_mrr_at_100_diff1 value: -26.309700000000003 - type: nauc_mrr_at_1000_max value: 35.284 - type: nauc_mrr_at_1000_std value: 28.569899999999997 - type: nauc_mrr_at_1000_diff1 value: -26.309700000000003 - type: main_score value: 68.573 - task: type: Retrieval dataset: name: MTEB Touche2020 (default) type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: ndcg_at_1 value: 41.837 - type: ndcg_at_3 value: 34.675 - type: ndcg_at_5 value: 30.017 - type: ndcg_at_10 value: 27.306 - type: ndcg_at_20 value: 27.009 - type: ndcg_at_100 value: 38.037 - type: ndcg_at_1000 value: 49.413000000000004 - type: map_at_1 value: 3.304 - type: map_at_3 value: 6.0569999999999995 - type: map_at_5 value: 7.856000000000001 - type: map_at_10 value: 10.869 - type: map_at_20 value: 12.824 - type: map_at_100 value: 16.631999999999998 - type: map_at_1000 value: 18.138 - type: recall_at_1 value: 3.304 - type: recall_at_3 value: 7.13 - type: recall_at_5 value: 9.995999999999999 - type: recall_at_10 value: 16.766000000000002 - type: recall_at_20 value: 22.933 - type: recall_at_100 value: 47.427 - type: recall_at_1000 value: 81.527 - type: precision_at_1 value: 42.857 - type: precision_at_3 value: 35.374 - type: precision_at_5 value: 28.163 - type: precision_at_10 value: 23.061 - type: precision_at_20 value: 16.633 - type: precision_at_100 value: 7.632999999999999 - type: precision_at_1000 value: 1.51 - type: mrr_at_1 value: 42.857099999999996 - type: mrr_at_3 value: 54.4218 - type: mrr_at_5 value: 54.4218 - type: mrr_at_10 value: 56.431 - type: mrr_at_20 value: 56.880900000000004 - type: mrr_at_100 value: 57.0526 - type: mrr_at_1000 value: 57.0526 - type: nauc_ndcg_at_1_max value: -44.2104 - type: nauc_ndcg_at_1_std value: -2.3875 - type: nauc_ndcg_at_1_diff1 value: -23.4197 - type: nauc_ndcg_at_3_max value: -40.1986 - type: nauc_ndcg_at_3_std value: -4.3845 - type: nauc_ndcg_at_3_diff1 value: -26.881100000000004 - type: nauc_ndcg_at_5_max value: -37.8693 - type: nauc_ndcg_at_5_std value: -5.817 - type: nauc_ndcg_at_5_diff1 value: -30.292599999999997 - type: nauc_ndcg_at_10_max value: -35.0514 - type: 
nauc_ndcg_at_10_std value: -12.628 - type: nauc_ndcg_at_10_diff1 value: -28.5171 - type: nauc_ndcg_at_20_max value: -36.829499999999996 - type: nauc_ndcg_at_20_std value: -10.9047 - type: nauc_ndcg_at_20_diff1 value: -25.590200000000003 - type: nauc_ndcg_at_100_max value: -33.1224 - type: nauc_ndcg_at_100_std value: 14.3094 - type: nauc_ndcg_at_100_diff1 value: -17.6544 - type: nauc_ndcg_at_1000_max value: -30.8819 - type: nauc_ndcg_at_1000_std value: 22.3523 - type: nauc_ndcg_at_1000_diff1 value: -19.5741 - type: nauc_map_at_1_max value: -38.6863 - type: nauc_map_at_1_std value: -15.0366 - type: nauc_map_at_1_diff1 value: -8.5063 - type: nauc_map_at_3_max value: -38.9161 - type: nauc_map_at_3_std value: -16.71 - type: nauc_map_at_3_diff1 value: -21.3221 - type: nauc_map_at_5_max value: -35.0036 - type: nauc_map_at_5_std value: -18.4668 - type: nauc_map_at_5_diff1 value: -27.6758 - type: nauc_map_at_10_max value: -29.7816 - type: nauc_map_at_10_std value: -20.890900000000002 - type: nauc_map_at_10_diff1 value: -27.380100000000002 - type: nauc_map_at_20_max value: -29.3362 - type: nauc_map_at_20_std value: -18.9281 - type: nauc_map_at_20_diff1 value: -27.058500000000002 - type: nauc_map_at_100_max value: -27.9555 - type: nauc_map_at_100_std value: -7.222 - type: nauc_map_at_100_diff1 value: -22.7849 - type: nauc_map_at_1000_max value: -26.954 - type: nauc_map_at_1000_std value: -4.0097000000000005 - type: nauc_map_at_1000_diff1 value: -22.855 - type: nauc_recall_at_1_max value: -38.6863 - type: nauc_recall_at_1_std value: -15.0366 - type: nauc_recall_at_1_diff1 value: -8.5063 - type: nauc_recall_at_3_max value: -42.2532 - type: nauc_recall_at_3_std value: -20.399 - type: nauc_recall_at_3_diff1 value: -23.8415 - type: nauc_recall_at_5_max value: -35.3457 - type: nauc_recall_at_5_std value: -20.0969 - type: nauc_recall_at_5_diff1 value: -29.5907 - type: nauc_recall_at_10_max value: -31.7181 - type: nauc_recall_at_10_std value: -22.9559 - type: nauc_recall_at_10_diff1 value: -22.564400000000003 - type: nauc_recall_at_20_max value: -34.5273 - type: nauc_recall_at_20_std value: -15.6335 - type: nauc_recall_at_20_diff1 value: -22.9889 - type: nauc_recall_at_100_max value: -28.2509 - type: nauc_recall_at_100_std value: 30.481399999999997 - type: nauc_recall_at_100_diff1 value: -6.9437999999999995 - type: nauc_recall_at_1000_max value: -12.5952 - type: nauc_recall_at_1000_std value: 69.9957 - type: nauc_recall_at_1000_diff1 value: 2.2129 - type: nauc_precision_at_1_max value: -45.3657 - type: nauc_precision_at_1_std value: -4.4435 - type: nauc_precision_at_1_diff1 value: -18.6647 - type: nauc_precision_at_3_max value: -39.1078 - type: nauc_precision_at_3_std value: -8.047600000000001 - type: nauc_precision_at_3_diff1 value: -27.322200000000002 - type: nauc_precision_at_5_max value: -32.8848 - type: nauc_precision_at_5_std value: -8.5508 - type: nauc_precision_at_5_diff1 value: -31.567600000000002 - type: nauc_precision_at_10_max value: -28.719499999999996 - type: nauc_precision_at_10_std value: -14.498800000000001 - type: nauc_precision_at_10_diff1 value: -27.8402 - type: nauc_precision_at_20_max value: -26.466 - type: nauc_precision_at_20_std value: 3.3133000000000004 - type: nauc_precision_at_20_diff1 value: -31.5367 - type: nauc_precision_at_100_max value: -5.4186 - type: nauc_precision_at_100_std value: 61.58709999999999 - type: nauc_precision_at_100_diff1 value: -8.8049 - type: nauc_precision_at_1000_max value: 37.745400000000004 - type: nauc_precision_at_1000_std value: 48.7776 - type: 
nauc_precision_at_1000_diff1 value: 6.4595 - type: nauc_mrr_at_1_max value: -45.3657 - type: nauc_mrr_at_1_std value: -4.4435 - type: nauc_mrr_at_1_diff1 value: -18.6647 - type: nauc_mrr_at_3_max value: -52.9035 - type: nauc_mrr_at_3_std value: -13.174800000000001 - type: nauc_mrr_at_3_diff1 value: -20.045299999999997 - type: nauc_mrr_at_5_max value: -52.9035 - type: nauc_mrr_at_5_std value: -13.174800000000001 - type: nauc_mrr_at_5_diff1 value: -20.045299999999997 - type: nauc_mrr_at_10_max value: -51.358599999999996 - type: nauc_mrr_at_10_std value: -11.266 - type: nauc_mrr_at_10_diff1 value: -19.4274 - type: nauc_mrr_at_20_max value: -51.648799999999994 - type: nauc_mrr_at_20_std value: -10.9663 - type: nauc_mrr_at_20_diff1 value: -19.5931 - type: nauc_mrr_at_100_max value: -51.669200000000004 - type: nauc_mrr_at_100_std value: -10.9424 - type: nauc_mrr_at_100_diff1 value: -19.7412 - type: nauc_mrr_at_1000_max value: -51.669200000000004 - type: nauc_mrr_at_1000_std value: -10.9424 - type: nauc_mrr_at_1000_diff1 value: -19.7412 - type: main_score value: 27.306 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification (default) type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 62.480500000000006 - type: f1 value: 48.201100000000004 - type: f1_weighted value: 70.8591 - type: ap value: 10.9948 - type: ap_weighted value: 10.9948 - type: main_score value: 62.480500000000006 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification (default) type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 58.3616 - type: f1 value: 58.5596 - type: f1_weighted value: 57.801 - type: main_score value: 58.3616 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering (default) type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 38.6199 - type: v_measure_std value: 2.3855999999999997 - type: main_score value: 38.6199 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 (default) type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: similarity_accuracy value: 82.9886 - type: similarity_accuracy_threshold value: 86.3901 - type: similarity_f1 value: 60.866200000000006 - type: similarity_f1_threshold value: 83.9821 - type: similarity_precision value: 59.333499999999994 - type: similarity_recall value: 62.480199999999996 - type: similarity_ap value: 64.413 - type: cosine_accuracy value: 82.9886 - type: cosine_accuracy_threshold value: 86.3901 - type: cosine_f1 value: 60.866200000000006 - type: cosine_f1_threshold value: 83.9821 - type: cosine_precision value: 59.333499999999994 - type: cosine_recall value: 62.480199999999996 - type: cosine_ap value: 64.413 - type: manhattan_accuracy value: 82.9409 - type: manhattan_accuracy_threshold value: 1144.7468000000001 - type: manhattan_f1 value: 60.760400000000004 - type: manhattan_f1_threshold value: 1291.7232999999999 - type: manhattan_precision value: 54.7126 - type: manhattan_recall value: 68.3113 - type: manhattan_ap value: 64.3592 - type: euclidean_accuracy value: 82.9886 - type: euclidean_accuracy_threshold value: 52.1726 - type: euclidean_f1 value: 60.866200000000006 - type: euclidean_f1_threshold value: 
56.6001 - type: euclidean_precision value: 59.333499999999994 - type: euclidean_recall value: 62.480199999999996 - type: euclidean_ap value: 64.4131 - type: dot_accuracy value: 82.9886 - type: dot_accuracy_threshold value: 86.3901 - type: dot_f1 value: 60.866200000000006 - type: dot_f1_threshold value: 83.9821 - type: dot_precision value: 59.333499999999994 - type: dot_recall value: 62.480199999999996 - type: dot_ap value: 64.413 - type: max_accuracy value: 82.9886 - type: max_f1 value: 60.866200000000006 - type: max_precision value: 59.333499999999994 - type: max_recall value: 68.3113 - type: max_ap value: 64.4131 - type: main_score value: 64.4131 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus (default) type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: similarity_accuracy value: 88.95100000000001 - type: similarity_accuracy_threshold value: 82.18520000000001 - type: similarity_f1 value: 77.9051 - type: similarity_f1_threshold value: 80.3369 - type: similarity_precision value: 76.07310000000001 - type: similarity_recall value: 79.8275 - type: similarity_ap value: 86.1545 - type: cosine_accuracy value: 88.95100000000001 - type: cosine_accuracy_threshold value: 82.18520000000001 - type: cosine_f1 value: 77.9051 - type: cosine_f1_threshold value: 80.3369 - type: cosine_precision value: 76.07310000000001 - type: cosine_recall value: 79.8275 - type: cosine_ap value: 86.1545 - type: manhattan_accuracy value: 88.9277 - type: manhattan_accuracy_threshold value: 1338.2836 - type: manhattan_f1 value: 77.8186 - type: manhattan_f1_threshold value: 1372.5978 - type: manhattan_precision value: 76.5745 - type: manhattan_recall value: 79.1038 - type: manhattan_ap value: 86.114 - type: euclidean_accuracy value: 88.95100000000001 - type: euclidean_accuracy_threshold value: 59.6905 - type: euclidean_f1 value: 77.9051 - type: euclidean_f1_threshold value: 62.71060000000001 - type: euclidean_precision value: 76.07310000000001 - type: euclidean_recall value: 79.8275 - type: euclidean_ap value: 86.1544 - type: dot_accuracy value: 88.95100000000001 - type: dot_accuracy_threshold value: 82.18520000000001 - type: dot_f1 value: 77.9051 - type: dot_f1_threshold value: 80.3369 - type: dot_precision value: 76.07310000000001 - type: dot_recall value: 79.8275 - type: dot_ap value: 86.1544 - type: max_accuracy value: 88.95100000000001 - type: max_f1 value: 77.9051 - type: max_precision value: 76.5745 - type: max_recall value: 79.8275 - type: max_ap value: 86.1545 - type: main_score value: 86.1545 --- # hongkeon/granite-embedding-278m-multilingual-Q8_0-GGUF This model was converted to GGUF format from [`ibm-granite/granite-embedding-278m-multilingual`](https://huggingface.co/ibm-granite/granite-embedding-278m-multilingual) using llama.cpp via the ggml.ai's [GGUF-my-repo](https://huggingface.co/spaces/ggml-org/gguf-my-repo) space. Refer to the [original model card](https://huggingface.co/ibm-granite/granite-embedding-278m-multilingual) for more details on the model. ## Use with llama.cpp Install llama.cpp through brew (works on Mac and Linux) ```bash brew install llama.cpp ``` Invoke the llama.cpp server or the CLI. 
### CLI: ```bash llama-cli --hf-repo hongkeon/granite-embedding-278m-multilingual-Q8_0-GGUF --hf-file granite-embedding-278m-multilingual-q8_0.gguf -p "The meaning to life and the universe is" ``` ### Server: ```bash llama-server --hf-repo hongkeon/granite-embedding-278m-multilingual-Q8_0-GGUF --hf-file granite-embedding-278m-multilingual-q8_0.gguf -c 2048 ``` Note: You can also use this checkpoint directly through the [usage steps](https://github.com/ggerganov/llama.cpp?tab=readme-ov-file#usage) listed in the llama.cpp repo. Step 1: Clone llama.cpp from GitHub. ``` git clone https://github.com/ggerganov/llama.cpp ``` Step 2: Move into the llama.cpp folder and build it with the `LLAMA_CURL=1` flag along with any other hardware-specific flags (for example, `LLAMA_CUDA=1` for Nvidia GPUs on Linux). ``` cd llama.cpp && LLAMA_CURL=1 make ``` Step 3: Run inference through the main binary. ``` ./llama-cli --hf-repo hongkeon/granite-embedding-278m-multilingual-Q8_0-GGUF --hf-file granite-embedding-278m-multilingual-q8_0.gguf -p "The meaning to life and the universe is" ``` or ``` ./llama-server --hf-repo hongkeon/granite-embedding-278m-multilingual-Q8_0-GGUF --hf-file granite-embedding-278m-multilingual-q8_0.gguf -c 2048 ```
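Because this is an embedding model, a completion-style prompt will not give you what you usually want; what follows is a minimal sketch for producing an embedding vector instead, assuming the `llama-embedding` example binary built from the same llama.cpp checkout and that it accepts the same `--hf-repo`/`--hf-file` options as `llama-cli` (both are assumptions, not part of the original card):

```bash
# Compute an embedding vector for a single piece of text with the llama.cpp embedding example
llama-embedding \
  --hf-repo hongkeon/granite-embedding-278m-multilingual-Q8_0-GGUF \
  --hf-file granite-embedding-278m-multilingual-q8_0.gguf \
  -p "How do I renew my passport?"
```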
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
XO-Appleton/opus-mt-zh-en-finetuned
XO-Appleton
translation
[ "transformers", "pytorch", "marian", "text2text-generation", "translation", "zh", "en", "dataset:bigbio/paramed", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-10-27T06:09:59
2023-10-27T06:28:42
135
0
--- datasets: - bigbio/paramed language: - zh - en metrics: - sacrebleu - bleu pipeline_tag: translation --- A fine-tuned version of the pre-trained MarianMT Zh-En model from the Language Technology Research Group at the University of Helsinki (Helsinki-NLP), fine-tuned on the ParaMed Zh-En parallel corpus.
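A minimal usage sketch (not part of the original card), assuming the checkpoint loads with the standard `transformers` translation pipeline the same way the base Helsinki-NLP `opus-mt-zh-en` model does; the Chinese example sentence is illustrative only:

```python
from transformers import pipeline

# Load the fine-tuned Zh-En MarianMT checkpoint referenced by this card
translator = pipeline("translation", model="XO-Appleton/opus-mt-zh-en-finetuned")

# Translate a Chinese biomedical-style sentence into English
src = "该研究评估了药物治疗对患者预后的影响。"
print(translator(src, max_length=128)[0]["translation_text"])
```

The same pipeline call also accepts a list of sentences, which is convenient when scoring translations with sacreBLEU against the ParaMed references.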
[ "TRANSLATION" ]
[ "PARAMED" ]
knowledgator/modern-gliner-bi-base-v1.0
knowledgator
token-classification
[ "gliner", "pytorch", "NER", "GLiNER", "information extraction", "encoder", "entity recognition", "modernbert", "token-classification", "en", "dataset:urchade/pile-mistral-v0.1", "dataset:numind/NuNER", "dataset:knowledgator/GLINER-multi-task-synthetic-data", "arxiv:2412.13663", "arxiv:2311.08526", "arxiv:2406.12925", "base_model:BAAI/bge-small-en-v1.5", "base_model:finetune:BAAI/bge-small-en-v1.5", "license:apache-2.0", "region:us" ]
2024-12-24T10:54:53
2025-01-07T11:11:08
135
25
--- base_model: - answerdotai/ModernBERT-base - BAAI/bge-small-en-v1.5 datasets: - urchade/pile-mistral-v0.1 - numind/NuNER - knowledgator/GLINER-multi-task-synthetic-data language: - en library_name: gliner license: apache-2.0 pipeline_tag: token-classification tags: - NER - GLiNER - information extraction - encoder - entity recognition - modernbert --- # About GLiNER is a Named Entity Recognition (NER) model capable of identifying any entity type using bidirectional transformer encoders (BERT-like). It provides a practical alternative to traditional NER models, which are limited to predefined entities, and to Large Language Models (LLMs), which, despite their flexibility, are costly and too large for resource-constrained scenarios. This particular version uses a bi-encoder architecture, where the textual encoder is [ModernBERT-base](https://huggingface.co/answerdotai/ModernBERT-base) and the entity label encoder is the sentence transformer [BGE-small-en](https://huggingface.co/BAAI/bge-small-en-v1.5). Such an architecture brings several advantages over uni-encoder GLiNER: * An unlimited number of entity types can be recognized at once; * Faster inference when entity embeddings are precomputed; * Better generalization to unseen entities. Using ModernBERT yields up to 3 times better efficiency than DeBERTa-based models and a context length of up to 8,192 tokens, while demonstrating comparable results. ![inference time comparison](modernbert_inference_time.png "Inference time comparison") However, the bi-encoder architecture has some drawbacks, such as a lack of inter-label interactions, which makes it hard for the model to disambiguate semantically similar but contextually different entities. ### Installation & Usage Install or update the gliner package: ```bash pip install gliner -U ``` You need to install the latest version of transformers to use this model: ```bash pip install git+https://github.com/huggingface/transformers.git ``` Once you've installed the GLiNER library, you can import the GLiNER class, load this model using `GLiNER.from_pretrained`, and predict entities with `predict_entities`. ```python from gliner import GLiNER model = GLiNER.from_pretrained("knowledgator/modern-gliner-bi-base-v1.0") text = """ Cristiano Ronaldo dos Santos Aveiro (Portuguese pronunciation: [kɾiʃˈtjɐnu ʁɔˈnaldu]; born 5 February 1985) is a Portuguese professional footballer who plays as a forward for and captains both Saudi Pro League club Al Nassr and the Portugal national team. Widely regarded as one of the greatest players of all time, Ronaldo has won five Ballon d'Or awards,[note 3] a record three UEFA Men's Player of the Year Awards, and four European Golden Shoes, the most by a European player. He has won 33 trophies in his career, including seven league titles, five UEFA Champions Leagues, the UEFA European Championship and the UEFA Nations League. Ronaldo holds the records for most appearances (183), goals (140) and assists (42) in the Champions League, goals in the European Championship (14), international goals (128) and international appearances (205). He is one of the few players to have made over 1,200 professional career appearances, the most by an outfield player, and has scored over 850 official senior career goals for club and country, making him the top goalscorer of all time.
""" labels = ["person", "award", "date", "competitions", "teams"] entities = model.predict_entities(text, labels, threshold=0.3) for entity in entities: print(entity["text"], "=>", entity["label"]) ``` ``` Cristiano Ronaldo dos Santos Aveiro => person 5 February 1985 => date Al Nassr => teams Portugal national team => teams Ballon d'Or => award UEFA Men's Player of the Year Awards => award European Golden Shoes => award UEFA Champions Leagues => competitions UEFA European Championship => competitions UEFA Nations League => competitions Champions League => competitions European Championship => competitions ``` If you want to use **flash attention** or increase sequence length, please, check the following code: Firstly, install flash attention and triton packages: ```bash pip install flash-attn triton ``` ```python model = GLiNER.from_pretrained("knowledgator/modern-gliner-bi-base-v1.0", _attn_implementation = 'flash_attention_2', max_len = 2048).to('cuda:0') ``` If you have a large amount of entities and want to pre-embed them, please, refer to the following code snippet: ```python labels = ["your entities"] texts = ["your texts"] entity_embeddings = model.encode_labels(labels, batch_size = 8) outputs = model.batch_predict_with_embeds(texts, entity_embeddings, labels) ``` ### Benchmarks ![results on different datasets](modernbert_benchmarking.png "Results on different datasets") Below you can see the table with benchmarking results on various named entity recognition datasets: | Dataset | Score | |-------------------------|--------| | ACE 2004 | 29.5% | | ACE 2005 | 25.5% | | AnatEM | 39.9% | | Broad Tweet Corpus | 70.9% | | CoNLL 2003 | 65.8% | | FabNER | 22.8% | | FindVehicle | 41.8% | | GENIA_NER | 46.8% | | HarveyNER | 15.2% | | MultiNERD | 70.9% | | Ontonotes | 34.9% | | PolyglotNER | 47.6% | | TweetNER7 | 38.2% | | WikiANN en | 54.2% | | WikiNeural | 81.6% | | bc2gm | 50.7% | | bc4chemd | 49.6% | | bc5cdr | 65.0% | | ncbi | 58.9% | | **Average** | **47.9%** | | | | | CrossNER_AI | 57.4% | | CrossNER_literature | 59.4% | | CrossNER_music | 71.1% | | CrossNER_politics | 73.8% | | CrossNER_science | 65.5% | | mit-movie | 48.6% | | mit-restaurant | 39.7% | | **Average (zero-shot benchmark)** | **59.4%** | ### Join Our Discord Connect with our community on Discord for news, support, and discussion about our models. Join [Discord](https://discord.gg/dkyeAgs9DG). 
## Citation If you use this model in your work, please cite: ```bibtex @misc{modernbert, title={Smarter, Better, Faster, Longer: A Modern Bidirectional Encoder for Fast, Memory Efficient, and Long Context Finetuning and Inference}, author={Benjamin Warner and Antoine Chaffin and Benjamin Clavié and Orion Weller and Oskar Hallström and Said Taghadouini and Alexis Gallagher and Raja Biswas and Faisal Ladhak and Tom Aarsen and Nathan Cooper and Griffin Adams and Jeremy Howard and Iacopo Poli}, year={2024}, eprint={2412.13663}, archivePrefix={arXiv}, primaryClass={cs.CL}, url={https://arxiv.org/abs/2412.13663}, } ``` ```bibtex @misc{zaratiana2023gliner, title={GLiNER: Generalist Model for Named Entity Recognition using Bidirectional Transformer}, author={Urchade Zaratiana and Nadi Tomeh and Pierre Holat and Thierry Charnois}, year={2023}, eprint={2311.08526}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` ```bibtex @misc{stepanov2024gliner, title={GLiNER multi-task: Generalist Lightweight Model for Various Information Extraction Tasks}, author={Ihor Stepanov and Mykhailo Shtopko}, year={2024}, eprint={2406.12925}, archivePrefix={arXiv}, primaryClass={cs.LG} } ```
[ "NAMED_ENTITY_RECOGNITION" ]
[ "ANATEM", "BC5CDR" ]
TheBloke/med42-70B-GPTQ
TheBloke
text-generation
[ "transformers", "safetensors", "llama", "text-generation", "m42", "health", "healthcare", "clinical-llm", "en", "base_model:m42-health/med42-70b", "base_model:quantized:m42-health/med42-70b", "license:other", "autotrain_compatible", "text-generation-inference", "4-bit", "gptq", "region:us" ]
2023-10-27T22:47:52
2023-10-28T02:58:43
134
1
--- base_model: m42-health/med42-70b language: - en license: other license_name: med42 model_name: Med42 70B pipeline_tag: text-generation tags: - m42 - health - healthcare - clinical-llm inference: false model_creator: M42 Health model_type: llama prompt_template: '<|system|>: You are a helpful medical assistant created by M42 Health in the UAE. <|prompter|>:{prompt} <|assistant|>: ' quantized_by: TheBloke --- <!-- markdownlint-disable MD041 --> <!-- header start --> <!-- 200823 --> <div style="width: auto; margin-left: auto; margin-right: auto"> <img src="https://i.imgur.com/EBdldam.jpg" alt="TheBlokeAI" style="width: 100%; min-width: 400px; display: block; margin: auto;"> </div> <div style="display: flex; justify-content: space-between; width: 100%;"> <div style="display: flex; flex-direction: column; align-items: flex-start;"> <p style="margin-top: 0.5em; margin-bottom: 0em;"><a href="https://discord.gg/theblokeai">Chat & support: TheBloke's Discord server</a></p> </div> <div style="display: flex; flex-direction: column; align-items: flex-end;"> <p style="margin-top: 0.5em; margin-bottom: 0em;"><a href="https://www.patreon.com/TheBlokeAI">Want to contribute? TheBloke's Patreon page</a></p> </div> </div> <div style="text-align:center; margin-top: 0em; margin-bottom: 0em"><p style="margin-top: 0.25em; margin-bottom: 0em;">TheBloke's LLM work is generously supported by a grant from <a href="https://a16z.com">andreessen horowitz (a16z)</a></p></div> <hr style="margin-top: 1.0em; margin-bottom: 1.0em;"> <!-- header end --> # Med42 70B - GPTQ - Model creator: [M42 Health](https://huggingface.co/m42-health) - Original model: [Med42 70B](https://huggingface.co/m42-health/med42-70b) <!-- description start --> ## Description This repo contains GPTQ model files for [M42 Health's Med42 70B](https://huggingface.co/m42-health/med42-70b). Multiple GPTQ parameter permutations are provided; see Provided Files below for details of the options provided, their parameters, and the software used to create them. These files were quantised using hardware kindly provided by [Massed Compute](https://massedcompute.com/). <!-- description end --> <!-- repositories-available start --> ## Repositories available * [AWQ model(s) for GPU inference.](https://huggingface.co/TheBloke/med42-70B-AWQ) * [GPTQ models for GPU inference, with multiple quantisation parameter options.](https://huggingface.co/TheBloke/med42-70B-GPTQ) * [2, 3, 4, 5, 6 and 8-bit GGUF models for CPU+GPU inference](https://huggingface.co/TheBloke/med42-70B-GGUF) * [M42 Health's original unquantised fp16 model in pytorch format, for GPU inference and for further conversions](https://huggingface.co/m42-health/med42-70b) <!-- repositories-available end --> <!-- prompt-template start --> ## Prompt template: Med42 ``` <|system|>: You are a helpful medical assistant created by M42 Health in the UAE. <|prompter|>:{prompt} <|assistant|>: ``` <!-- prompt-template end --> <!-- licensing start --> ## Licensing The creator of the source model has listed its license as `other`, and this quantization has therefore used that same license. As this model is based on Llama 2, it is also subject to the Meta Llama 2 license terms, and the license files for that are additionally included. It should therefore be considered as being claimed to be licensed under both licenses. I contacted Hugging Face for clarification on dual licensing but they do not yet have an official position. 
Should this change, or should Meta provide any feedback on this situation, I will update this section accordingly. In the meantime, any questions regarding licensing, and in particular how these two licenses might interact, should be directed to the original model repository: [M42 Health's Med42 70B](https://huggingface.co/m42-health/med42-70b). <!-- licensing end --> <!-- README_GPTQ.md-compatible clients start --> ## Known compatible clients / servers These GPTQ models are known to work in the following inference servers/webuis. - [text-generation-webui](https://github.com/oobabooga/text-generation-webui) - [KobaldAI United](https://github.com/henk717/koboldai) - [LoLLMS Web UI](https://github.com/ParisNeo/lollms-webui) - [Hugging Face Text Generation Inference (TGI)](https://github.com/huggingface/text-generation-inference) This may not be a complete list; if you know of others, please let me know! <!-- README_GPTQ.md-compatible clients end --> <!-- README_GPTQ.md-provided-files start --> ## Provided files, and GPTQ parameters Multiple quantisation parameters are provided, to allow you to choose the best one for your hardware and requirements. Each separate quant is in a different branch. See below for instructions on fetching from different branches. Most GPTQ files are made with AutoGPTQ. Mistral models are currently made with Transformers. <details> <summary>Explanation of GPTQ parameters</summary> - Bits: The bit size of the quantised model. - GS: GPTQ group size. Higher numbers use less VRAM, but have lower quantisation accuracy. "None" is the lowest possible value. - Act Order: True or False. Also known as `desc_act`. True results in better quantisation accuracy. Some GPTQ clients have had issues with models that use Act Order plus Group Size, but this is generally resolved now. - Damp %: A GPTQ parameter that affects how samples are processed for quantisation. 0.01 is default, but 0.1 results in slightly better accuracy. - GPTQ dataset: The calibration dataset used during quantisation. Using a dataset more appropriate to the model's training can improve quantisation accuracy. Note that the GPTQ calibration dataset is not the same as the dataset used to train the model - please refer to the original model repo for details of the training dataset(s). - Sequence Length: The length of the dataset sequences used for quantisation. Ideally this is the same as the model sequence length. For some very long sequence models (16+K), a lower sequence length may have to be used. Note that a lower sequence length does not limit the sequence length of the quantised model. It only impacts the quantisation accuracy on longer inference sequences. - ExLlama Compatibility: Whether this file can be loaded with ExLlama, which currently only supports Llama and Mistral models in 4-bit. </details> | Branch | Bits | GS | Act Order | Damp % | GPTQ Dataset | Seq Len | Size | ExLlama | Desc | | ------ | ---- | -- | --------- | ------ | ------------ | ------- | ---- | ------- | ---- | | [main](https://huggingface.co/TheBloke/med42-70B-GPTQ/tree/main) | 4 | None | Yes | 0.1 | [Medical Meadow WikiDoc](https://huggingface.co/datasets/medalpaca/medical_meadow_wikidoc) | 4096 | 35.33 GB | Yes | 4-bit, with Act Order. No group size, to lower VRAM requirements. 
| | [gptq-4bit-128g-actorder_True](https://huggingface.co/TheBloke/med42-70B-GPTQ/tree/gptq-4bit-128g-actorder_True) | 4 | 128 | Yes | 0.1 | [Medical Meadow WikiDoc](https://huggingface.co/datasets/medalpaca/medical_meadow_wikidoc) | 4096 | 36.65 GB | Yes | 4-bit, with Act Order and group size 128g. Uses even less VRAM than 64g, but with slightly lower accuracy. | | [gptq-4bit-32g-actorder_True](https://huggingface.co/TheBloke/med42-70B-GPTQ/tree/gptq-4bit-32g-actorder_True) | 4 | 32 | Yes | 0.1 | [Medical Meadow WikiDoc](https://huggingface.co/datasets/medalpaca/medical_meadow_wikidoc) | 4096 | 40.66 GB | Yes | 4-bit, with Act Order and group size 32g. Gives highest possible inference quality, with maximum VRAM usage. | | [gptq-3bit--1g-actorder_True](https://huggingface.co/TheBloke/med42-70B-GPTQ/tree/gptq-3bit--1g-actorder_True) | 3 | None | Yes | 0.1 | [Medical Meadow WikiDoc](https://huggingface.co/datasets/medalpaca/medical_meadow_wikidoc) | 4096 | 26.77 GB | No | 3-bit, with Act Order and no group size. Lowest possible VRAM requirements. May be lower quality than 3-bit 128g. | <!-- README_GPTQ.md-provided-files end --> <!-- README_GPTQ.md-download-from-branches start --> ## How to download, including from branches ### In text-generation-webui To download from the `main` branch, enter `TheBloke/med42-70B-GPTQ` in the "Download model" box. To download from another branch, add `:branchname` to the end of the download name, eg `TheBloke/med42-70B-GPTQ:gptq-4bit-128g-actorder_True` ### From the command line I recommend using the `huggingface-hub` Python library: ```shell pip3 install huggingface-hub ``` To download the `main` branch to a folder called `med42-70B-GPTQ`: ```shell mkdir med42-70B-GPTQ huggingface-cli download TheBloke/med42-70B-GPTQ --local-dir med42-70B-GPTQ --local-dir-use-symlinks False ``` To download from a different branch, add the `--revision` parameter: ```shell mkdir med42-70B-GPTQ huggingface-cli download TheBloke/med42-70B-GPTQ --revision gptq-4bit-128g-actorder_True --local-dir med42-70B-GPTQ --local-dir-use-symlinks False ``` <details> <summary>More advanced huggingface-cli download usage</summary> If you remove the `--local-dir-use-symlinks False` parameter, the files will instead be stored in the central Hugging Face cache directory (default location on Linux is: `~/.cache/huggingface`), and symlinks will be added to the specified `--local-dir`, pointing to their real location in the cache. This allows for interrupted downloads to be resumed, and allows you to quickly clone the repo to multiple places on disk without triggering a download again. The downside, and the reason why I don't list that as the default option, is that the files are then hidden away in a cache folder and it's harder to know where your disk space is being used, and to clear it up if/when you want to remove a download model. The cache location can be changed with the `HF_HOME` environment variable, and/or the `--cache-dir` parameter to `huggingface-cli`. For more documentation on downloading with `huggingface-cli`, please see: [HF -> Hub Python Library -> Download files -> Download from the CLI](https://huggingface.co/docs/huggingface_hub/guides/download#download-from-the-cli). 
To accelerate downloads on fast connections (1Gbit/s or higher), install `hf_transfer`: ```shell pip3 install hf_transfer ``` And set environment variable `HF_HUB_ENABLE_HF_TRANSFER` to `1`: ```shell mkdir med42-70B-GPTQ HF_HUB_ENABLE_HF_TRANSFER=1 huggingface-cli download TheBloke/med42-70B-GPTQ --local-dir med42-70B-GPTQ --local-dir-use-symlinks False ``` Windows Command Line users: You can set the environment variable by running `set HF_HUB_ENABLE_HF_TRANSFER=1` before the download command. </details> ### With `git` (**not** recommended) To clone a specific branch with `git`, use a command like this: ```shell git clone --single-branch --branch gptq-4bit-128g-actorder_True https://huggingface.co/TheBloke/med42-70B-GPTQ ``` Note that using Git with HF repos is strongly discouraged. It will be much slower than using `huggingface-hub`, and will use twice as much disk space as it has to store the model files twice (it stores every byte both in the intended target folder, and again in the `.git` folder as a blob.) <!-- README_GPTQ.md-download-from-branches end --> <!-- README_GPTQ.md-text-generation-webui start --> ## How to easily download and use this model in [text-generation-webui](https://github.com/oobabooga/text-generation-webui) Please make sure you're using the latest version of [text-generation-webui](https://github.com/oobabooga/text-generation-webui). It is strongly recommended to use the text-generation-webui one-click-installers unless you're sure you know how to make a manual install. 1. Click the **Model tab**. 2. Under **Download custom model or LoRA**, enter `TheBloke/med42-70B-GPTQ`. - To download from a specific branch, enter for example `TheBloke/med42-70B-GPTQ:gptq-4bit-128g-actorder_True` - see Provided Files above for the list of branches for each option. 3. Click **Download**. 4. The model will start downloading. Once it's finished it will say "Done". 5. In the top left, click the refresh icon next to **Model**. 6. In the **Model** dropdown, choose the model you just downloaded: `med42-70B-GPTQ` 7. The model will automatically load, and is now ready for use! 8. If you want any custom settings, set them and then click **Save settings for this model** followed by **Reload the Model** in the top right. - Note that you do not need to and should not set manual GPTQ parameters any more. These are set automatically from the file `quantize_config.json`. 9. Once you're ready, click the **Text Generation** tab and enter a prompt to get started! <!-- README_GPTQ.md-text-generation-webui end --> <!-- README_GPTQ.md-use-from-tgi start --> ## Serving this model from Text Generation Inference (TGI) It's recommended to use TGI version 1.1.0 or later. The official Docker container is: `ghcr.io/huggingface/text-generation-inference:1.1.0` Example Docker parameters: ```shell --model-id TheBloke/med42-70B-GPTQ --port 3000 --quantize gptq --max-input-length 3696 --max-total-tokens 4096 --max-batch-prefill-tokens 4096 ``` Example Python code for interfacing with TGI (requires huggingface-hub 0.17.0 or later): ```shell pip3 install huggingface-hub ``` ```python from huggingface_hub import InferenceClient endpoint_url = "https://your-endpoint-url-here" prompt = "Tell me about AI" prompt_template=f'''<|system|>: You are a helpful medical assistant created by M42 Health in the UAE. 
<|prompter|>:{prompt} <|assistant|>: ''' client = InferenceClient(endpoint_url) response = client.text_generation(prompt, max_new_tokens=128, do_sample=True, temperature=0.7, top_p=0.95, top_k=40, repetition_penalty=1.1) print(f"Model output: {response}") ``` <!-- README_GPTQ.md-use-from-tgi end --> <!-- README_GPTQ.md-use-from-python start --> ## How to use this GPTQ model from Python code ### Install the necessary packages Requires: Transformers 4.33.0 or later, Optimum 1.12.0 or later, and AutoGPTQ 0.4.2 or later. ```shell pip3 install transformers optimum pip3 install auto-gptq --extra-index-url https://huggingface.github.io/autogptq-index/whl/cu118/ # Use cu117 if on CUDA 11.7 ``` If you have problems installing AutoGPTQ using the pre-built wheels, install it from source instead: ```shell pip3 uninstall -y auto-gptq git clone https://github.com/PanQiWei/AutoGPTQ cd AutoGPTQ git checkout v0.4.2 pip3 install . ``` ### You can then use the following code ```python from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline model_name_or_path = "TheBloke/med42-70B-GPTQ" # To use a different branch, change revision # For example: revision="gptq-4bit-128g-actorder_True" model = AutoModelForCausalLM.from_pretrained(model_name_or_path, device_map="auto", trust_remote_code=False, revision="main") tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=True) prompt = "Tell me about AI" prompt_template=f'''<|system|>: You are a helpful medical assistant created by M42 Health in the UAE. <|prompter|>:{prompt} <|assistant|>: ''' print("\n\n*** Generate:") input_ids = tokenizer(prompt_template, return_tensors='pt').input_ids.cuda() output = model.generate(inputs=input_ids, temperature=0.7, do_sample=True, top_p=0.95, top_k=40, max_new_tokens=512) print(tokenizer.decode(output[0])) # Inference can also be done using transformers' pipeline print("*** Pipeline:") pipe = pipeline( "text-generation", model=model, tokenizer=tokenizer, max_new_tokens=512, do_sample=True, temperature=0.7, top_p=0.95, top_k=40, repetition_penalty=1.1 ) print(pipe(prompt_template)[0]['generated_text']) ``` <!-- README_GPTQ.md-use-from-python end --> <!-- README_GPTQ.md-compatibility start --> ## Compatibility The files provided are tested to work with Transformers. For non-Mistral models, AutoGPTQ can also be used directly. [ExLlama](https://github.com/turboderp/exllama) is compatible with Llama and Mistral models in 4-bit. Please see the Provided Files table above for per-file compatibility. For a list of clients/servers, please see "Known compatible clients / servers", above. <!-- README_GPTQ.md-compatibility end --> <!-- footer start --> <!-- 200823 --> ## Discord For further support, and discussions on these models and AI in general, join us at: [TheBloke AI's Discord server](https://discord.gg/theblokeai) ## Thanks, and how to contribute Thanks to the [chirper.ai](https://chirper.ai) team! Thanks to Clay from [gpus.llm-utils.org](llm-utils)! I've had a lot of people ask if they can contribute. I enjoy providing models and helping people, and would love to be able to spend even more time doing it, as well as expanding into new projects like fine tuning/training. If you're able and willing to contribute it will be most gratefully received and will help me to keep providing more models, and to start work on new AI projects. Donaters will get priority support on any and all AI/LLM/model questions and requests, access to a private Discord room, plus other benefits. 
* Patreon: https://patreon.com/TheBlokeAI * Ko-Fi: https://ko-fi.com/TheBlokeAI **Special thanks to**: Aemon Algiz. **Patreon special mentions**: Pierre Kircher, Stanislav Ovsiannikov, Michael Levine, Eugene Pentland, Andrey, 준교 김, Randy H, Fred von Graf, Artur Olbinski, Caitlyn Gatomon, terasurfer, Jeff Scroggin, James Bentley, Vadim, Gabriel Puliatti, Harry Royden McLaughlin, Sean Connelly, Dan Guido, Edmond Seymore, Alicia Loh, subjectnull, AzureBlack, Manuel Alberto Morcote, Thomas Belote, Lone Striker, Chris Smitley, Vitor Caleffi, Johann-Peter Hartmann, Clay Pascal, biorpg, Brandon Frisco, sidney chen, transmissions 11, Pedro Madruga, jinyuan sun, Ajan Kanaga, Emad Mostaque, Trenton Dambrowitz, Jonathan Leane, Iucharbius, usrbinkat, vamX, George Stoitzev, Luke Pendergrass, theTransient, Olakabola, Swaroop Kallakuri, Cap'n Zoog, Brandon Phillips, Michael Dempsey, Nikolai Manek, danny, Matthew Berman, Gabriel Tamborski, alfie_i, Raymond Fosdick, Tom X Nguyen, Raven Klaugh, LangChain4j, Magnesian, Illia Dulskyi, David Ziegler, Mano Prime, Luis Javier Navarrete Lozano, Erik Bjäreholt, 阿明, Nathan Dryer, Alex, Rainer Wilmers, zynix, TL, Joseph William Delisle, John Villwock, Nathan LeClaire, Willem Michiel, Joguhyik, GodLy, OG, Alps Aficionado, Jeffrey Morgan, ReadyPlayerEmma, Tiffany J. Kim, Sebastain Graf, Spencer Kim, Michael Davis, webtim, Talal Aujan, knownsqashed, John Detwiler, Imad Khwaja, Deo Leter, Jerry Meng, Elijah Stavena, Rooh Singh, Pieter, SuperWojo, Alexandros Triantafyllidis, Stephen Murray, Ai Maven, ya boyyy, Enrico Ros, Ken Nordquist, Deep Realms, Nicholas, Spiking Neurons AB, Elle, Will Dee, Jack West, RoA, Luke @flexchar, Viktor Bowallius, Derek Yates, Subspace Studios, jjj, Toran Billups, Asp the Wyvern, Fen Risland, Ilya, NimbleBox.ai, Chadd, Nitin Borwankar, Emre, Mandus, Leonard Tan, Kalila, K, Trailburnt, S_X, Cory Kujawski Thank you to all my generous patrons and donaters! And thank you again to a16z for their generous grant. <!-- footer end --> # Original model card: M42 Health's Med42 70B # **Med42 - Clinical Large Language Model** Med42 is an open-access clinical large language model (LLM) developed by M42 to expand access to medical knowledge. Built off LLaMA-2 and comprising 70 billion parameters, this generative AI system provides high-quality answers to medical questions. ## Model Details *Note: Use of this model is governed by the M42 Health license. In order to download the model weights (and tokenizer), please read the [Med42 License](https://huggingface.co/spaces/m42-health/License) and accept our License by requesting access here.* Beginning with the base LLaMa-2 model, Med42 was instruction-tuned on a dataset of ~250M tokens compiled from different open-access sources, including medical flashcards, exam questions, and open-domain dialogues. **Model Developers:** M42 Health AI Team **Finetuned from model:** Llama-2 - 70B **Context length:** 4k tokens **Input:** Text only data **Output:** Model generates text only **Status:** This is a static model trained on an offline dataset. Future versions of the tuned models will be released as we enhance model's performance. **License:** A custom license is available [here](https://huggingface.co/spaces/m42-health/License) **Research Paper:** TBA ## Intended Use Med42 is being made available for further testing and assessment as an AI assistant to enhance clinical decision-making and enhance access to an LLM for healthcare use. 
Potential use cases include:
- Medical question answering
- Patient record summarization
- Aiding medical diagnosis
- General health Q&A

To get the expected features and performance from the model, a specific prompt format needs to be followed, including the `<|system|>`, `<|prompter|>` and `<|assistant|>` tags.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name_or_path = "m42-health/med42-70b"
model = AutoModelForCausalLM.from_pretrained(model_name_or_path, device_map="auto")
tokenizer = AutoTokenizer.from_pretrained(model_name_or_path)

prompt = "What are the symptoms of diabetes?"
prompt_template = f'''
<|system|>: You are a helpful medical assistant created by M42 Health in the UAE.
<|prompter|>:{prompt}
<|assistant|>:
'''

input_ids = tokenizer(prompt_template, return_tensors='pt').input_ids.cuda()
output = model.generate(inputs=input_ids, temperature=0.7, do_sample=True, eos_token_id=tokenizer.eos_token_id,
                        pad_token_id=tokenizer.pad_token_id, max_new_tokens=512)
print(tokenizer.decode(output[0]))
```

## Hardware and Software

The training process was performed on the Condor Galaxy 1 (CG-1) supercomputer platform.

## Evaluation Results

Med42 achieves competitive performance on various medical benchmarks, including MedQA, MedMCQA, PubMedQA, HeadQA, and Measuring Massive Multitask Language Understanding (MMLU) clinical topics. For all evaluations reported so far, we use [EleutherAI's evaluation harness library](https://github.com/EleutherAI/lm-evaluation-harness) and report zero-shot accuracies (unless otherwise stated). We compare the performance with that reported for other models (ClinicalCamel-70B, GPT-3.5, GPT-4.0, Med-PaLM 2).

|Dataset|Med42|ClinicalCamel-70B|GPT-3.5|GPT-4.0|Med-PaLM-2 (5-shot)*|
|---|---|---|---|---|---|
|MMLU Clinical Knowledge|74.3|69.8|69.8|86.0|88.3|
|MMLU College Biology|84.0|79.2|72.2|95.1|94.4|
|MMLU College Medicine|68.8|67.0|61.3|76.9|80.9|
|MMLU Medical Genetics|86.0|69.0|70.0|91.0|90.0|
|MMLU Professional Medicine|79.8|71.3|70.2|93.0|95.2|
|MMLU Anatomy|67.4|62.2|56.3|80.0|77.8|
|MedMCQA|60.9|47.0|50.1|69.5|71.3|
|MedQA|61.5|53.4|50.8|78.9|79.7|
|USMLE Self-Assessment|71.7|-|49.1|83.8|-|
|USMLE Sample Exam|72.0|54.3|56.9|84.3|-|

\*We note that 0-shot performance is not reported for Med-PaLM 2. Further details can be found at [https://github.com/m42health/med42](https://github.com/m42health/med42).

### Key performance metrics:
- Med42 achieves a 72% accuracy on the US Medical Licensing Examination (USMLE) sample exam, surpassing the prior state of the art among openly available medical LLMs.
- 61.5% accuracy on the MedQA dataset (compared to 50.8% for GPT-3.5).
- Consistently higher performance on MMLU clinical topics compared to GPT-3.5.

## Limitations & Safe Use

- Med42 is not ready for real clinical use. Extensive human evaluation is still underway, as it is required to ensure safety.
- Potential for generating incorrect or harmful information.
- Risk of perpetuating biases in training data.

Use this model responsibly! Do not rely on it for medical usage without rigorous safety testing.
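As a rough, illustrative sketch of how zero-shot multiple-choice accuracy of the kind reported in the Evaluation Results section can be computed, one can compare the model's total log-likelihood of each answer option under the Med42 prompt format. This is a simplified stand-in, not the exact procedure used by the evaluation harness, and the question and options below are made up for illustration:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name_or_path = "m42-health/med42-70b"
tokenizer = AutoTokenizer.from_pretrained(model_name_or_path)
# Requires the same multi-GPU setup as the generation example above.
model = AutoModelForCausalLM.from_pretrained(model_name_or_path, device_map="auto")
model.eval()

def option_logprob(question: str, option: str) -> float:
    # Score an answer option by the sum of its token log-probabilities,
    # conditioned on the question wrapped in the Med42 prompt format.
    prompt = (
        "<|system|>: You are a helpful medical assistant created by M42 Health in the UAE.\n"
        f"<|prompter|>:{question}\n<|assistant|>: "
    )
    prompt_len = tokenizer(prompt, return_tensors="pt").input_ids.shape[1]
    full_ids = tokenizer(prompt + option, return_tensors="pt").input_ids.to(model.device)
    with torch.no_grad():
        logits = model(full_ids).logits
    # Next-token log-probabilities: position i predicts token i + 1.
    log_probs = torch.log_softmax(logits[0, :-1], dim=-1)
    targets = full_ids[0, 1:]
    # Note: re-tokenizing prompt + option can merge tokens at the boundary;
    # the harness handles this more carefully, but it is fine for a sketch.
    option_positions = range(prompt_len - 1, full_ids.shape[1] - 1)
    return sum(log_probs[i, int(targets[i])].item() for i in option_positions)

# Hypothetical multiple-choice item, purely for illustration.
question = "Which vitamin deficiency causes scurvy?"
options = ["Vitamin A", "Vitamin B12", "Vitamin C", "Vitamin D"]
prediction = max(options, key=lambda opt: option_logprob(question, opt))
print(prediction)
```

The predicted option is the one whose tokens receive the highest total log-probability; benchmark accuracy is then the fraction of questions for which this prediction matches the gold answer.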
## Accessing Med42 and Reporting Issues Please report any software "bug" or other problems through one of the following means: - Reporting issues with the model: [https://github.com/m42health/med42](https://github.com/m42health/med42) - Reporting risky content generated by the model, bugs and/or any security concerns: [https://forms.office.com/r/YMJu3kcKat](https://forms.office.com/r/YMJu3kcKat) - M42’s privacy policy available at [https://m42.ae/privacy-policy/](https://m42.ae/privacy-policy/) - Reporting violations of the Acceptable Use Policy or unlicensed uses of Med42: <[email protected]>
[ "QUESTION_ANSWERING", "SUMMARIZATION" ]
[ "MEDQA", "PUBMEDQA" ]
tau/t5-v1_1-large-rss
tau
text2text-generation
[ "transformers", "pytorch", "t5", "text2text-generation", "en", "dataset:c4", "dataset:wikipedia", "arxiv:2108.05857", "arxiv:2101.00438", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
2022-03-02T23:29:05
2021-08-20T17:35:51
133
0
---
datasets:
- c4
- wikipedia
language: en
metrics:
- f1
---

# T5-V1.1-large-rss

This model is [T5-v1.1-large](https://huggingface.co/google/t5-v1_1-large) finetuned with Recurrent Span Selection (RSS).
The model was finetuned as part of ["How Optimal is Greedy Decoding for Extractive Question Answering?"](https://arxiv.org/abs/2108.05857), while the RSS pretraining method was introduced in [this paper](https://arxiv.org/pdf/2101.00438.pdf).

## Model description

The original [T5-v1.1-large](https://huggingface.co/google/t5-v1_1-large) was pre-trained only on C4, with no supervised training. Our version is further trained with the Recurrent Span Selection (RSS) scheme, using a sample from the dataset used to pretrain [Splinter](https://huggingface.co/tau/splinter-large):
* contexts with a span occurring more than once are detected
* a single instance of the recurring span is masked
* the model is trained (teacher forcing) to predict the masked span

This training scheme naturally matches the extractive question answering task.

During training, the masked span is replaced with `<extra_id_0>` and the labels are formatted as `<extra_id_0>span<extra_id_1>`. Unlike [Splinter](https://huggingface.co/tau/splinter-large), only one span is masked at a time.

## Intended uses & limitations

This model naturally fits tasks where a span from a context is intended to be copied, like extractive question answering. This checkpoint is primarily aimed at zero-shot use - further fine-tuning it on an annotated dataset gives results equal to those of the original T5-v1.1-large.

### How to use

You can use this model directly, but it is recommended to format the input to match the training scheme, as a text-question-answer prompt in which the answer slot holds the mask token:

```python
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

model = AutoModelForSeq2SeqLM.from_pretrained('tau/t5-v1_1-large-rss')
tokenizer = AutoTokenizer.from_pretrained('tau/t5-v1_1-large-rss')

passage = 'Barack Hussein Obama II is an American politician and attorney who served as the 44th president of the United States from 2009 to 2017.'
question = 'When was Obama inaugurated?'
text = f'Text: {passage}\nQuestion: {question}\nAnswer:{tokenizer.additional_special_tokens[0]}.'
encoded_input = tokenizer(text, return_tensors='pt')
output_ids = model.generate(input_ids=encoded_input.input_ids, attention_mask=encoded_input.attention_mask,
                            eos_token_id=tokenizer.additional_special_tokens_ids[1],
                            num_beams=1, max_length=512, min_length=3)
print(tokenizer.decode(output_ids[0]))
```

The generated answer is then `"<pad><extra_id_0> 2009<extra_id_1>"`, while the one generated by the original [T5-v1.1-large](https://huggingface.co/google/t5-v1_1-large) is `"<pad><extra_id_0> On January 20, 2009<extra_id_1>"` - a correct yet non-extractive answer.

### Limitations and bias

Although greedy decoding tends to produce extractive outputs, the model may sometimes generate non-extractive ones - be it a different casing or a whole different string (or substring) that carries another semantic meaning.

### Pretraining

The model was finetuned on 100,000 RSS examples for 3 epochs using the Adafactor optimizer with a constant learning rate of 5e-5.
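To make the masking format described above concrete, here is a small illustrative sketch (our own code, not taken from the original training pipeline) of how a single RSS training pair could be constructed from a context containing a recurring span:

```python
import re

def build_rss_example(context: str, span: str):
    # Illustrative only: mask one occurrence of a recurring span and build
    # the T5-style target the model is trained to generate.
    occurrences = [m.start() for m in re.finditer(re.escape(span), context)]
    if len(occurrences) < 2:
        raise ValueError("RSS requires a span that occurs more than once.")
    # Mask a single instance of the recurring span (here: the last one).
    start = occurrences[-1]
    masked_context = context[:start] + "<extra_id_0>" + context[start + len(span):]
    target = f"<extra_id_0>{span}<extra_id_1>"
    return masked_context, target

context = ("Paris is the capital of France. "
           "As the capital of France, Paris hosts the national government.")
model_input, label = build_rss_example(context, "capital of France")
print(model_input)
print(label)
```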
## Evaluation results

Evaluated on extractive question answering datasets in a zero-shot setting (no finetuning on annotated examples); values are F1 scores:

|Model \ Dataset| SQuAD |TriviaQA | NaturalQs | NewsQA | SearchQA | HotpotQA | BioASQ | TextbookQA|
|:-------------:|:-----:|:-------:|:---------:|:------:|:--------:|:--------:|:------:|:---------:|
|T5             | 50.4  | 61.7    | 42.1      | 19.2   | 24.0     | 43.3     | 55.5   | 17.8      |
|T5-rss         | 71.4  | 69.3    | 57.2      | 43.2   | 29.7     | 59.0     | 65.5   | 39.0      |

The gap between the two models diminishes as more training examples are introduced; for additional results, see the [paper](https://arxiv.org/abs/2108.05857).

### BibTeX entry and citation info

```bibtex
@inproceedings{ram-etal-2021-shot,
    title = "Few-Shot Question Answering by Pretraining Span Selection",
    author = "Ram, Ori and Kirstain, Yuval and Berant, Jonathan and Globerson, Amir and Levy, Omer",
    booktitle = "Proceedings of the 59th Annual Meeting of the Association for Computational Linguistics and the 11th International Joint Conference on Natural Language Processing (Volume 1: Long Papers)",
    month = aug,
    year = "2021",
    address = "Online",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2021.acl-long.239",
    doi = "10.18653/v1/2021.acl-long.239",
    pages = "3066--3079",
}

@misc{castel2021optimal,
    title={How Optimal is Greedy Decoding for Extractive Question Answering?},
    author={Or Castel and Ori Ram and Avia Efrat and Omer Levy},
    year={2021},
    eprint={2108.05857},
    archivePrefix={arXiv},
    primaryClass={cs.CL}
}
```
[ "QUESTION_ANSWERING" ]
[ "BEAR" ]
sam-babayev/sf_model_e5
sam-babayev
feature-extraction
[ "transformers", "safetensors", "bert", "feature-extraction", "mteb", "model-index", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
2023-11-09T23:12:19
2023-11-14T15:47:11
132
2
--- tags: - mteb model-index: - name: sf_model_e5 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 70.85074626865672 - type: ap value: 33.779217850079206 - type: f1 value: 64.96977487239377 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 91.80945 - type: ap value: 88.22978189506895 - type: f1 value: 91.7858219911604 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.94200000000001 - type: f1 value: 47.911934405973895 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 39.616 - type: map_at_10 value: 55.938 - type: map_at_100 value: 56.552 - type: map_at_1000 value: 56.556 - type: map_at_3 value: 51.754 - type: map_at_5 value: 54.623999999999995 - type: mrr_at_1 value: 40.967 - type: mrr_at_10 value: 56.452999999999996 - type: mrr_at_100 value: 57.053 - type: mrr_at_1000 value: 57.057 - type: mrr_at_3 value: 52.312000000000005 - type: mrr_at_5 value: 55.1 - type: ndcg_at_1 value: 39.616 - type: ndcg_at_10 value: 64.067 - type: ndcg_at_100 value: 66.384 - type: ndcg_at_1000 value: 66.468 - type: ndcg_at_3 value: 55.74 - type: ndcg_at_5 value: 60.889 - type: precision_at_1 value: 39.616 - type: precision_at_10 value: 8.953999999999999 - type: precision_at_100 value: 0.9900000000000001 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 22.428 - type: precision_at_5 value: 15.946 - type: recall_at_1 value: 39.616 - type: recall_at_10 value: 89.545 - type: recall_at_100 value: 99.004 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 67.283 - type: recall_at_5 value: 79.73 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 48.72923923743124 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 42.87449955203238 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 64.3214434754065 - type: mrr value: 77.87879787187265 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 88.82418607751953 - type: cos_sim_spearman value: 86.74535004562274 - type: euclidean_pearson value: 86.58792166831103 - type: euclidean_spearman value: 86.74535004562274 - type: manhattan_pearson value: 86.23957813056677 - type: manhattan_spearman value: 86.41522204150452 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 84.61363636363636 - 
type: f1 value: 83.98373241136187 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 39.73148995791471 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 37.23723038699733 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 32.217 - type: map_at_10 value: 43.453 - type: map_at_100 value: 45.038 - type: map_at_1000 value: 45.162 - type: map_at_3 value: 39.589 - type: map_at_5 value: 41.697 - type: mrr_at_1 value: 39.628 - type: mrr_at_10 value: 49.698 - type: mrr_at_100 value: 50.44 - type: mrr_at_1000 value: 50.482000000000006 - type: mrr_at_3 value: 46.781 - type: mrr_at_5 value: 48.548 - type: ndcg_at_1 value: 39.628 - type: ndcg_at_10 value: 50.158 - type: ndcg_at_100 value: 55.687 - type: ndcg_at_1000 value: 57.499 - type: ndcg_at_3 value: 44.594 - type: ndcg_at_5 value: 47.198 - type: precision_at_1 value: 39.628 - type: precision_at_10 value: 9.828000000000001 - type: precision_at_100 value: 1.591 - type: precision_at_1000 value: 0.20600000000000002 - type: precision_at_3 value: 21.507 - type: precision_at_5 value: 15.765 - type: recall_at_1 value: 32.217 - type: recall_at_10 value: 62.717999999999996 - type: recall_at_100 value: 85.992 - type: recall_at_1000 value: 97.271 - type: recall_at_3 value: 46.694 - type: recall_at_5 value: 53.952 - type: map_at_1 value: 30.862000000000002 - type: map_at_10 value: 41.287 - type: map_at_100 value: 42.526 - type: map_at_1000 value: 42.653999999999996 - type: map_at_3 value: 38.055 - type: map_at_5 value: 40.022000000000006 - type: mrr_at_1 value: 38.408 - type: mrr_at_10 value: 46.943 - type: mrr_at_100 value: 47.597 - type: mrr_at_1000 value: 47.64 - type: mrr_at_3 value: 44.607 - type: mrr_at_5 value: 46.079 - type: ndcg_at_1 value: 38.408 - type: ndcg_at_10 value: 46.936 - type: ndcg_at_100 value: 51.307 - type: ndcg_at_1000 value: 53.312000000000005 - type: ndcg_at_3 value: 42.579 - type: ndcg_at_5 value: 44.877 - type: precision_at_1 value: 38.408 - type: precision_at_10 value: 8.885 - type: precision_at_100 value: 1.4449999999999998 - type: precision_at_1000 value: 0.192 - type: precision_at_3 value: 20.616 - type: precision_at_5 value: 14.841 - type: recall_at_1 value: 30.862000000000002 - type: recall_at_10 value: 56.994 - type: recall_at_100 value: 75.347 - type: recall_at_1000 value: 87.911 - type: recall_at_3 value: 44.230000000000004 - type: recall_at_5 value: 50.625 - type: map_at_1 value: 39.076 - type: map_at_10 value: 52.535 - type: map_at_100 value: 53.537 - type: map_at_1000 value: 53.591 - type: map_at_3 value: 48.961 - type: map_at_5 value: 50.96000000000001 - type: mrr_at_1 value: 44.765 - type: mrr_at_10 value: 55.615 - type: mrr_at_100 value: 56.24 - type: mrr_at_1000 value: 56.264 - type: mrr_at_3 value: 52.925999999999995 - type: mrr_at_5 value: 54.493 - type: ndcg_at_1 value: 44.765 - type: ndcg_at_10 value: 58.777 - type: ndcg_at_100 value: 62.574 - type: ndcg_at_1000 value: 63.624 - type: ndcg_at_3 value: 52.81 - type: ndcg_at_5 value: 55.657999999999994 - type: precision_at_1 value: 44.765 - type: precision_at_10 value: 9.693 - type: precision_at_100 value: 1.248 - type: 
precision_at_1000 value: 0.13799999999999998 - type: precision_at_3 value: 23.866 - type: precision_at_5 value: 16.489 - type: recall_at_1 value: 39.076 - type: recall_at_10 value: 74.01299999999999 - type: recall_at_100 value: 90.363 - type: recall_at_1000 value: 97.782 - type: recall_at_3 value: 58.056 - type: recall_at_5 value: 65.029 - type: map_at_1 value: 26.357000000000003 - type: map_at_10 value: 35.492000000000004 - type: map_at_100 value: 36.504999999999995 - type: map_at_1000 value: 36.578 - type: map_at_3 value: 32.696999999999996 - type: map_at_5 value: 34.388999999999996 - type: mrr_at_1 value: 28.136 - type: mrr_at_10 value: 37.383 - type: mrr_at_100 value: 38.271 - type: mrr_at_1000 value: 38.324999999999996 - type: mrr_at_3 value: 34.782999999999994 - type: mrr_at_5 value: 36.416 - type: ndcg_at_1 value: 28.136 - type: ndcg_at_10 value: 40.741 - type: ndcg_at_100 value: 45.803 - type: ndcg_at_1000 value: 47.637 - type: ndcg_at_3 value: 35.412 - type: ndcg_at_5 value: 38.251000000000005 - type: precision_at_1 value: 28.136 - type: precision_at_10 value: 6.315999999999999 - type: precision_at_100 value: 0.931 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 15.254000000000001 - type: precision_at_5 value: 10.757 - type: recall_at_1 value: 26.357000000000003 - type: recall_at_10 value: 55.021 - type: recall_at_100 value: 78.501 - type: recall_at_1000 value: 92.133 - type: recall_at_3 value: 40.798 - type: recall_at_5 value: 47.591 - type: map_at_1 value: 17.302 - type: map_at_10 value: 26.365 - type: map_at_100 value: 27.581 - type: map_at_1000 value: 27.705999999999996 - type: map_at_3 value: 23.682 - type: map_at_5 value: 25.304 - type: mrr_at_1 value: 21.891 - type: mrr_at_10 value: 31.227 - type: mrr_at_100 value: 32.22 - type: mrr_at_1000 value: 32.282 - type: mrr_at_3 value: 28.711 - type: mrr_at_5 value: 30.314999999999998 - type: ndcg_at_1 value: 21.891 - type: ndcg_at_10 value: 31.965 - type: ndcg_at_100 value: 37.869 - type: ndcg_at_1000 value: 40.642 - type: ndcg_at_3 value: 27.184 - type: ndcg_at_5 value: 29.686 - type: precision_at_1 value: 21.891 - type: precision_at_10 value: 5.9830000000000005 - type: precision_at_100 value: 1.0250000000000001 - type: precision_at_1000 value: 0.14100000000000001 - type: precision_at_3 value: 13.391 - type: precision_at_5 value: 9.801 - type: recall_at_1 value: 17.302 - type: recall_at_10 value: 44.312000000000005 - type: recall_at_100 value: 70.274 - type: recall_at_1000 value: 89.709 - type: recall_at_3 value: 31.117 - type: recall_at_5 value: 37.511 - type: map_at_1 value: 29.404000000000003 - type: map_at_10 value: 40.571 - type: map_at_100 value: 42.049 - type: map_at_1000 value: 42.156 - type: map_at_3 value: 37.413000000000004 - type: map_at_5 value: 39.206 - type: mrr_at_1 value: 36.285000000000004 - type: mrr_at_10 value: 46.213 - type: mrr_at_100 value: 47.129 - type: mrr_at_1000 value: 47.168 - type: mrr_at_3 value: 43.84 - type: mrr_at_5 value: 45.226 - type: ndcg_at_1 value: 36.285000000000004 - type: ndcg_at_10 value: 46.809 - type: ndcg_at_100 value: 52.615 - type: ndcg_at_1000 value: 54.538 - type: ndcg_at_3 value: 41.91 - type: ndcg_at_5 value: 44.224999999999994 - type: precision_at_1 value: 36.285000000000004 - type: precision_at_10 value: 8.527 - type: precision_at_100 value: 1.3259999999999998 - type: precision_at_1000 value: 0.167 - type: precision_at_3 value: 20.083000000000002 - type: precision_at_5 value: 14.071 - type: recall_at_1 value: 29.404000000000003 - type: 
recall_at_10 value: 59.611999999999995 - type: recall_at_100 value: 83.383 - type: recall_at_1000 value: 95.703 - type: recall_at_3 value: 45.663 - type: recall_at_5 value: 51.971999999999994 - type: map_at_1 value: 25.317 - type: map_at_10 value: 35.217999999999996 - type: map_at_100 value: 36.665 - type: map_at_1000 value: 36.768 - type: map_at_3 value: 31.924000000000003 - type: map_at_5 value: 33.591 - type: mrr_at_1 value: 31.507 - type: mrr_at_10 value: 40.671 - type: mrr_at_100 value: 41.609 - type: mrr_at_1000 value: 41.657 - type: mrr_at_3 value: 38.261 - type: mrr_at_5 value: 39.431 - type: ndcg_at_1 value: 31.507 - type: ndcg_at_10 value: 41.375 - type: ndcg_at_100 value: 47.426 - type: ndcg_at_1000 value: 49.504 - type: ndcg_at_3 value: 35.989 - type: ndcg_at_5 value: 38.068000000000005 - type: precision_at_1 value: 31.507 - type: precision_at_10 value: 7.8420000000000005 - type: precision_at_100 value: 1.257 - type: precision_at_1000 value: 0.16199999999999998 - type: precision_at_3 value: 17.352 - type: precision_at_5 value: 12.328999999999999 - type: recall_at_1 value: 25.317 - type: recall_at_10 value: 54.254999999999995 - type: recall_at_100 value: 80.184 - type: recall_at_1000 value: 94.07 - type: recall_at_3 value: 39.117000000000004 - type: recall_at_5 value: 44.711 - type: map_at_1 value: 25.813000000000002 - type: map_at_10 value: 35.47183333333334 - type: map_at_100 value: 36.71775 - type: map_at_1000 value: 36.833000000000006 - type: map_at_3 value: 32.449916666666674 - type: map_at_5 value: 34.1235 - type: mrr_at_1 value: 30.766750000000005 - type: mrr_at_10 value: 39.77508333333334 - type: mrr_at_100 value: 40.64233333333333 - type: mrr_at_1000 value: 40.69658333333333 - type: mrr_at_3 value: 37.27349999999999 - type: mrr_at_5 value: 38.723416666666665 - type: ndcg_at_1 value: 30.766750000000005 - type: ndcg_at_10 value: 41.141416666666665 - type: ndcg_at_100 value: 46.42016666666666 - type: ndcg_at_1000 value: 48.61916666666667 - type: ndcg_at_3 value: 36.06883333333333 - type: ndcg_at_5 value: 38.43966666666666 - type: precision_at_1 value: 30.766750000000005 - type: precision_at_10 value: 7.340000000000001 - type: precision_at_100 value: 1.1796666666666666 - type: precision_at_1000 value: 0.15625 - type: precision_at_3 value: 16.763833333333334 - type: precision_at_5 value: 11.972166666666666 - type: recall_at_1 value: 25.813000000000002 - type: recall_at_10 value: 53.62741666666667 - type: recall_at_100 value: 76.70125000000002 - type: recall_at_1000 value: 91.85566666666666 - type: recall_at_3 value: 39.55075 - type: recall_at_5 value: 45.645250000000004 - type: map_at_1 value: 23.249 - type: map_at_10 value: 31.095 - type: map_at_100 value: 32.056000000000004 - type: map_at_1000 value: 32.163000000000004 - type: map_at_3 value: 29.275000000000002 - type: map_at_5 value: 30.333 - type: mrr_at_1 value: 26.687 - type: mrr_at_10 value: 34.122 - type: mrr_at_100 value: 34.958 - type: mrr_at_1000 value: 35.039 - type: mrr_at_3 value: 32.541 - type: mrr_at_5 value: 33.43 - type: ndcg_at_1 value: 26.687 - type: ndcg_at_10 value: 35.248000000000005 - type: ndcg_at_100 value: 39.933 - type: ndcg_at_1000 value: 42.616 - type: ndcg_at_3 value: 31.980999999999998 - type: ndcg_at_5 value: 33.583 - type: precision_at_1 value: 26.687 - type: precision_at_10 value: 5.445 - type: precision_at_100 value: 0.848 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_3 value: 13.957 - type: precision_at_5 value: 9.479 - type: recall_at_1 value: 23.249 - type: 
recall_at_10 value: 45.005 - type: recall_at_100 value: 66.175 - type: recall_at_1000 value: 86.116 - type: recall_at_3 value: 36.03 - type: recall_at_5 value: 40.037 - type: map_at_1 value: 17.592 - type: map_at_10 value: 25.003999999999998 - type: map_at_100 value: 26.208 - type: map_at_1000 value: 26.333000000000002 - type: map_at_3 value: 22.479 - type: map_at_5 value: 23.712 - type: mrr_at_1 value: 21.37 - type: mrr_at_10 value: 28.951999999999998 - type: mrr_at_100 value: 29.915999999999997 - type: mrr_at_1000 value: 29.99 - type: mrr_at_3 value: 26.503 - type: mrr_at_5 value: 27.728 - type: ndcg_at_1 value: 21.37 - type: ndcg_at_10 value: 29.944 - type: ndcg_at_100 value: 35.632000000000005 - type: ndcg_at_1000 value: 38.393 - type: ndcg_at_3 value: 25.263999999999996 - type: ndcg_at_5 value: 27.115000000000002 - type: precision_at_1 value: 21.37 - type: precision_at_10 value: 5.568 - type: precision_at_100 value: 0.992 - type: precision_at_1000 value: 0.13999999999999999 - type: precision_at_3 value: 11.895 - type: precision_at_5 value: 8.61 - type: recall_at_1 value: 17.592 - type: recall_at_10 value: 40.976 - type: recall_at_100 value: 66.487 - type: recall_at_1000 value: 85.954 - type: recall_at_3 value: 27.797 - type: recall_at_5 value: 32.553 - type: map_at_1 value: 25.173000000000002 - type: map_at_10 value: 34.611999999999995 - type: map_at_100 value: 35.735 - type: map_at_1000 value: 35.842 - type: map_at_3 value: 31.345 - type: map_at_5 value: 33.123000000000005 - type: mrr_at_1 value: 29.570999999999998 - type: mrr_at_10 value: 38.775999999999996 - type: mrr_at_100 value: 39.621 - type: mrr_at_1000 value: 39.684000000000005 - type: mrr_at_3 value: 35.992000000000004 - type: mrr_at_5 value: 37.586999999999996 - type: ndcg_at_1 value: 29.570999999999998 - type: ndcg_at_10 value: 40.388000000000005 - type: ndcg_at_100 value: 45.59 - type: ndcg_at_1000 value: 47.948 - type: ndcg_at_3 value: 34.497 - type: ndcg_at_5 value: 37.201 - type: precision_at_1 value: 29.570999999999998 - type: precision_at_10 value: 6.931 - type: precision_at_100 value: 1.082 - type: precision_at_1000 value: 0.13999999999999999 - type: precision_at_3 value: 15.609 - type: precision_at_5 value: 11.286999999999999 - type: recall_at_1 value: 25.173000000000002 - type: recall_at_10 value: 53.949000000000005 - type: recall_at_100 value: 76.536 - type: recall_at_1000 value: 92.979 - type: recall_at_3 value: 37.987 - type: recall_at_5 value: 44.689 - type: map_at_1 value: 24.224 - type: map_at_10 value: 32.903 - type: map_at_100 value: 34.65 - type: map_at_1000 value: 34.873 - type: map_at_3 value: 29.673 - type: map_at_5 value: 31.361 - type: mrr_at_1 value: 30.435000000000002 - type: mrr_at_10 value: 38.677 - type: mrr_at_100 value: 39.805 - type: mrr_at_1000 value: 39.851 - type: mrr_at_3 value: 35.935 - type: mrr_at_5 value: 37.566 - type: ndcg_at_1 value: 30.435000000000002 - type: ndcg_at_10 value: 39.012 - type: ndcg_at_100 value: 45.553 - type: ndcg_at_1000 value: 47.919 - type: ndcg_at_3 value: 33.809 - type: ndcg_at_5 value: 36.120999999999995 - type: precision_at_1 value: 30.435000000000002 - type: precision_at_10 value: 7.628 - type: precision_at_100 value: 1.5810000000000002 - type: precision_at_1000 value: 0.243 - type: precision_at_3 value: 15.744 - type: precision_at_5 value: 11.66 - type: recall_at_1 value: 24.224 - type: recall_at_10 value: 50.009 - type: recall_at_100 value: 78.839 - type: recall_at_1000 value: 93.71300000000001 - type: recall_at_3 value: 35.512 - type: recall_at_5 value: 
41.541 - type: map_at_1 value: 18.983 - type: map_at_10 value: 27.127000000000002 - type: map_at_100 value: 28.063 - type: map_at_1000 value: 28.17 - type: map_at_3 value: 24.306 - type: map_at_5 value: 25.784000000000002 - type: mrr_at_1 value: 20.518 - type: mrr_at_10 value: 29.024 - type: mrr_at_100 value: 29.902 - type: mrr_at_1000 value: 29.976999999999997 - type: mrr_at_3 value: 26.401999999999997 - type: mrr_at_5 value: 27.862 - type: ndcg_at_1 value: 20.518 - type: ndcg_at_10 value: 32.344 - type: ndcg_at_100 value: 37.053000000000004 - type: ndcg_at_1000 value: 39.798 - type: ndcg_at_3 value: 26.796999999999997 - type: ndcg_at_5 value: 29.293000000000003 - type: precision_at_1 value: 20.518 - type: precision_at_10 value: 5.434 - type: precision_at_100 value: 0.83 - type: precision_at_1000 value: 0.11800000000000001 - type: precision_at_3 value: 11.892 - type: precision_at_5 value: 8.577 - type: recall_at_1 value: 18.983 - type: recall_at_10 value: 46.665 - type: recall_at_100 value: 68.33399999999999 - type: recall_at_1000 value: 88.927 - type: recall_at_3 value: 31.608000000000004 - type: recall_at_5 value: 37.532 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 11.200000000000001 - type: map_at_10 value: 20.241999999999997 - type: map_at_100 value: 22.357 - type: map_at_1000 value: 22.556 - type: map_at_3 value: 16.564999999999998 - type: map_at_5 value: 18.443 - type: mrr_at_1 value: 25.277 - type: mrr_at_10 value: 37.582 - type: mrr_at_100 value: 38.525999999999996 - type: mrr_at_1000 value: 38.564 - type: mrr_at_3 value: 33.898 - type: mrr_at_5 value: 36.191 - type: ndcg_at_1 value: 25.277 - type: ndcg_at_10 value: 28.74 - type: ndcg_at_100 value: 36.665 - type: ndcg_at_1000 value: 40.08 - type: ndcg_at_3 value: 22.888 - type: ndcg_at_5 value: 25.081999999999997 - type: precision_at_1 value: 25.277 - type: precision_at_10 value: 9.251 - type: precision_at_100 value: 1.773 - type: precision_at_1000 value: 0.241 - type: precision_at_3 value: 17.329 - type: precision_at_5 value: 13.746 - type: recall_at_1 value: 11.200000000000001 - type: recall_at_10 value: 35.419 - type: recall_at_100 value: 62.41 - type: recall_at_1000 value: 81.467 - type: recall_at_3 value: 21.275 - type: recall_at_5 value: 27.201999999999998 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 9.396 - type: map_at_10 value: 20.735 - type: map_at_100 value: 30.098000000000003 - type: map_at_1000 value: 31.866 - type: map_at_3 value: 14.71 - type: map_at_5 value: 17.259 - type: mrr_at_1 value: 70.25 - type: mrr_at_10 value: 77.09700000000001 - type: mrr_at_100 value: 77.398 - type: mrr_at_1000 value: 77.40899999999999 - type: mrr_at_3 value: 75.542 - type: mrr_at_5 value: 76.354 - type: ndcg_at_1 value: 57.75 - type: ndcg_at_10 value: 42.509 - type: ndcg_at_100 value: 48.94 - type: ndcg_at_1000 value: 56.501000000000005 - type: ndcg_at_3 value: 46.827000000000005 - type: ndcg_at_5 value: 44.033 - type: precision_at_1 value: 70.25 - type: precision_at_10 value: 33.85 - type: precision_at_100 value: 11.373 - type: precision_at_1000 value: 2.136 - type: precision_at_3 value: 50.917 - type: precision_at_5 value: 42.8 - type: recall_at_1 value: 9.396 - type: recall_at_10 value: 26.472 - type: recall_at_100 value: 57.30800000000001 - type: recall_at_1000 value: 80.983 - type: recall_at_3 value: 15.859000000000002 - 
type: recall_at_5 value: 19.758 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 54.900000000000006 - type: f1 value: 48.14707395235448 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 66.369 - type: map_at_10 value: 76.708 - type: map_at_100 value: 76.981 - type: map_at_1000 value: 76.995 - type: map_at_3 value: 75.114 - type: map_at_5 value: 76.116 - type: mrr_at_1 value: 71.557 - type: mrr_at_10 value: 80.95 - type: mrr_at_100 value: 81.075 - type: mrr_at_1000 value: 81.07900000000001 - type: mrr_at_3 value: 79.728 - type: mrr_at_5 value: 80.522 - type: ndcg_at_1 value: 71.557 - type: ndcg_at_10 value: 81.381 - type: ndcg_at_100 value: 82.421 - type: ndcg_at_1000 value: 82.709 - type: ndcg_at_3 value: 78.671 - type: ndcg_at_5 value: 80.17 - type: precision_at_1 value: 71.557 - type: precision_at_10 value: 10.159 - type: precision_at_100 value: 1.089 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 30.668 - type: precision_at_5 value: 19.337 - type: recall_at_1 value: 66.369 - type: recall_at_10 value: 91.482 - type: recall_at_100 value: 95.848 - type: recall_at_1000 value: 97.749 - type: recall_at_3 value: 84.185 - type: recall_at_5 value: 87.908 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 20.902 - type: map_at_10 value: 34.554 - type: map_at_100 value: 36.632 - type: map_at_1000 value: 36.811 - type: map_at_3 value: 30.264000000000003 - type: map_at_5 value: 32.714999999999996 - type: mrr_at_1 value: 42.13 - type: mrr_at_10 value: 51.224000000000004 - type: mrr_at_100 value: 52.044999999999995 - type: mrr_at_1000 value: 52.075 - type: mrr_at_3 value: 48.842999999999996 - type: mrr_at_5 value: 50.108 - type: ndcg_at_1 value: 42.13 - type: ndcg_at_10 value: 42.643 - type: ndcg_at_100 value: 49.806 - type: ndcg_at_1000 value: 52.583 - type: ndcg_at_3 value: 38.927 - type: ndcg_at_5 value: 40.071 - type: precision_at_1 value: 42.13 - type: precision_at_10 value: 11.928999999999998 - type: precision_at_100 value: 1.931 - type: precision_at_1000 value: 0.243 - type: precision_at_3 value: 26.337 - type: precision_at_5 value: 19.29 - type: recall_at_1 value: 20.902 - type: recall_at_10 value: 49.527 - type: recall_at_100 value: 75.754 - type: recall_at_1000 value: 92.171 - type: recall_at_3 value: 35.024 - type: recall_at_5 value: 41.207 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 39.831 - type: map_at_10 value: 63.958999999999996 - type: map_at_100 value: 64.869 - type: map_at_1000 value: 64.924 - type: map_at_3 value: 60.25 - type: map_at_5 value: 62.572 - type: mrr_at_1 value: 79.662 - type: mrr_at_10 value: 85.57900000000001 - type: mrr_at_100 value: 85.744 - type: mrr_at_1000 value: 85.748 - type: mrr_at_3 value: 84.718 - type: mrr_at_5 value: 85.312 - type: ndcg_at_1 value: 79.662 - type: ndcg_at_10 value: 72.366 - type: ndcg_at_100 value: 75.42999999999999 - type: ndcg_at_1000 value: 76.469 - type: ndcg_at_3 value: 67.258 - type: ndcg_at_5 value: 70.14099999999999 - type: precision_at_1 value: 79.662 - type: precision_at_10 value: 15.254999999999999 - type: precision_at_100 value: 1.763 - type: precision_at_1000 value: 0.19 
- type: precision_at_3 value: 43.358000000000004 - type: precision_at_5 value: 28.288999999999998 - type: recall_at_1 value: 39.831 - type: recall_at_10 value: 76.273 - type: recall_at_100 value: 88.163 - type: recall_at_1000 value: 95.017 - type: recall_at_3 value: 65.037 - type: recall_at_5 value: 70.722 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 93.13879999999999 - type: ap value: 89.94638859649079 - type: f1 value: 93.13371537570421 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 21.482 - type: map_at_10 value: 33.635999999999996 - type: map_at_100 value: 34.792 - type: map_at_1000 value: 34.839999999999996 - type: map_at_3 value: 29.553 - type: map_at_5 value: 31.892 - type: mrr_at_1 value: 22.076999999999998 - type: mrr_at_10 value: 34.247 - type: mrr_at_100 value: 35.337 - type: mrr_at_1000 value: 35.38 - type: mrr_at_3 value: 30.208000000000002 - type: mrr_at_5 value: 32.554 - type: ndcg_at_1 value: 22.092 - type: ndcg_at_10 value: 40.657 - type: ndcg_at_100 value: 46.251999999999995 - type: ndcg_at_1000 value: 47.466 - type: ndcg_at_3 value: 32.353 - type: ndcg_at_5 value: 36.532 - type: precision_at_1 value: 22.092 - type: precision_at_10 value: 6.5040000000000004 - type: precision_at_100 value: 0.9329999999999999 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 13.719999999999999 - type: precision_at_5 value: 10.344000000000001 - type: recall_at_1 value: 21.482 - type: recall_at_10 value: 62.316 - type: recall_at_100 value: 88.283 - type: recall_at_1000 value: 97.554 - type: recall_at_3 value: 39.822 - type: recall_at_5 value: 49.805 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.63657090743274 - type: f1 value: 93.49355466580484 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 66.01459188326493 - type: f1 value: 48.48386472180784 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.49024882313383 - type: f1 value: 71.8750196914349 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.38063214525891 - type: f1 value: 76.87364042122763 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 34.30572302322684 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 32.18418556367587 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - 
type: map value: 32.268707296386154 - type: mrr value: 33.481925531215055 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 6.586 - type: map_at_10 value: 14.954999999999998 - type: map_at_100 value: 19.03 - type: map_at_1000 value: 20.653 - type: map_at_3 value: 10.859 - type: map_at_5 value: 12.577 - type: mrr_at_1 value: 47.988 - type: mrr_at_10 value: 57.57 - type: mrr_at_100 value: 58.050000000000004 - type: mrr_at_1000 value: 58.083 - type: mrr_at_3 value: 55.212 - type: mrr_at_5 value: 56.713 - type: ndcg_at_1 value: 45.975 - type: ndcg_at_10 value: 38.432 - type: ndcg_at_100 value: 35.287 - type: ndcg_at_1000 value: 44.35 - type: ndcg_at_3 value: 43.077 - type: ndcg_at_5 value: 40.952 - type: precision_at_1 value: 47.368 - type: precision_at_10 value: 28.483000000000004 - type: precision_at_100 value: 8.882 - type: precision_at_1000 value: 2.217 - type: precision_at_3 value: 40.144000000000005 - type: precision_at_5 value: 35.17 - type: recall_at_1 value: 6.586 - type: recall_at_10 value: 19.688 - type: recall_at_100 value: 35.426 - type: recall_at_1000 value: 68.09100000000001 - type: recall_at_3 value: 12.234 - type: recall_at_5 value: 14.937000000000001 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 27.322000000000003 - type: map_at_10 value: 43.224000000000004 - type: map_at_100 value: 44.275999999999996 - type: map_at_1000 value: 44.308 - type: map_at_3 value: 38.239000000000004 - type: map_at_5 value: 41.244 - type: mrr_at_1 value: 31.025000000000002 - type: mrr_at_10 value: 45.635 - type: mrr_at_100 value: 46.425 - type: mrr_at_1000 value: 46.445 - type: mrr_at_3 value: 41.42 - type: mrr_at_5 value: 44.038 - type: ndcg_at_1 value: 30.997000000000003 - type: ndcg_at_10 value: 51.55499999999999 - type: ndcg_at_100 value: 55.964999999999996 - type: ndcg_at_1000 value: 56.657000000000004 - type: ndcg_at_3 value: 42.185 - type: ndcg_at_5 value: 47.229 - type: precision_at_1 value: 30.997000000000003 - type: precision_at_10 value: 8.885 - type: precision_at_100 value: 1.1360000000000001 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 19.457 - type: precision_at_5 value: 14.554 - type: recall_at_1 value: 27.322000000000003 - type: recall_at_10 value: 74.59400000000001 - type: recall_at_100 value: 93.699 - type: recall_at_1000 value: 98.76599999999999 - type: recall_at_3 value: 50.43 - type: recall_at_5 value: 62.073 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 71.109 - type: map_at_10 value: 85.137 - type: map_at_100 value: 85.759 - type: map_at_1000 value: 85.774 - type: map_at_3 value: 82.25200000000001 - type: map_at_5 value: 84.031 - type: mrr_at_1 value: 82.01 - type: mrr_at_10 value: 87.97 - type: mrr_at_100 value: 88.076 - type: mrr_at_1000 value: 88.076 - type: mrr_at_3 value: 87.06 - type: mrr_at_5 value: 87.694 - type: ndcg_at_1 value: 81.99 - type: ndcg_at_10 value: 88.738 - type: ndcg_at_100 value: 89.928 - type: ndcg_at_1000 value: 90.01400000000001 - type: ndcg_at_3 value: 86.042 - type: ndcg_at_5 value: 87.505 - type: precision_at_1 value: 81.99 - type: precision_at_10 value: 13.468 - type: precision_at_100 value: 1.534 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.702999999999996 - type: precision_at_5 value: 24.706 - type: recall_at_1 value: 
71.109 - type: recall_at_10 value: 95.58 - type: recall_at_100 value: 99.62299999999999 - type: recall_at_1000 value: 99.98899999999999 - type: recall_at_3 value: 87.69 - type: recall_at_5 value: 91.982 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 59.43361510023748 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 64.53582642500159 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.2299999999999995 - type: map_at_10 value: 11.802 - type: map_at_100 value: 14.454 - type: map_at_1000 value: 14.865 - type: map_at_3 value: 7.911 - type: map_at_5 value: 9.912 - type: mrr_at_1 value: 21.0 - type: mrr_at_10 value: 32.722 - type: mrr_at_100 value: 33.989000000000004 - type: mrr_at_1000 value: 34.026 - type: mrr_at_3 value: 28.65 - type: mrr_at_5 value: 31.075000000000003 - type: ndcg_at_1 value: 21.0 - type: ndcg_at_10 value: 20.161 - type: ndcg_at_100 value: 30.122 - type: ndcg_at_1000 value: 36.399 - type: ndcg_at_3 value: 17.881 - type: ndcg_at_5 value: 16.439999999999998 - type: precision_at_1 value: 21.0 - type: precision_at_10 value: 10.94 - type: precision_at_100 value: 2.5340000000000003 - type: precision_at_1000 value: 0.402 - type: precision_at_3 value: 17.067 - type: precision_at_5 value: 15.120000000000001 - type: recall_at_1 value: 4.2299999999999995 - type: recall_at_10 value: 22.163 - type: recall_at_100 value: 51.42 - type: recall_at_1000 value: 81.652 - type: recall_at_3 value: 10.353 - type: recall_at_5 value: 15.323 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 86.44056731476951 - type: cos_sim_spearman value: 82.32974396072802 - type: euclidean_pearson value: 83.63616080755894 - type: euclidean_spearman value: 82.32974071069209 - type: manhattan_pearson value: 83.64149958303744 - type: manhattan_spearman value: 82.32161014878858 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 85.65083720426293 - type: cos_sim_spearman value: 77.60786500521749 - type: euclidean_pearson value: 81.8149634918642 - type: euclidean_spearman value: 77.60637450428892 - type: manhattan_pearson value: 81.83507575657566 - type: manhattan_spearman value: 77.613220311151 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 87.35683624595698 - type: cos_sim_spearman value: 87.94550696434106 - type: euclidean_pearson value: 87.50272679030367 - type: euclidean_spearman value: 87.94550696434106 - type: manhattan_pearson value: 87.4759786099497 - type: manhattan_spearman value: 87.90226811166427 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 86.27438743391316 - type: cos_sim_spearman value: 83.85378984594779 - type: euclidean_pearson value: 85.25840635223642 - type: 
euclidean_spearman value: 83.85378983163673 - type: manhattan_pearson value: 85.24936075631025 - type: manhattan_spearman value: 83.85052479958138 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 87.4783814521557 - type: cos_sim_spearman value: 88.473284566453 - type: euclidean_pearson value: 87.94757741870404 - type: euclidean_spearman value: 88.47327698999878 - type: manhattan_pearson value: 87.93617414057984 - type: manhattan_spearman value: 88.45889274229359 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 84.68359147631057 - type: cos_sim_spearman value: 86.46426572535646 - type: euclidean_pearson value: 85.98303971468599 - type: euclidean_spearman value: 86.46426572535646 - type: manhattan_pearson value: 85.95109710640726 - type: manhattan_spearman value: 86.43282632541583 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 88.88758959688604 - type: cos_sim_spearman value: 88.70384784133324 - type: euclidean_pearson value: 89.27293800474978 - type: euclidean_spearman value: 88.70384784133324 - type: manhattan_pearson value: 89.41494348093664 - type: manhattan_spearman value: 88.8330050824941 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 67.66759812551814 - type: cos_sim_spearman value: 68.02368115471576 - type: euclidean_pearson value: 69.52859542757353 - type: euclidean_spearman value: 68.02368115471576 - type: manhattan_pearson value: 69.50332399468952 - type: manhattan_spearman value: 67.91228681203849 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 87.75891320010409 - type: cos_sim_spearman value: 88.33063922402347 - type: euclidean_pearson value: 88.02964654543274 - type: euclidean_spearman value: 88.33063922402347 - type: manhattan_pearson value: 88.03029440701458 - type: manhattan_spearman value: 88.3158691488696 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 87.46897310470844 - type: mrr value: 96.29042072669523 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 62.261 - type: map_at_10 value: 71.023 - type: map_at_100 value: 71.5 - type: map_at_1000 value: 71.518 - type: map_at_3 value: 67.857 - type: map_at_5 value: 69.44500000000001 - type: mrr_at_1 value: 65.0 - type: mrr_at_10 value: 72.11 - type: mrr_at_100 value: 72.479 - type: mrr_at_1000 value: 72.49600000000001 - type: mrr_at_3 value: 69.722 - type: mrr_at_5 value: 71.02199999999999 - type: ndcg_at_1 value: 65.0 - type: ndcg_at_10 value: 75.40599999999999 - type: ndcg_at_100 value: 77.41 - type: ndcg_at_1000 value: 77.83200000000001 - type: ndcg_at_3 value: 69.95599999999999 - type: ndcg_at_5 value: 72.296 - type: precision_at_1 value: 65.0 - type: precision_at_10 value: 
9.966999999999999 - type: precision_at_100 value: 1.097 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 26.667 - type: precision_at_5 value: 17.666999999999998 - type: recall_at_1 value: 62.261 - type: recall_at_10 value: 87.822 - type: recall_at_100 value: 96.833 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 73.06099999999999 - type: recall_at_5 value: 78.88300000000001 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.86138613861387 - type: cos_sim_ap value: 96.7851799601876 - type: cos_sim_f1 value: 92.94354838709677 - type: cos_sim_precision value: 93.69918699186992 - type: cos_sim_recall value: 92.2 - type: dot_accuracy value: 99.86138613861387 - type: dot_ap value: 96.78517996018759 - type: dot_f1 value: 92.94354838709677 - type: dot_precision value: 93.69918699186992 - type: dot_recall value: 92.2 - type: euclidean_accuracy value: 99.86138613861387 - type: euclidean_ap value: 96.78517996018759 - type: euclidean_f1 value: 92.94354838709677 - type: euclidean_precision value: 93.69918699186992 - type: euclidean_recall value: 92.2 - type: manhattan_accuracy value: 99.86336633663366 - type: manhattan_ap value: 96.79790073128503 - type: manhattan_f1 value: 93.0930930930931 - type: manhattan_precision value: 93.18637274549098 - type: manhattan_recall value: 93.0 - type: max_accuracy value: 99.86336633663366 - type: max_ap value: 96.79790073128503 - type: max_f1 value: 93.0930930930931 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 65.07696952556874 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 35.51701116515262 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 55.40099299306496 - type: mrr value: 56.411316420507596 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.940008734510055 - type: cos_sim_spearman value: 31.606997026865212 - type: dot_pearson value: 30.940010256206353 - type: dot_spearman value: 31.62194110302714 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.197 - type: map_at_10 value: 1.6549999999999998 - type: map_at_100 value: 8.939 - type: map_at_1000 value: 22.402 - type: map_at_3 value: 0.587 - type: map_at_5 value: 0.931 - type: mrr_at_1 value: 74.0 - type: mrr_at_10 value: 84.667 - type: mrr_at_100 value: 84.667 - type: mrr_at_1000 value: 84.667 - type: mrr_at_3 value: 83.667 - type: mrr_at_5 value: 84.667 - type: ndcg_at_1 value: 69.0 - type: ndcg_at_10 value: 66.574 - type: ndcg_at_100 value: 51.074 - type: ndcg_at_1000 value: 47.263 - type: ndcg_at_3 value: 71.95 - type: ndcg_at_5 value: 70.52000000000001 - type: precision_at_1 value: 74.0 - type: 
precision_at_10 value: 70.39999999999999 - type: precision_at_100 value: 52.580000000000005 - type: precision_at_1000 value: 20.93 - type: precision_at_3 value: 76.667 - type: precision_at_5 value: 75.6 - type: recall_at_1 value: 0.197 - type: recall_at_10 value: 1.92 - type: recall_at_100 value: 12.655 - type: recall_at_1000 value: 44.522 - type: recall_at_3 value: 0.639 - type: recall_at_5 value: 1.03 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 1.735 - type: map_at_10 value: 9.064 - type: map_at_100 value: 15.021999999999998 - type: map_at_1000 value: 16.596 - type: map_at_3 value: 4.188 - type: map_at_5 value: 6.194999999999999 - type: mrr_at_1 value: 26.531 - type: mrr_at_10 value: 44.413000000000004 - type: mrr_at_100 value: 45.433 - type: mrr_at_1000 value: 45.452999999999996 - type: mrr_at_3 value: 41.497 - type: mrr_at_5 value: 42.925000000000004 - type: ndcg_at_1 value: 22.448999999999998 - type: ndcg_at_10 value: 22.597 - type: ndcg_at_100 value: 34.893 - type: ndcg_at_1000 value: 46.763 - type: ndcg_at_3 value: 24.366 - type: ndcg_at_5 value: 23.959 - type: precision_at_1 value: 26.531 - type: precision_at_10 value: 21.02 - type: precision_at_100 value: 7.51 - type: precision_at_1000 value: 1.541 - type: precision_at_3 value: 27.211000000000002 - type: precision_at_5 value: 25.306 - type: recall_at_1 value: 1.735 - type: recall_at_10 value: 15.870999999999999 - type: recall_at_100 value: 47.385 - type: recall_at_1000 value: 83.55 - type: recall_at_3 value: 5.813 - type: recall_at_5 value: 9.707 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.19 - type: ap value: 15.106812062408629 - type: f1 value: 55.254852511954255 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 61.553480475382 - type: f1 value: 61.697424438626435 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 53.12092298453447 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 87.35173153722357 - type: cos_sim_ap value: 78.22985044080261 - type: cos_sim_f1 value: 71.23356926188069 - type: cos_sim_precision value: 68.36487142163999 - type: cos_sim_recall value: 74.35356200527704 - type: dot_accuracy value: 87.35173153722357 - type: dot_ap value: 78.22985958574529 - type: dot_f1 value: 71.23356926188069 - type: dot_precision value: 68.36487142163999 - type: dot_recall value: 74.35356200527704 - type: euclidean_accuracy value: 87.35173153722357 - type: euclidean_ap value: 78.22985909816191 - type: euclidean_f1 value: 71.23356926188069 - type: euclidean_precision value: 68.36487142163999 - type: euclidean_recall value: 74.35356200527704 - type: manhattan_accuracy value: 87.36365261965786 - type: manhattan_ap value: 78.18108280854142 - type: manhattan_f1 value: 71.19958634953466 - type: 
manhattan_precision value: 69.79219462747086 - type: manhattan_recall value: 72.66490765171504 - type: max_accuracy value: 87.36365261965786 - type: max_ap value: 78.22985958574529 - type: max_f1 value: 71.23356926188069 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.71424690495596 - type: cos_sim_ap value: 85.53000600450122 - type: cos_sim_f1 value: 77.95508274231679 - type: cos_sim_precision value: 74.92189718829879 - type: cos_sim_recall value: 81.24422543886665 - type: dot_accuracy value: 88.71424690495596 - type: dot_ap value: 85.53000387261983 - type: dot_f1 value: 77.95508274231679 - type: dot_precision value: 74.92189718829879 - type: dot_recall value: 81.24422543886665 - type: euclidean_accuracy value: 88.71424690495596 - type: euclidean_ap value: 85.53000527321076 - type: euclidean_f1 value: 77.95508274231679 - type: euclidean_precision value: 74.92189718829879 - type: euclidean_recall value: 81.24422543886665 - type: manhattan_accuracy value: 88.7297706368611 - type: manhattan_ap value: 85.49670114967172 - type: manhattan_f1 value: 77.91265729089562 - type: manhattan_precision value: 75.01425313568986 - type: manhattan_recall value: 81.04404065291038 - type: max_accuracy value: 88.7297706368611 - type: max_ap value: 85.53000600450122 - type: max_f1 value: 77.95508274231679 --- # {MODEL_NAME} This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 1024 dimensional dense vector space and can be used for tasks like clustering or semantic search. <!--- Describe your model here --> ## Usage (Sentence-Transformers) Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed: ``` pip install -U sentence-transformers ``` Then you can use the model like this: ```python from sentence_transformers import SentenceTransformer sentences = ["This is an example sentence", "Each sentence is converted"] model = SentenceTransformer('{MODEL_NAME}') embeddings = model.encode(sentences) print(embeddings) ``` ## Evaluation Results <!--- Describe how your model was evaluated --> For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name={MODEL_NAME}) ## Training The model was trained with the parameters: **DataLoader**: `torch.utils.data.dataloader.DataLoader` of length 1196 with parameters: ``` {'batch_size': 10, 'sampler': 'torch.utils.data.sampler.SequentialSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'} ``` **Loss**: `sentence_transformers.losses.MultipleNegativesRankingLoss.MultipleNegativesRankingLoss` with parameters: ``` {'scale': 20.0, 'similarity_fct': 'cos_sim'} ``` Parameters of the fit()-Method: ``` { "epochs": 5, "evaluation_steps": 50, "evaluator": "sentence_transformers.evaluation.InformationRetrievalEvaluator.InformationRetrievalEvaluator", "max_grad_norm": 1, "optimizer_class": "<class 'torch.optim.adamw.AdamW'>", "optimizer_params": { "lr": 2e-05 }, "scheduler": "WarmupLinear", "steps_per_epoch": null, "warmup_steps": 598, "weight_decay": 0.01 } ``` ## Full Model Architecture ``` SentenceTransformer( (0): Transformer({'max_seq_length': 512, 'do_lower_case': True}) with Transformer model: BertModel (1): Pooling({'word_embedding_dimension': 1024, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 
## Full Model Architecture

```
SentenceTransformer(
  (0): Transformer({'max_seq_length': 512, 'do_lower_case': True}) with Transformer model: BertModel
  (1): Pooling({'word_embedding_dimension': 1024, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
  (2): Normalize()
)
```

## Citing & Authors

<!--- Describe where people can find more information -->
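## Usage (HuggingFace Transformers)

Because the architecture above applies CLS-token pooling followed by L2 normalization, the embeddings can also be computed with the plain `transformers` library. The following is a minimal sketch of that equivalent path, assuming standard `AutoModel`/`AutoTokenizer` loading of the same checkpoint; it is not an official recipe from the model card.

```python
import torch
import torch.nn.functional as F
from transformers import AutoTokenizer, AutoModel

sentences = ["This is an example sentence", "Each sentence is converted"]

tokenizer = AutoTokenizer.from_pretrained("Mihaiii/Venusaur")
model = AutoModel.from_pretrained("Mihaiii/Venusaur")

# Tokenize with the same 512-token limit shown in the architecture above.
encoded = tokenizer(sentences, padding=True, truncation=True, max_length=512, return_tensors="pt")

with torch.no_grad():
    output = model(**encoded)

# CLS pooling (pooling_mode_cls_token=True) followed by L2 normalization,
# mirroring the Pooling and Normalize modules of the SentenceTransformer stack.
embeddings = F.normalize(output.last_hidden_state[:, 0], p=2, dim=1)
print(embeddings.shape)
```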
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
google/Gemma-Embeddings-v1.0
google
null
[ "pytorch", "mteb", "en", "base_model:google/gemma-2-9b-it", "base_model:finetune:google/gemma-2-9b-it", "license:gemma", "model-index", "region:us" ]
2024-12-15T17:13:58
2024-12-16T21:46:31
132
123
--- base_model: - google/gemma-2-9b-it language: - en license: gemma tags: - mteb model-index: - name: google/Gemma-Embeddings-v1.0 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 94.6269 - type: f1 value: 91.9315 - type: f1_weighted value: 94.77029999999999 - type: ap value: 77.8258 - type: ap_weighted value: 77.8258 - type: main_score value: 94.6269 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification (default) type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 97.0382 - type: f1 value: 97.0377 - type: f1_weighted value: 97.0377 - type: ap value: 95.8721 - type: ap_weighted value: 95.8721 - type: main_score value: 97.0382 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 65.30799999999999 - type: f1 value: 64.4521 - type: f1_weighted value: 64.4521 - type: main_score value: 65.30799999999999 - task: type: Retrieval dataset: name: MTEB ArguAna (default) type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: ndcg_at_1 value: 69.844 - type: ndcg_at_3 value: 82.047 - type: ndcg_at_5 value: 83.734 - type: ndcg_at_10 value: 84.821 - type: ndcg_at_20 value: 85.051 - type: ndcg_at_100 value: 85.231 - type: ndcg_at_1000 value: 85.231 - type: map_at_1 value: 69.844 - type: map_at_3 value: 79.125 - type: map_at_5 value: 80.071 - type: map_at_10 value: 80.537 - type: map_at_20 value: 80.598 - type: map_at_100 value: 80.626 - type: map_at_1000 value: 80.626 - type: recall_at_1 value: 69.844 - type: recall_at_3 value: 90.469 - type: recall_at_5 value: 94.523 - type: recall_at_10 value: 97.795 - type: recall_at_20 value: 98.72 - type: recall_at_100 value: 99.644 - type: recall_at_1000 value: 99.644 - type: precision_at_1 value: 69.844 - type: precision_at_3 value: 30.156 - type: precision_at_5 value: 18.905 - type: precision_at_10 value: 9.78 - type: precision_at_20 value: 4.936 - type: precision_at_100 value: 0.996 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 70.0569 - type: mrr_at_3 value: 79.20819999999999 - type: mrr_at_5 value: 80.1541 - type: mrr_at_10 value: 80.6206 - type: mrr_at_20 value: 80.6819 - type: mrr_at_100 value: 80.7099 - type: mrr_at_1000 value: 80.7099 - type: nauc_ndcg_at_1_max value: 4.4853 - type: nauc_ndcg_at_1_std value: -32.4139 - type: nauc_ndcg_at_1_diff1 value: 54.961000000000006 - type: nauc_ndcg_at_3_max value: 10.9114 - type: nauc_ndcg_at_3_std value: -33.466499999999996 - type: nauc_ndcg_at_3_diff1 value: 50.809599999999996 - type: nauc_ndcg_at_5_max value: 8.2551 - type: nauc_ndcg_at_5_std value: -33.0891 - type: nauc_ndcg_at_5_diff1 value: 50.942600000000006 - type: nauc_ndcg_at_10_max value: 8.9955 - type: nauc_ndcg_at_10_std value: -33.372 - type: nauc_ndcg_at_10_diff1 value: 52.88420000000001 - type: nauc_ndcg_at_20_max value: 8.0304 - type: nauc_ndcg_at_20_std value: -33.2286 - type: nauc_ndcg_at_20_diff1 value: 52.56459999999999 - type: nauc_ndcg_at_100_max value: 7.7877 - type: nauc_ndcg_at_100_std value: -32.5506 - type: nauc_ndcg_at_100_diff1 value: 52.207800000000006 - type: nauc_ndcg_at_1000_max value: 7.7877 - type: 
nauc_ndcg_at_1000_std value: -32.5506 - type: nauc_ndcg_at_1000_diff1 value: 52.207800000000006 - type: nauc_map_at_1_max value: 4.4853 - type: nauc_map_at_1_std value: -32.4139 - type: nauc_map_at_1_diff1 value: 54.961000000000006 - type: nauc_map_at_3_max value: 8.5875 - type: nauc_map_at_3_std value: -33.1539 - type: nauc_map_at_3_diff1 value: 51.7761 - type: nauc_map_at_5_max value: 7.2324 - type: nauc_map_at_5_std value: -32.9639 - type: nauc_map_at_5_diff1 value: 51.9064 - type: nauc_map_at_10_max value: 7.4474 - type: nauc_map_at_10_std value: -33.0762 - type: nauc_map_at_10_diff1 value: 52.580400000000004 - type: nauc_map_at_20_max value: 7.2379999999999995 - type: nauc_map_at_20_std value: -33.056400000000004 - type: nauc_map_at_20_diff1 value: 52.5111 - type: nauc_map_at_100_max value: 7.210800000000001 - type: nauc_map_at_100_std value: -32.9841 - type: nauc_map_at_100_diff1 value: 52.469100000000005 - type: nauc_map_at_1000_max value: 7.210800000000001 - type: nauc_map_at_1000_std value: -32.9841 - type: nauc_map_at_1000_diff1 value: 52.469100000000005 - type: nauc_recall_at_1_max value: 4.4853 - type: nauc_recall_at_1_std value: -32.4139 - type: nauc_recall_at_1_diff1 value: 54.961000000000006 - type: nauc_recall_at_3_max value: 24.187 - type: nauc_recall_at_3_std value: -35.2013 - type: nauc_recall_at_3_diff1 value: 45.690599999999996 - type: nauc_recall_at_5_max value: 16.9677 - type: nauc_recall_at_5_std value: -34.041700000000006 - type: nauc_recall_at_5_diff1 value: 42.5248 - type: nauc_recall_at_10_max value: 43.9168 - type: nauc_recall_at_10_std value: -39.8657 - type: nauc_recall_at_10_diff1 value: 66.1909 - type: nauc_recall_at_20_max value: 29.317300000000003 - type: nauc_recall_at_20_std value: -37.4268 - type: nauc_recall_at_20_diff1 value: 62.67660000000001 - type: nauc_recall_at_100_max value: 37.0551 - type: nauc_recall_at_100_std value: 85.8517 - type: nauc_recall_at_100_diff1 value: 21.2768 - type: nauc_recall_at_1000_max value: 37.0551 - type: nauc_recall_at_1000_std value: 85.8517 - type: nauc_recall_at_1000_diff1 value: 21.2768 - type: nauc_precision_at_1_max value: 4.4853 - type: nauc_precision_at_1_std value: -32.4139 - type: nauc_precision_at_1_diff1 value: 54.961000000000006 - type: nauc_precision_at_3_max value: 24.187 - type: nauc_precision_at_3_std value: -35.2013 - type: nauc_precision_at_3_diff1 value: 45.690599999999996 - type: nauc_precision_at_5_max value: 16.9677 - type: nauc_precision_at_5_std value: -34.041700000000006 - type: nauc_precision_at_5_diff1 value: 42.5248 - type: nauc_precision_at_10_max value: 43.9168 - type: nauc_precision_at_10_std value: -39.8657 - type: nauc_precision_at_10_diff1 value: 66.1909 - type: nauc_precision_at_20_max value: 29.317300000000003 - type: nauc_precision_at_20_std value: -37.4268 - type: nauc_precision_at_20_diff1 value: 62.67660000000001 - type: nauc_precision_at_100_max value: 37.0551 - type: nauc_precision_at_100_std value: 85.8517 - type: nauc_precision_at_100_diff1 value: 21.2768 - type: nauc_precision_at_1000_max value: 37.0551 - type: nauc_precision_at_1000_std value: 85.8517 - type: nauc_precision_at_1000_diff1 value: 21.2768 - type: nauc_mrr_at_1_max value: 4.6327 - type: nauc_mrr_at_1_std value: -32.4116 - type: nauc_mrr_at_1_diff1 value: 54.4129 - type: nauc_mrr_at_3_max value: 8.6301 - type: nauc_mrr_at_3_std value: -33.264700000000005 - type: nauc_mrr_at_3_diff1 value: 51.452 - type: nauc_mrr_at_5_max value: 7.273899999999999 - type: nauc_mrr_at_5_std value: -33.0802 - type: 
nauc_mrr_at_5_diff1 value: 51.5652 - type: nauc_mrr_at_10_max value: 7.4876 - type: nauc_mrr_at_10_std value: -33.2021 - type: nauc_mrr_at_10_diff1 value: 52.2296 - type: nauc_mrr_at_20_max value: 7.277699999999999 - type: nauc_mrr_at_20_std value: -33.1827 - type: nauc_mrr_at_20_diff1 value: 52.15880000000001 - type: nauc_mrr_at_100_max value: 7.249999999999999 - type: nauc_mrr_at_100_std value: -33.110299999999995 - type: nauc_mrr_at_100_diff1 value: 52.1158 - type: nauc_mrr_at_1000_max value: 7.249999999999999 - type: nauc_mrr_at_1000_std value: -33.110299999999995 - type: nauc_mrr_at_1000_diff1 value: 52.1158 - type: main_score value: 84.821 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P (default) type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 54.8264 - type: v_measure_std value: 14.505199999999999 - type: main_score value: 54.8264 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S (default) type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 50.022299999999994 - type: v_measure_std value: 14.4899 - type: main_score value: 50.022299999999994 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions (default) type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 65.6388 - type: mrr value: 79.3677 - type: nAUC_map_max value: 31.682900000000004 - type: nAUC_map_std value: 20.0437 - type: nAUC_map_diff1 value: 8.7821 - type: nAUC_mrr_max value: 44.033 - type: nAUC_mrr_std value: 33.0875 - type: nAUC_mrr_diff1 value: 17.7949 - type: main_score value: 65.6388 - task: type: STS dataset: name: MTEB BIOSSES (default) type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: pearson value: 89.9755 - type: spearman value: 89.8099 - type: cosine_pearson value: 89.9755 - type: cosine_spearman value: 89.8099 - type: manhattan_pearson value: 87.7735 - type: manhattan_spearman value: 89.57310000000001 - type: euclidean_pearson value: 87.708 - type: euclidean_spearman value: 89.8099 - type: main_score value: 89.8099 - task: type: Classification dataset: name: MTEB Banking77Classification (default) type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 93.16879999999999 - type: f1 value: 93.1524 - type: f1_weighted value: 93.1524 - type: main_score value: 93.16879999999999 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P (default) type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 54.024499999999996 - type: v_measure_std value: 1.0512000000000001 - type: main_score value: 54.024499999999996 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S (default) type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 50.925799999999995 - type: v_measure_std value: 1.024 - type: main_score value: 50.925799999999995 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval (default) type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: ndcg_at_1 value: 47.067 - 
type: ndcg_at_3 value: 53.561 - type: ndcg_at_5 value: 56.497 - type: ndcg_at_10 value: 59.916000000000004 - type: ndcg_at_20 value: 61.814 - type: ndcg_at_100 value: 64.34 - type: ndcg_at_1000 value: 65.45299999999999 - type: map_at_1 value: 38.668 - type: map_at_3 value: 47.897 - type: map_at_5 value: 50.56699999999999 - type: map_at_10 value: 52.737 - type: map_at_20 value: 53.581 - type: map_at_100 value: 54.254000000000005 - type: map_at_1000 value: 54.339000000000006 - type: recall_at_1 value: 38.668 - type: recall_at_3 value: 56.269999999999996 - type: recall_at_5 value: 64.259 - type: recall_at_10 value: 74.05199999999999 - type: recall_at_20 value: 80.569 - type: recall_at_100 value: 91.43900000000001 - type: recall_at_1000 value: 98.257 - type: precision_at_1 value: 47.067 - type: precision_at_3 value: 25.799 - type: precision_at_5 value: 18.826999999999998 - type: precision_at_10 value: 11.716999999999999 - type: precision_at_20 value: 6.81 - type: precision_at_100 value: 1.7579999999999998 - type: precision_at_1000 value: 0.208 - type: mrr_at_1 value: 47.0672 - type: mrr_at_3 value: 55.555600000000005 - type: mrr_at_5 value: 57.257999999999996 - type: mrr_at_10 value: 58.383300000000006 - type: mrr_at_20 value: 58.7298 - type: mrr_at_100 value: 58.9092 - type: mrr_at_1000 value: 58.93659999999999 - type: nauc_ndcg_at_1_max value: 32.7003 - type: nauc_ndcg_at_1_std value: -9.8787 - type: nauc_ndcg_at_1_diff1 value: 53.543 - type: nauc_ndcg_at_3_max value: 32.6312 - type: nauc_ndcg_at_3_std value: -8.7036 - type: nauc_ndcg_at_3_diff1 value: 52.727599999999995 - type: nauc_ndcg_at_5_max value: 32.8095 - type: nauc_ndcg_at_5_std value: -6.3161 - type: nauc_ndcg_at_5_diff1 value: 51.804399999999994 - type: nauc_ndcg_at_10_max value: 32.1689 - type: nauc_ndcg_at_10_std value: -8.144 - type: nauc_ndcg_at_10_diff1 value: 51.0188 - type: nauc_ndcg_at_20_max value: 32.5967 - type: nauc_ndcg_at_20_std value: -7.3793 - type: nauc_ndcg_at_20_diff1 value: 51.698100000000004 - type: nauc_ndcg_at_100_max value: 33.3382 - type: nauc_ndcg_at_100_std value: -6.383900000000001 - type: nauc_ndcg_at_100_diff1 value: 51.465700000000005 - type: nauc_ndcg_at_1000_max value: 32.7849 - type: nauc_ndcg_at_1000_std value: -7.0913 - type: nauc_ndcg_at_1000_diff1 value: 51.4944 - type: nauc_map_at_1_max value: 26.2488 - type: nauc_map_at_1_std value: -11.1918 - type: nauc_map_at_1_diff1 value: 55.12629999999999 - type: nauc_map_at_3_max value: 30.157600000000002 - type: nauc_map_at_3_std value: -10.802299999999999 - type: nauc_map_at_3_diff1 value: 54.13440000000001 - type: nauc_map_at_5_max value: 31.088500000000003 - type: nauc_map_at_5_std value: -9.0711 - type: nauc_map_at_5_diff1 value: 53.729000000000006 - type: nauc_map_at_10_max value: 31.3165 - type: nauc_map_at_10_std value: -9.6771 - type: nauc_map_at_10_diff1 value: 53.3998 - type: nauc_map_at_20_max value: 31.5896 - type: nauc_map_at_20_std value: -9.163499999999999 - type: nauc_map_at_20_diff1 value: 53.436499999999995 - type: nauc_map_at_100_max value: 31.7416 - type: nauc_map_at_100_std value: -8.9088 - type: nauc_map_at_100_diff1 value: 53.213699999999996 - type: nauc_map_at_1000_max value: 31.7308 - type: nauc_map_at_1000_std value: -8.9222 - type: nauc_map_at_1000_diff1 value: 53.1991 - type: nauc_recall_at_1_max value: 26.2488 - type: nauc_recall_at_1_std value: -11.1918 - type: nauc_recall_at_1_diff1 value: 55.12629999999999 - type: nauc_recall_at_3_max value: 29.987799999999996 - type: nauc_recall_at_3_std value: -8.8979 - type: 
nauc_recall_at_3_diff1 value: 50.1606 - type: nauc_recall_at_5_max value: 30.7548 - type: nauc_recall_at_5_std value: -2.5221 - type: nauc_recall_at_5_diff1 value: 46.5351 - type: nauc_recall_at_10_max value: 27.4456 - type: nauc_recall_at_10_std value: -7.7719 - type: nauc_recall_at_10_diff1 value: 41.0327 - type: nauc_recall_at_20_max value: 30.598799999999997 - type: nauc_recall_at_20_std value: -0.7229 - type: nauc_recall_at_20_diff1 value: 43.335499999999996 - type: nauc_recall_at_100_max value: 44.4764 - type: nauc_recall_at_100_std value: 20.4865 - type: nauc_recall_at_100_diff1 value: 42.634100000000004 - type: nauc_recall_at_1000_max value: 44.5522 - type: nauc_recall_at_1000_std value: 53.301 - type: nauc_recall_at_1000_diff1 value: 39.488 - type: nauc_precision_at_1_max value: 32.7003 - type: nauc_precision_at_1_std value: -9.8787 - type: nauc_precision_at_1_diff1 value: 53.543 - type: nauc_precision_at_3_max value: 30.4913 - type: nauc_precision_at_3_std value: -2.7105 - type: nauc_precision_at_3_diff1 value: 28.8688 - type: nauc_precision_at_5_max value: 25.876900000000003 - type: nauc_precision_at_5_std value: 4.6525 - type: nauc_precision_at_5_diff1 value: 16.154 - type: nauc_precision_at_10_max value: 17.2851 - type: nauc_precision_at_10_std value: 4.2126 - type: nauc_precision_at_10_diff1 value: 2.6613 - type: nauc_precision_at_20_max value: 10.5899 - type: nauc_precision_at_20_std value: 6.668699999999999 - type: nauc_precision_at_20_diff1 value: -6.13 - type: nauc_precision_at_100_max value: 1.0815 - type: nauc_precision_at_100_std value: 7.1370000000000005 - type: nauc_precision_at_100_diff1 value: -17.5759 - type: nauc_precision_at_1000_max value: -5.915 - type: nauc_precision_at_1000_std value: 1.6254000000000002 - type: nauc_precision_at_1000_diff1 value: -21.4134 - type: nauc_mrr_at_1_max value: 32.7003 - type: nauc_mrr_at_1_std value: -9.8787 - type: nauc_mrr_at_1_diff1 value: 53.543 - type: nauc_mrr_at_3_max value: 33.9338 - type: nauc_mrr_at_3_std value: -7.9868999999999994 - type: nauc_mrr_at_3_diff1 value: 52.6479 - type: nauc_mrr_at_5_max value: 33.9982 - type: nauc_mrr_at_5_std value: -6.827500000000001 - type: nauc_mrr_at_5_diff1 value: 51.5701 - type: nauc_mrr_at_10_max value: 33.3568 - type: nauc_mrr_at_10_std value: -7.606300000000001 - type: nauc_mrr_at_10_diff1 value: 51.202400000000004 - type: nauc_mrr_at_20_max value: 33.4329 - type: nauc_mrr_at_20_std value: -7.5066 - type: nauc_mrr_at_20_diff1 value: 51.4203 - type: nauc_mrr_at_100_max value: 33.508700000000005 - type: nauc_mrr_at_100_std value: -7.455100000000001 - type: nauc_mrr_at_100_diff1 value: 51.442699999999995 - type: nauc_mrr_at_1000_max value: 33.4885 - type: nauc_mrr_at_1000_std value: -7.474200000000001 - type: nauc_mrr_at_1000_diff1 value: 51.4415 - type: main_score value: 59.916000000000004 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval (default) type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: ndcg_at_1 value: 50.127 - type: ndcg_at_3 value: 55.615 - type: ndcg_at_5 value: 57.462 - type: ndcg_at_10 value: 59.40800000000001 - type: ndcg_at_20 value: 60.870000000000005 - type: ndcg_at_100 value: 63.321000000000005 - type: ndcg_at_1000 value: 64.716 - type: map_at_1 value: 39.722 - type: map_at_3 value: 49.721 - type: map_at_5 value: 51.762 - type: map_at_10 value: 53.205999999999996 - type: map_at_20 value: 53.989 - type: map_at_100 value: 54.675 - type: map_at_1000 value: 
54.791000000000004 - type: recall_at_1 value: 39.722 - type: recall_at_3 value: 57.428999999999995 - type: recall_at_5 value: 63.041000000000004 - type: recall_at_10 value: 69.61 - type: recall_at_20 value: 74.785 - type: recall_at_100 value: 85.83 - type: recall_at_1000 value: 94.017 - type: precision_at_1 value: 50.127 - type: precision_at_3 value: 27.389000000000003 - type: precision_at_5 value: 19.223000000000003 - type: precision_at_10 value: 11.286999999999999 - type: precision_at_20 value: 6.551 - type: precision_at_100 value: 1.7239999999999998 - type: precision_at_1000 value: 0.211 - type: mrr_at_1 value: 50.1274 - type: mrr_at_3 value: 57.6539 - type: mrr_at_5 value: 58.695299999999996 - type: mrr_at_10 value: 59.3822 - type: mrr_at_20 value: 59.689899999999994 - type: mrr_at_100 value: 59.9139 - type: mrr_at_1000 value: 59.9405 - type: nauc_ndcg_at_1_max value: 40.3466 - type: nauc_ndcg_at_1_std value: -13.8013 - type: nauc_ndcg_at_1_diff1 value: 57.7384 - type: nauc_ndcg_at_3_max value: 44.8558 - type: nauc_ndcg_at_3_std value: -17.1649 - type: nauc_ndcg_at_3_diff1 value: 56.2083 - type: nauc_ndcg_at_5_max value: 45.3495 - type: nauc_ndcg_at_5_std value: -17.1787 - type: nauc_ndcg_at_5_diff1 value: 55.2946 - type: nauc_ndcg_at_10_max value: 45.5771 - type: nauc_ndcg_at_10_std value: -17.194200000000002 - type: nauc_ndcg_at_10_diff1 value: 55.22899999999999 - type: nauc_ndcg_at_20_max value: 46.1671 - type: nauc_ndcg_at_20_std value: -15.8971 - type: nauc_ndcg_at_20_diff1 value: 55.4699 - type: nauc_ndcg_at_100_max value: 46.665600000000005 - type: nauc_ndcg_at_100_std value: -14.2615 - type: nauc_ndcg_at_100_diff1 value: 55.521699999999996 - type: nauc_ndcg_at_1000_max value: 46.5416 - type: nauc_ndcg_at_1000_std value: -13.414100000000001 - type: nauc_ndcg_at_1000_diff1 value: 55.6847 - type: nauc_map_at_1_max value: 32.0258 - type: nauc_map_at_1_std value: -23.0677 - type: nauc_map_at_1_diff1 value: 58.34819999999999 - type: nauc_map_at_3_max value: 39.663199999999996 - type: nauc_map_at_3_std value: -23.261599999999998 - type: nauc_map_at_3_diff1 value: 56.930400000000006 - type: nauc_map_at_5_max value: 41.2777 - type: nauc_map_at_5_std value: -21.776200000000003 - type: nauc_map_at_5_diff1 value: 56.3832 - type: nauc_map_at_10_max value: 42.4307 - type: nauc_map_at_10_std value: -20.6698 - type: nauc_map_at_10_diff1 value: 56.3658 - type: nauc_map_at_20_max value: 43.146 - type: nauc_map_at_20_std value: -19.5408 - type: nauc_map_at_20_diff1 value: 56.432300000000005 - type: nauc_map_at_100_max value: 43.6798 - type: nauc_map_at_100_std value: -18.5361 - type: nauc_map_at_100_diff1 value: 56.4087 - type: nauc_map_at_1000_max value: 43.7037 - type: nauc_map_at_1000_std value: -18.3693 - type: nauc_map_at_1000_diff1 value: 56.4316 - type: nauc_recall_at_1_max value: 32.0258 - type: nauc_recall_at_1_std value: -23.0677 - type: nauc_recall_at_1_diff1 value: 58.34819999999999 - type: nauc_recall_at_3_max value: 41.504400000000004 - type: nauc_recall_at_3_std value: -23.471 - type: nauc_recall_at_3_diff1 value: 53.0711 - type: nauc_recall_at_5_max value: 43.6923 - type: nauc_recall_at_5_std value: -21.831999999999997 - type: nauc_recall_at_5_diff1 value: 50.1672 - type: nauc_recall_at_10_max value: 45.426100000000005 - type: nauc_recall_at_10_std value: -20.4132 - type: nauc_recall_at_10_diff1 value: 48.4065 - type: nauc_recall_at_20_max value: 49.0579 - type: nauc_recall_at_20_std value: -14.5552 - type: nauc_recall_at_20_diff1 value: 48.341499999999996 - type: 
nauc_recall_at_100_max value: 54.8657 - type: nauc_recall_at_100_std value: 0.1297 - type: nauc_recall_at_100_diff1 value: 46.576699999999995 - type: nauc_recall_at_1000_max value: 65.1502 - type: nauc_recall_at_1000_std value: 28.880699999999997 - type: nauc_recall_at_1000_diff1 value: 47.525099999999995 - type: nauc_precision_at_1_max value: 40.3466 - type: nauc_precision_at_1_std value: -13.8013 - type: nauc_precision_at_1_diff1 value: 57.7384 - type: nauc_precision_at_3_max value: 40.9044 - type: nauc_precision_at_3_std value: 3.1708 - type: nauc_precision_at_3_diff1 value: 27.9298 - type: nauc_precision_at_5_max value: 36.598000000000006 - type: nauc_precision_at_5_std value: 12.392 - type: nauc_precision_at_5_diff1 value: 15.7846 - type: nauc_precision_at_10_max value: 31.3687 - type: nauc_precision_at_10_std value: 20.7438 - type: nauc_precision_at_10_diff1 value: 6.7331 - type: nauc_precision_at_20_max value: 26.1811 - type: nauc_precision_at_20_std value: 28.4518 - type: nauc_precision_at_20_diff1 value: 0.30010000000000003 - type: nauc_precision_at_100_max value: 16.9808 - type: nauc_precision_at_100_std value: 38.7882 - type: nauc_precision_at_100_diff1 value: -8.8537 - type: nauc_precision_at_1000_max value: 7.2884 - type: nauc_precision_at_1000_std value: 39.2072 - type: nauc_precision_at_1000_diff1 value: -13.0202 - type: nauc_mrr_at_1_max value: 40.3466 - type: nauc_mrr_at_1_std value: -13.8013 - type: nauc_mrr_at_1_diff1 value: 57.7384 - type: nauc_mrr_at_3_max value: 45.2742 - type: nauc_mrr_at_3_std value: -12.6802 - type: nauc_mrr_at_3_diff1 value: 56.8512 - type: nauc_mrr_at_5_max value: 45.3012 - type: nauc_mrr_at_5_std value: -12.7147 - type: nauc_mrr_at_5_diff1 value: 56.2424 - type: nauc_mrr_at_10_max value: 45.1963 - type: nauc_mrr_at_10_std value: -12.7254 - type: nauc_mrr_at_10_diff1 value: 56.119699999999995 - type: nauc_mrr_at_20_max value: 45.2288 - type: nauc_mrr_at_20_std value: -12.5913 - type: nauc_mrr_at_20_diff1 value: 56.1426 - type: nauc_mrr_at_100_max value: 45.2468 - type: nauc_mrr_at_100_std value: -12.496500000000001 - type: nauc_mrr_at_100_diff1 value: 56.1812 - type: nauc_mrr_at_1000_max value: 45.2427 - type: nauc_mrr_at_1000_std value: -12.4903 - type: nauc_mrr_at_1000_diff1 value: 56.189299999999996 - type: main_score value: 59.40800000000001 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval (default) type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: ndcg_at_1 value: 53.856 - type: ndcg_at_3 value: 62.492000000000004 - type: ndcg_at_5 value: 65.41 - type: ndcg_at_10 value: 68.134 - type: ndcg_at_20 value: 69.646 - type: ndcg_at_100 value: 71.184 - type: ndcg_at_1000 value: 71.663 - type: map_at_1 value: 47.236 - type: map_at_3 value: 58.36 - type: map_at_5 value: 60.484 - type: map_at_10 value: 61.978 - type: map_at_20 value: 62.57899999999999 - type: map_at_100 value: 62.900999999999996 - type: map_at_1000 value: 62.929 - type: recall_at_1 value: 47.236 - type: recall_at_3 value: 68.065 - type: recall_at_5 value: 75.155 - type: recall_at_10 value: 82.80499999999999 - type: recall_at_20 value: 88.215 - type: recall_at_100 value: 95.332 - type: recall_at_1000 value: 98.614 - type: precision_at_1 value: 53.856 - type: precision_at_3 value: 27.941 - type: precision_at_5 value: 19.034000000000002 - type: precision_at_10 value: 10.821 - type: precision_at_20 value: 5.947 - type: precision_at_100 value: 1.325 - type: precision_at_1000 value: 
0.13899999999999998 - type: mrr_at_1 value: 53.855799999999995 - type: mrr_at_3 value: 62.6541 - type: mrr_at_5 value: 64.1243 - type: mrr_at_10 value: 65.084 - type: mrr_at_20 value: 65.3879 - type: mrr_at_100 value: 65.5377 - type: mrr_at_1000 value: 65.5496 - type: nauc_ndcg_at_1_max value: 33.8654 - type: nauc_ndcg_at_1_std value: -5.9437999999999995 - type: nauc_ndcg_at_1_diff1 value: 56.8669 - type: nauc_ndcg_at_3_max value: 34.058899999999994 - type: nauc_ndcg_at_3_std value: -5.7744 - type: nauc_ndcg_at_3_diff1 value: 52.8014 - type: nauc_ndcg_at_5_max value: 35.2914 - type: nauc_ndcg_at_5_std value: -4.482 - type: nauc_ndcg_at_5_diff1 value: 52.4343 - type: nauc_ndcg_at_10_max value: 36.458600000000004 - type: nauc_ndcg_at_10_std value: -2.3942 - type: nauc_ndcg_at_10_diff1 value: 52.9498 - type: nauc_ndcg_at_20_max value: 36.6183 - type: nauc_ndcg_at_20_std value: -1.8568000000000002 - type: nauc_ndcg_at_20_diff1 value: 52.5903 - type: nauc_ndcg_at_100_max value: 37.0184 - type: nauc_ndcg_at_100_std value: -0.7801 - type: nauc_ndcg_at_100_diff1 value: 53.011399999999995 - type: nauc_ndcg_at_1000_max value: 36.6608 - type: nauc_ndcg_at_1000_std value: -1.3958 - type: nauc_ndcg_at_1000_diff1 value: 53.0578 - type: nauc_map_at_1_max value: 25.787599999999998 - type: nauc_map_at_1_std value: -10.3742 - type: nauc_map_at_1_diff1 value: 56.4662 - type: nauc_map_at_3_max value: 31.4446 - type: nauc_map_at_3_std value: -8.140799999999999 - type: nauc_map_at_3_diff1 value: 53.8682 - type: nauc_map_at_5_max value: 32.8035 - type: nauc_map_at_5_std value: -6.8225999999999996 - type: nauc_map_at_5_diff1 value: 53.5451 - type: nauc_map_at_10_max value: 33.7173 - type: nauc_map_at_10_std value: -5.5325 - type: nauc_map_at_10_diff1 value: 53.6678 - type: nauc_map_at_20_max value: 34.2438 - type: nauc_map_at_20_std value: -4.8891 - type: nauc_map_at_20_diff1 value: 53.656000000000006 - type: nauc_map_at_100_max value: 34.473 - type: nauc_map_at_100_std value: -4.5106 - type: nauc_map_at_100_diff1 value: 53.7077 - type: nauc_map_at_1000_max value: 34.476600000000005 - type: nauc_map_at_1000_std value: -4.517 - type: nauc_map_at_1000_diff1 value: 53.7143 - type: nauc_recall_at_1_max value: 25.787599999999998 - type: nauc_recall_at_1_std value: -10.3742 - type: nauc_recall_at_1_diff1 value: 56.4662 - type: nauc_recall_at_3_max value: 32.044200000000004 - type: nauc_recall_at_3_std value: -7.696400000000001 - type: nauc_recall_at_3_diff1 value: 48.9202 - type: nauc_recall_at_5_max value: 34.389199999999995 - type: nauc_recall_at_5_std value: -4.2582 - type: nauc_recall_at_5_diff1 value: 46.0109 - type: nauc_recall_at_10_max value: 39.5274 - type: nauc_recall_at_10_std value: 3.9919999999999995 - type: nauc_recall_at_10_diff1 value: 46.383 - type: nauc_recall_at_20_max value: 43.5902 - type: nauc_recall_at_20_std value: 9.3885 - type: nauc_recall_at_20_diff1 value: 42.6035 - type: nauc_recall_at_100_max value: 61.5485 - type: nauc_recall_at_100_std value: 41.3982 - type: nauc_recall_at_100_diff1 value: 44.1753 - type: nauc_recall_at_1000_max value: 71.4815 - type: nauc_recall_at_1000_std value: 57.354400000000005 - type: nauc_recall_at_1000_diff1 value: 34.8468 - type: nauc_precision_at_1_max value: 33.8654 - type: nauc_precision_at_1_std value: -5.9437999999999995 - type: nauc_precision_at_1_diff1 value: 56.8669 - type: nauc_precision_at_3_max value: 33.655 - type: nauc_precision_at_3_std value: 7.826099999999999 - type: nauc_precision_at_3_diff1 value: 24.9975 - type: nauc_precision_at_5_max value: 
32.9241 - type: nauc_precision_at_5_std value: 15.4324 - type: nauc_precision_at_5_diff1 value: 14.079 - type: nauc_precision_at_10_max value: 31.067600000000002 - type: nauc_precision_at_10_std value: 24.4877 - type: nauc_precision_at_10_diff1 value: 3.3716999999999997 - type: nauc_precision_at_20_max value: 28.786299999999997 - type: nauc_precision_at_20_std value: 29.323300000000003 - type: nauc_precision_at_20_diff1 value: -4.0988 - type: nauc_precision_at_100_max value: 23.4199 - type: nauc_precision_at_100_std value: 33.4154 - type: nauc_precision_at_100_diff1 value: -11.519400000000001 - type: nauc_precision_at_1000_max value: 19.2315 - type: nauc_precision_at_1000_std value: 31.391999999999996 - type: nauc_precision_at_1000_diff1 value: -14.5617 - type: nauc_mrr_at_1_max value: 33.8654 - type: nauc_mrr_at_1_std value: -5.9437999999999995 - type: nauc_mrr_at_1_diff1 value: 56.8669 - type: nauc_mrr_at_3_max value: 35.8396 - type: nauc_mrr_at_3_std value: -3.4635 - type: nauc_mrr_at_3_diff1 value: 53.6524 - type: nauc_mrr_at_5_max value: 36.0956 - type: nauc_mrr_at_5_std value: -3.0328999999999997 - type: nauc_mrr_at_5_diff1 value: 53.4449 - type: nauc_mrr_at_10_max value: 36.3936 - type: nauc_mrr_at_10_std value: -2.5233 - type: nauc_mrr_at_10_diff1 value: 53.858399999999996 - type: nauc_mrr_at_20_max value: 36.2638 - type: nauc_mrr_at_20_std value: -2.6908000000000003 - type: nauc_mrr_at_20_diff1 value: 53.805099999999996 - type: nauc_mrr_at_100_max value: 36.2945 - type: nauc_mrr_at_100_std value: -2.6416 - type: nauc_mrr_at_100_diff1 value: 53.8698 - type: nauc_mrr_at_1000_max value: 36.2806 - type: nauc_mrr_at_1000_std value: -2.6593 - type: nauc_mrr_at_1000_diff1 value: 53.8679 - type: main_score value: 68.134 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval (default) type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: ndcg_at_1 value: 31.525 - type: ndcg_at_3 value: 40.165 - type: ndcg_at_5 value: 43.321 - type: ndcg_at_10 value: 46.778 - type: ndcg_at_20 value: 49.025 - type: ndcg_at_100 value: 51.461999999999996 - type: ndcg_at_1000 value: 52.935 - type: map_at_1 value: 29.044999999999998 - type: map_at_3 value: 36.976 - type: map_at_5 value: 38.853 - type: map_at_10 value: 40.388000000000005 - type: map_at_20 value: 41.082 - type: map_at_100 value: 41.486000000000004 - type: map_at_1000 value: 41.551 - type: recall_at_1 value: 29.044999999999998 - type: recall_at_3 value: 46.601 - type: recall_at_5 value: 54.062 - type: recall_at_10 value: 64.291 - type: recall_at_20 value: 72.531 - type: recall_at_100 value: 84.578 - type: recall_at_1000 value: 95.358 - type: precision_at_1 value: 31.525 - type: precision_at_3 value: 17.213 - type: precision_at_5 value: 12.203 - type: precision_at_10 value: 7.412000000000001 - type: precision_at_20 value: 4.266 - type: precision_at_100 value: 1.019 - type: precision_at_1000 value: 0.11800000000000001 - type: mrr_at_1 value: 31.525399999999998 - type: mrr_at_3 value: 39.529199999999996 - type: mrr_at_5 value: 41.3089 - type: mrr_at_10 value: 42.6025 - type: mrr_at_20 value: 43.1771 - type: mrr_at_100 value: 43.4611 - type: mrr_at_1000 value: 43.5085 - type: nauc_ndcg_at_1_max value: 22.6602 - type: nauc_ndcg_at_1_std value: -9.5981 - type: nauc_ndcg_at_1_diff1 value: 45.3169 - type: nauc_ndcg_at_3_max value: 19.7847 - type: nauc_ndcg_at_3_std value: -8.7083 - type: nauc_ndcg_at_3_diff1 value: 40.4401 - type: nauc_ndcg_at_5_max value: 20.457700000000003 
- type: nauc_ndcg_at_5_std value: -8.6845 - type: nauc_ndcg_at_5_diff1 value: 38.7015 - type: nauc_ndcg_at_10_max value: 21.0795 - type: nauc_ndcg_at_10_std value: -6.5691 - type: nauc_ndcg_at_10_diff1 value: 37.966699999999996 - type: nauc_ndcg_at_20_max value: 21.4852 - type: nauc_ndcg_at_20_std value: -5.904800000000001 - type: nauc_ndcg_at_20_diff1 value: 38.0953 - type: nauc_ndcg_at_100_max value: 22.5439 - type: nauc_ndcg_at_100_std value: -5.1345 - type: nauc_ndcg_at_100_diff1 value: 38.7969 - type: nauc_ndcg_at_1000_max value: 22.3039 - type: nauc_ndcg_at_1000_std value: -5.9468 - type: nauc_ndcg_at_1000_diff1 value: 39.0377 - type: nauc_map_at_1_max value: 18.7633 - type: nauc_map_at_1_std value: -10.276 - type: nauc_map_at_1_diff1 value: 46.593 - type: nauc_map_at_3_max value: 19.0896 - type: nauc_map_at_3_std value: -9.214 - type: nauc_map_at_3_diff1 value: 41.980000000000004 - type: nauc_map_at_5_max value: 19.7273 - type: nauc_map_at_5_std value: -9.2142 - type: nauc_map_at_5_diff1 value: 41.073 - type: nauc_map_at_10_max value: 20.039 - type: nauc_map_at_10_std value: -8.3819 - type: nauc_map_at_10_diff1 value: 40.7757 - type: nauc_map_at_20_max value: 20.227600000000002 - type: nauc_map_at_20_std value: -8.2044 - type: nauc_map_at_20_diff1 value: 40.8699 - type: nauc_map_at_100_max value: 20.3876 - type: nauc_map_at_100_std value: -8.1094 - type: nauc_map_at_100_diff1 value: 40.9925 - type: nauc_map_at_1000_max value: 20.397299999999998 - type: nauc_map_at_1000_std value: -8.1295 - type: nauc_map_at_1000_diff1 value: 40.996300000000005 - type: nauc_recall_at_1_max value: 18.7633 - type: nauc_recall_at_1_std value: -10.276 - type: nauc_recall_at_1_diff1 value: 46.593 - type: nauc_recall_at_3_max value: 17.8827 - type: nauc_recall_at_3_std value: -7.2757000000000005 - type: nauc_recall_at_3_diff1 value: 35.817 - type: nauc_recall_at_5_max value: 18.8334 - type: nauc_recall_at_5_std value: -7.2427 - type: nauc_recall_at_5_diff1 value: 31.0566 - type: nauc_recall_at_10_max value: 20.1305 - type: nauc_recall_at_10_std value: -0.271 - type: nauc_recall_at_10_diff1 value: 27.4127 - type: nauc_recall_at_20_max value: 21.438 - type: nauc_recall_at_20_std value: 3.8486 - type: nauc_recall_at_20_diff1 value: 25.983099999999997 - type: nauc_recall_at_100_max value: 31.620900000000002 - type: nauc_recall_at_100_std value: 17.457700000000003 - type: nauc_recall_at_100_diff1 value: 26.546300000000002 - type: nauc_recall_at_1000_max value: 35.1108 - type: nauc_recall_at_1000_std value: 25.8201 - type: nauc_recall_at_1000_diff1 value: 15.2005 - type: nauc_precision_at_1_max value: 22.6602 - type: nauc_precision_at_1_std value: -9.5981 - type: nauc_precision_at_1_diff1 value: 45.3169 - type: nauc_precision_at_3_max value: 22.344 - type: nauc_precision_at_3_std value: -7.0357 - type: nauc_precision_at_3_diff1 value: 33.298100000000005 - type: nauc_precision_at_5_max value: 24.8904 - type: nauc_precision_at_5_std value: -5.7215 - type: nauc_precision_at_5_diff1 value: 27.1231 - type: nauc_precision_at_10_max value: 25.3317 - type: nauc_precision_at_10_std value: 2.7272000000000003 - type: nauc_precision_at_10_diff1 value: 19.3335 - type: nauc_precision_at_20_max value: 24.5711 - type: nauc_precision_at_20_std value: 6.5833 - type: nauc_precision_at_20_diff1 value: 13.7149 - type: nauc_precision_at_100_max value: 24.0549 - type: nauc_precision_at_100_std value: 12.7275 - type: nauc_precision_at_100_diff1 value: 5.2654 - type: nauc_precision_at_1000_max value: 17.191000000000003 - type: 
nauc_precision_at_1000_std value: 9.1143 - type: nauc_precision_at_1000_diff1 value: -5.5666 - type: nauc_mrr_at_1_max value: 22.6602 - type: nauc_mrr_at_1_std value: -9.5981 - type: nauc_mrr_at_1_diff1 value: 45.3169 - type: nauc_mrr_at_3_max value: 22.5354 - type: nauc_mrr_at_3_std value: -8.6094 - type: nauc_mrr_at_3_diff1 value: 40.982800000000005 - type: nauc_mrr_at_5_max value: 22.828699999999998 - type: nauc_mrr_at_5_std value: -8.6655 - type: nauc_mrr_at_5_diff1 value: 40.0766 - type: nauc_mrr_at_10_max value: 23.035600000000002 - type: nauc_mrr_at_10_std value: -7.864 - type: nauc_mrr_at_10_diff1 value: 39.8871 - type: nauc_mrr_at_20_max value: 23.0969 - type: nauc_mrr_at_20_std value: -7.6975 - type: nauc_mrr_at_20_diff1 value: 39.9707 - type: nauc_mrr_at_100_max value: 23.191200000000002 - type: nauc_mrr_at_100_std value: -7.6803 - type: nauc_mrr_at_100_diff1 value: 40.0729 - type: nauc_mrr_at_1000_max value: 23.1807 - type: nauc_mrr_at_1000_std value: -7.707 - type: nauc_mrr_at_1000_diff1 value: 40.0782 - type: main_score value: 46.778 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval (default) type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: ndcg_at_1 value: 26.617 - type: ndcg_at_3 value: 33.623999999999995 - type: ndcg_at_5 value: 36.981 - type: ndcg_at_10 value: 39.602 - type: ndcg_at_20 value: 42.059000000000005 - type: ndcg_at_100 value: 45.248 - type: ndcg_at_1000 value: 47.384 - type: map_at_1 value: 21.018 - type: map_at_3 value: 29.529 - type: map_at_5 value: 31.666 - type: map_at_10 value: 32.952 - type: map_at_20 value: 33.794000000000004 - type: map_at_100 value: 34.317 - type: map_at_1000 value: 34.416999999999994 - type: recall_at_1 value: 21.018 - type: recall_at_3 value: 38.624 - type: recall_at_5 value: 47.014 - type: recall_at_10 value: 54.668000000000006 - type: recall_at_20 value: 63.302 - type: recall_at_100 value: 78.487 - type: recall_at_1000 value: 93.118 - type: precision_at_1 value: 26.617 - type: precision_at_3 value: 16.915 - type: precision_at_5 value: 12.537 - type: precision_at_10 value: 7.5 - type: precision_at_20 value: 4.484 - type: precision_at_100 value: 1.172 - type: precision_at_1000 value: 0.148 - type: mrr_at_1 value: 26.616899999999998 - type: mrr_at_3 value: 34.8051 - type: mrr_at_5 value: 36.9755 - type: mrr_at_10 value: 38.000499999999995 - type: mrr_at_20 value: 38.452999999999996 - type: mrr_at_100 value: 38.817299999999996 - type: mrr_at_1000 value: 38.873200000000004 - type: nauc_ndcg_at_1_max value: 29.749799999999997 - type: nauc_ndcg_at_1_std value: -2.3403 - type: nauc_ndcg_at_1_diff1 value: 41.9574 - type: nauc_ndcg_at_3_max value: 29.7929 - type: nauc_ndcg_at_3_std value: -1.0050999999999999 - type: nauc_ndcg_at_3_diff1 value: 36.471900000000005 - type: nauc_ndcg_at_5_max value: 29.6171 - type: nauc_ndcg_at_5_std value: -1.0074 - type: nauc_ndcg_at_5_diff1 value: 34.5702 - type: nauc_ndcg_at_10_max value: 30.7265 - type: nauc_ndcg_at_10_std value: 0.46430000000000005 - type: nauc_ndcg_at_10_diff1 value: 35.1612 - type: nauc_ndcg_at_20_max value: 31.698999999999998 - type: nauc_ndcg_at_20_std value: 1.657 - type: nauc_ndcg_at_20_diff1 value: 35.131 - type: nauc_ndcg_at_100_max value: 31.717499999999998 - type: nauc_ndcg_at_100_std value: 2.4316999999999998 - type: nauc_ndcg_at_100_diff1 value: 35.1483 - type: nauc_ndcg_at_1000_max value: 31.390099999999997 - type: nauc_ndcg_at_1000_std value: 2.2651999999999997 - type: 
nauc_ndcg_at_1000_diff1 value: 35.9287 - type: nauc_map_at_1_max value: 27.181 - type: nauc_map_at_1_std value: -1.923 - type: nauc_map_at_1_diff1 value: 41.3209 - type: nauc_map_at_3_max value: 28.718100000000003 - type: nauc_map_at_3_std value: -1.8913 - type: nauc_map_at_3_diff1 value: 37.3018 - type: nauc_map_at_5_max value: 28.751900000000003 - type: nauc_map_at_5_std value: -1.9649 - type: nauc_map_at_5_diff1 value: 36.3067 - type: nauc_map_at_10_max value: 29.4293 - type: nauc_map_at_10_std value: -1.1372 - type: nauc_map_at_10_diff1 value: 36.7561 - type: nauc_map_at_20_max value: 29.788500000000003 - type: nauc_map_at_20_std value: -0.7448 - type: nauc_map_at_20_diff1 value: 36.7633 - type: nauc_map_at_100_max value: 29.859799999999996 - type: nauc_map_at_100_std value: -0.6194 - type: nauc_map_at_100_diff1 value: 36.8069 - type: nauc_map_at_1000_max value: 29.8362 - type: nauc_map_at_1000_std value: -0.6232 - type: nauc_map_at_1000_diff1 value: 36.835499999999996 - type: nauc_recall_at_1_max value: 27.181 - type: nauc_recall_at_1_std value: -1.923 - type: nauc_recall_at_1_diff1 value: 41.3209 - type: nauc_recall_at_3_max value: 28.5155 - type: nauc_recall_at_3_std value: -0.131 - type: nauc_recall_at_3_diff1 value: 31.5708 - type: nauc_recall_at_5_max value: 27.0032 - type: nauc_recall_at_5_std value: -0.7121 - type: nauc_recall_at_5_diff1 value: 26.3405 - type: nauc_recall_at_10_max value: 29.665200000000002 - type: nauc_recall_at_10_std value: 3.1462999999999997 - type: nauc_recall_at_10_diff1 value: 27.2852 - type: nauc_recall_at_20_max value: 33.2976 - type: nauc_recall_at_20_std value: 7.6558 - type: nauc_recall_at_20_diff1 value: 26.5332 - type: nauc_recall_at_100_max value: 33.5446 - type: nauc_recall_at_100_std value: 16.308600000000002 - type: nauc_recall_at_100_diff1 value: 22.561700000000002 - type: nauc_recall_at_1000_max value: 35.5524 - type: nauc_recall_at_1000_std value: 38.9644 - type: nauc_recall_at_1000_diff1 value: 27.861900000000002 - type: nauc_precision_at_1_max value: 29.749799999999997 - type: nauc_precision_at_1_std value: -2.3403 - type: nauc_precision_at_1_diff1 value: 41.9574 - type: nauc_precision_at_3_max value: 28.370099999999997 - type: nauc_precision_at_3_std value: 1.0373 - type: nauc_precision_at_3_diff1 value: 28.8024 - type: nauc_precision_at_5_max value: 27.184599999999996 - type: nauc_precision_at_5_std value: 2.5995999999999997 - type: nauc_precision_at_5_diff1 value: 22.8208 - type: nauc_precision_at_10_max value: 26.372600000000002 - type: nauc_precision_at_10_std value: 7.833600000000001 - type: nauc_precision_at_10_diff1 value: 19.8669 - type: nauc_precision_at_20_max value: 23.1904 - type: nauc_precision_at_20_std value: 10.5558 - type: nauc_precision_at_20_diff1 value: 14.5559 - type: nauc_precision_at_100_max value: 13.3218 - type: nauc_precision_at_100_std value: 11.7868 - type: nauc_precision_at_100_diff1 value: 4.2146 - type: nauc_precision_at_1000_max value: 0.7887 - type: nauc_precision_at_1000_std value: 5.9056 - type: nauc_precision_at_1000_diff1 value: -3.2767999999999997 - type: nauc_mrr_at_1_max value: 29.749799999999997 - type: nauc_mrr_at_1_std value: -2.3403 - type: nauc_mrr_at_1_diff1 value: 41.9574 - type: nauc_mrr_at_3_max value: 31.509500000000003 - type: nauc_mrr_at_3_std value: -0.41859999999999997 - type: nauc_mrr_at_3_diff1 value: 38.6987 - type: nauc_mrr_at_5_max value: 31.5247 - type: nauc_mrr_at_5_std value: -0.2595 - type: nauc_mrr_at_5_diff1 value: 37.5028 - type: nauc_mrr_at_10_max value: 31.7081 - type: 
nauc_mrr_at_10_std value: -0.0492 - type: nauc_mrr_at_10_diff1 value: 37.6581 - type: nauc_mrr_at_20_max value: 31.932 - type: nauc_mrr_at_20_std value: 0.2097 - type: nauc_mrr_at_20_diff1 value: 37.7422 - type: nauc_mrr_at_100_max value: 31.949699999999996 - type: nauc_mrr_at_100_std value: 0.1865 - type: nauc_mrr_at_100_diff1 value: 37.8221 - type: nauc_mrr_at_1000_max value: 31.9386 - type: nauc_mrr_at_1000_std value: 0.1795 - type: nauc_mrr_at_1000_diff1 value: 37.8506 - type: main_score value: 39.602 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval (default) type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: ndcg_at_1 value: 46.006 - type: ndcg_at_3 value: 51.910999999999994 - type: ndcg_at_5 value: 54.86299999999999 - type: ndcg_at_10 value: 57.135000000000005 - type: ndcg_at_20 value: 59.422 - type: ndcg_at_100 value: 62.474 - type: ndcg_at_1000 value: 63.532 - type: map_at_1 value: 37.16 - type: map_at_3 value: 46.947 - type: map_at_5 value: 49.295 - type: map_at_10 value: 50.662 - type: map_at_20 value: 51.53 - type: map_at_100 value: 52.149 - type: map_at_1000 value: 52.224000000000004 - type: recall_at_1 value: 37.16 - type: recall_at_3 value: 55.249 - type: recall_at_5 value: 63.234 - type: recall_at_10 value: 70.231 - type: recall_at_20 value: 77.9 - type: recall_at_100 value: 91.509 - type: recall_at_1000 value: 97.711 - type: precision_at_1 value: 46.006 - type: precision_at_3 value: 25.024 - type: precision_at_5 value: 17.671 - type: precision_at_10 value: 10.212 - type: precision_at_20 value: 5.914 - type: precision_at_100 value: 1.513 - type: precision_at_1000 value: 0.17500000000000002 - type: mrr_at_1 value: 46.0058 - type: mrr_at_3 value: 54.154599999999995 - type: mrr_at_5 value: 55.8101 - type: mrr_at_10 value: 56.6384 - type: mrr_at_20 value: 57.1217 - type: mrr_at_100 value: 57.3844 - type: mrr_at_1000 value: 57.404599999999995 - type: nauc_ndcg_at_1_max value: 25.011400000000002 - type: nauc_ndcg_at_1_std value: -10.9453 - type: nauc_ndcg_at_1_diff1 value: 52.5635 - type: nauc_ndcg_at_3_max value: 20.5699 - type: nauc_ndcg_at_3_std value: -14.1374 - type: nauc_ndcg_at_3_diff1 value: 50.095 - type: nauc_ndcg_at_5_max value: 20.6937 - type: nauc_ndcg_at_5_std value: -14.7377 - type: nauc_ndcg_at_5_diff1 value: 49.6968 - type: nauc_ndcg_at_10_max value: 21.0545 - type: nauc_ndcg_at_10_std value: -14.100999999999999 - type: nauc_ndcg_at_10_diff1 value: 49.2876 - type: nauc_ndcg_at_20_max value: 22.1813 - type: nauc_ndcg_at_20_std value: -13.619700000000002 - type: nauc_ndcg_at_20_diff1 value: 49.7752 - type: nauc_ndcg_at_100_max value: 23.765800000000002 - type: nauc_ndcg_at_100_std value: -11.5192 - type: nauc_ndcg_at_100_diff1 value: 49.8519 - type: nauc_ndcg_at_1000_max value: 23.2792 - type: nauc_ndcg_at_1000_std value: -11.7505 - type: nauc_ndcg_at_1000_diff1 value: 49.8422 - type: nauc_map_at_1_max value: 17.0234 - type: nauc_map_at_1_std value: -14.726600000000001 - type: nauc_map_at_1_diff1 value: 55.854000000000006 - type: nauc_map_at_3_max value: 18.4476 - type: nauc_map_at_3_std value: -14.8542 - type: nauc_map_at_3_diff1 value: 51.5951 - type: nauc_map_at_5_max value: 19.3995 - type: nauc_map_at_5_std value: -14.9116 - type: nauc_map_at_5_diff1 value: 51.081900000000005 - type: nauc_map_at_10_max value: 19.8911 - type: nauc_map_at_10_std value: -14.354700000000001 - type: nauc_map_at_10_diff1 value: 50.6725 - type: nauc_map_at_20_max value: 20.2847 - type: 
nauc_map_at_20_std value: -14.099999999999998 - type: nauc_map_at_20_diff1 value: 50.82020000000001 - type: nauc_map_at_100_max value: 20.6892 - type: nauc_map_at_100_std value: -13.6554 - type: nauc_map_at_100_diff1 value: 50.7695 - type: nauc_map_at_1000_max value: 20.6883 - type: nauc_map_at_1000_std value: -13.6632 - type: nauc_map_at_1000_diff1 value: 50.7647 - type: nauc_recall_at_1_max value: 17.0234 - type: nauc_recall_at_1_std value: -14.726600000000001 - type: nauc_recall_at_1_diff1 value: 55.854000000000006 - type: nauc_recall_at_3_max value: 16.1844 - type: nauc_recall_at_3_std value: -17.0942 - type: nauc_recall_at_3_diff1 value: 47.6143 - type: nauc_recall_at_5_max value: 17.1338 - type: nauc_recall_at_5_std value: -17.636499999999998 - type: nauc_recall_at_5_diff1 value: 44.345600000000005 - type: nauc_recall_at_10_max value: 18.972 - type: nauc_recall_at_10_std value: -15.596099999999998 - type: nauc_recall_at_10_diff1 value: 41.552499999999995 - type: nauc_recall_at_20_max value: 23.8339 - type: nauc_recall_at_20_std value: -14.122699999999998 - type: nauc_recall_at_20_diff1 value: 42.7171 - type: nauc_recall_at_100_max value: 43.231 - type: nauc_recall_at_100_std value: 8.0154 - type: nauc_recall_at_100_diff1 value: 42.7817 - type: nauc_recall_at_1000_max value: 53.58540000000001 - type: nauc_recall_at_1000_std value: 37.0029 - type: nauc_recall_at_1000_diff1 value: 44.239200000000004 - type: nauc_precision_at_1_max value: 25.011400000000002 - type: nauc_precision_at_1_std value: -10.9453 - type: nauc_precision_at_1_diff1 value: 52.5635 - type: nauc_precision_at_3_max value: 22.2424 - type: nauc_precision_at_3_std value: -5.4350000000000005 - type: nauc_precision_at_3_diff1 value: 23.4114 - type: nauc_precision_at_5_max value: 21.3318 - type: nauc_precision_at_5_std value: -2.8209999999999997 - type: nauc_precision_at_5_diff1 value: 14.0476 - type: nauc_precision_at_10_max value: 19.2971 - type: nauc_precision_at_10_std value: 2.5547 - type: nauc_precision_at_10_diff1 value: 4.0724 - type: nauc_precision_at_20_max value: 17.6513 - type: nauc_precision_at_20_std value: 6.0579 - type: nauc_precision_at_20_diff1 value: -3.1468999999999996 - type: nauc_precision_at_100_max value: 14.8878 - type: nauc_precision_at_100_std value: 13.919200000000002 - type: nauc_precision_at_100_diff1 value: -17.358999999999998 - type: nauc_precision_at_1000_max value: 8.6286 - type: nauc_precision_at_1000_std value: 11.5922 - type: nauc_precision_at_1000_diff1 value: -22.1277 - type: nauc_mrr_at_1_max value: 25.011400000000002 - type: nauc_mrr_at_1_std value: -10.9453 - type: nauc_mrr_at_1_diff1 value: 52.5635 - type: nauc_mrr_at_3_max value: 23.816000000000003 - type: nauc_mrr_at_3_std value: -12.188400000000001 - type: nauc_mrr_at_3_diff1 value: 51.1699 - type: nauc_mrr_at_5_max value: 23.7135 - type: nauc_mrr_at_5_std value: -12.1816 - type: nauc_mrr_at_5_diff1 value: 50.339 - type: nauc_mrr_at_10_max value: 23.9975 - type: nauc_mrr_at_10_std value: -11.7119 - type: nauc_mrr_at_10_diff1 value: 50.32489999999999 - type: nauc_mrr_at_20_max value: 24.2972 - type: nauc_mrr_at_20_std value: -11.6891 - type: nauc_mrr_at_20_diff1 value: 50.4005 - type: nauc_mrr_at_100_max value: 24.3557 - type: nauc_mrr_at_100_std value: -11.5637 - type: nauc_mrr_at_100_diff1 value: 50.454100000000004 - type: nauc_mrr_at_1000_max value: 24.334400000000002 - type: nauc_mrr_at_1000_std value: -11.574900000000001 - type: nauc_mrr_at_1000_diff1 value: 50.45269999999999 - type: main_score value: 57.135000000000005 - 
task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval (default) type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: ndcg_at_1 value: 40.753 - type: ndcg_at_3 value: 47.27 - type: ndcg_at_5 value: 50.385999999999996 - type: ndcg_at_10 value: 53.565 - type: ndcg_at_20 value: 55.967999999999996 - type: ndcg_at_100 value: 58.763 - type: ndcg_at_1000 value: 60.02499999999999 - type: map_at_1 value: 33.005 - type: map_at_3 value: 42.314 - type: map_at_5 value: 44.856 - type: map_at_10 value: 46.633 - type: map_at_20 value: 47.494 - type: map_at_100 value: 48.064 - type: map_at_1000 value: 48.14 - type: recall_at_1 value: 33.005 - type: recall_at_3 value: 50.73800000000001 - type: recall_at_5 value: 59.047000000000004 - type: recall_at_10 value: 68.27600000000001 - type: recall_at_20 value: 76.75800000000001 - type: recall_at_100 value: 89.505 - type: recall_at_1000 value: 97.636 - type: precision_at_1 value: 40.753 - type: precision_at_3 value: 22.945 - type: precision_at_5 value: 16.644000000000002 - type: precision_at_10 value: 10.057 - type: precision_at_20 value: 5.862 - type: precision_at_100 value: 1.467 - type: precision_at_1000 value: 0.173 - type: mrr_at_1 value: 40.7534 - type: mrr_at_3 value: 49.048700000000004 - type: mrr_at_5 value: 50.9209 - type: mrr_at_10 value: 52.0898 - type: mrr_at_20 value: 52.605599999999995 - type: mrr_at_100 value: 52.85300000000001 - type: mrr_at_1000 value: 52.8799 - type: nauc_ndcg_at_1_max value: 34.4441 - type: nauc_ndcg_at_1_std value: -7.0414 - type: nauc_ndcg_at_1_diff1 value: 45.8482 - type: nauc_ndcg_at_3_max value: 31.577699999999997 - type: nauc_ndcg_at_3_std value: -6.3458 - type: nauc_ndcg_at_3_diff1 value: 40.919200000000004 - type: nauc_ndcg_at_5_max value: 32.2014 - type: nauc_ndcg_at_5_std value: -5.2417 - type: nauc_ndcg_at_5_diff1 value: 40.288000000000004 - type: nauc_ndcg_at_10_max value: 34.2368 - type: nauc_ndcg_at_10_std value: -4.5674 - type: nauc_ndcg_at_10_diff1 value: 40.5809 - type: nauc_ndcg_at_20_max value: 35.1035 - type: nauc_ndcg_at_20_std value: -3.9905000000000004 - type: nauc_ndcg_at_20_diff1 value: 41.1355 - type: nauc_ndcg_at_100_max value: 35.7455 - type: nauc_ndcg_at_100_std value: -3.2745 - type: nauc_ndcg_at_100_diff1 value: 41.4431 - type: nauc_ndcg_at_1000_max value: 35.1084 - type: nauc_ndcg_at_1000_std value: -4.0846 - type: nauc_ndcg_at_1000_diff1 value: 41.755900000000004 - type: nauc_map_at_1_max value: 28.055200000000003 - type: nauc_map_at_1_std value: -11.2817 - type: nauc_map_at_1_diff1 value: 45.1938 - type: nauc_map_at_3_max value: 29.7864 - type: nauc_map_at_3_std value: -8.1494 - type: nauc_map_at_3_diff1 value: 41.788 - type: nauc_map_at_5_max value: 30.809199999999997 - type: nauc_map_at_5_std value: -7.012599999999999 - type: nauc_map_at_5_diff1 value: 41.554 - type: nauc_map_at_10_max value: 32.2321 - type: nauc_map_at_10_std value: -6.3894 - type: nauc_map_at_10_diff1 value: 41.8427 - type: nauc_map_at_20_max value: 32.7711 - type: nauc_map_at_20_std value: -6.0764 - type: nauc_map_at_20_diff1 value: 42.1419 - type: nauc_map_at_100_max value: 33.0054 - type: nauc_map_at_100_std value: -5.8844 - type: nauc_map_at_100_diff1 value: 42.3068 - type: nauc_map_at_1000_max value: 32.9949 - type: nauc_map_at_1000_std value: -5.9162 - type: nauc_map_at_1000_diff1 value: 42.3228 - type: nauc_recall_at_1_max value: 28.055200000000003 - type: nauc_recall_at_1_std value: -11.2817 - type: nauc_recall_at_1_diff1 
value: 45.1938 - type: nauc_recall_at_3_max value: 27.1828 - type: nauc_recall_at_3_std value: -6.9705 - type: nauc_recall_at_3_diff1 value: 35.2147 - type: nauc_recall_at_5_max value: 28.0093 - type: nauc_recall_at_5_std value: -2.9148 - type: nauc_recall_at_5_diff1 value: 32.376599999999996 - type: nauc_recall_at_10_max value: 33.3355 - type: nauc_recall_at_10_std value: -0.4752 - type: nauc_recall_at_10_diff1 value: 32.5726 - type: nauc_recall_at_20_max value: 35.9026 - type: nauc_recall_at_20_std value: 3.1338 - type: nauc_recall_at_20_diff1 value: 32.1894 - type: nauc_recall_at_100_max value: 45.4995 - type: nauc_recall_at_100_std value: 18.2978 - type: nauc_recall_at_100_diff1 value: 29.535 - type: nauc_recall_at_1000_max value: 42.8817 - type: nauc_recall_at_1000_std value: 34.7251 - type: nauc_recall_at_1000_diff1 value: 33.1814 - type: nauc_precision_at_1_max value: 34.4441 - type: nauc_precision_at_1_std value: -7.0414 - type: nauc_precision_at_1_diff1 value: 45.8482 - type: nauc_precision_at_3_max value: 30.514000000000003 - type: nauc_precision_at_3_std value: 2.968 - type: nauc_precision_at_3_diff1 value: 25.0624 - type: nauc_precision_at_5_max value: 30.268 - type: nauc_precision_at_5_std value: 7.8429 - type: nauc_precision_at_5_diff1 value: 18.8704 - type: nauc_precision_at_10_max value: 31.6838 - type: nauc_precision_at_10_std value: 11.9131 - type: nauc_precision_at_10_diff1 value: 14.0232 - type: nauc_precision_at_20_max value: 28.375099999999996 - type: nauc_precision_at_20_std value: 13.497700000000002 - type: nauc_precision_at_20_diff1 value: 10.795 - type: nauc_precision_at_100_max value: 20.1953 - type: nauc_precision_at_100_std value: 14.4028 - type: nauc_precision_at_100_diff1 value: 4.6725 - type: nauc_precision_at_1000_max value: 11.3706 - type: nauc_precision_at_1000_std value: 9.1752 - type: nauc_precision_at_1000_diff1 value: 1.302 - type: nauc_mrr_at_1_max value: 34.4441 - type: nauc_mrr_at_1_std value: -7.0414 - type: nauc_mrr_at_1_diff1 value: 45.8482 - type: nauc_mrr_at_3_max value: 34.760799999999996 - type: nauc_mrr_at_3_std value: -5.7082 - type: nauc_mrr_at_3_diff1 value: 41.8373 - type: nauc_mrr_at_5_max value: 35.0958 - type: nauc_mrr_at_5_std value: -4.7876 - type: nauc_mrr_at_5_diff1 value: 41.574299999999994 - type: nauc_mrr_at_10_max value: 35.5072 - type: nauc_mrr_at_10_std value: -4.820399999999999 - type: nauc_mrr_at_10_diff1 value: 41.9727 - type: nauc_mrr_at_20_max value: 35.6201 - type: nauc_mrr_at_20_std value: -4.7524 - type: nauc_mrr_at_20_diff1 value: 42.2289 - type: nauc_mrr_at_100_max value: 35.6408 - type: nauc_mrr_at_100_std value: -4.7266 - type: nauc_mrr_at_100_diff1 value: 42.2145 - type: nauc_mrr_at_1000_max value: 35.6255 - type: nauc_mrr_at_1000_std value: -4.7333 - type: nauc_mrr_at_1000_diff1 value: 42.221399999999996 - type: main_score value: 53.565 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval (default) type: CQADupstackRetrieval_is_a_combined_dataset config: default split: test revision: CQADupstackRetrieval_is_a_combined_dataset metrics: - type: main_score value: 51.03358333333333 - type: ndcg_at_10 value: 51.03358333333333 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval (default) type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: ndcg_at_1 value: 34.355999999999995 - type: ndcg_at_3 value: 39.660000000000004 - type: ndcg_at_5 value: 42.625 - type: ndcg_at_10 value: 45.717 - type: ndcg_at_20 value: 47.738 - 
type: ndcg_at_100 value: 50.586 - type: ndcg_at_1000 value: 52.317 - type: map_at_1 value: 30.009999999999998 - type: map_at_3 value: 36.597 - type: map_at_5 value: 38.507999999999996 - type: map_at_10 value: 40.034 - type: map_at_20 value: 40.633 - type: map_at_100 value: 41.089 - type: map_at_1000 value: 41.166000000000004 - type: recall_at_1 value: 30.009999999999998 - type: recall_at_3 value: 43.646 - type: recall_at_5 value: 50.763000000000005 - type: recall_at_10 value: 60.218 - type: recall_at_20 value: 67.756 - type: recall_at_100 value: 81.78 - type: recall_at_1000 value: 94.179 - type: precision_at_1 value: 34.355999999999995 - type: precision_at_3 value: 17.28 - type: precision_at_5 value: 12.454 - type: precision_at_10 value: 7.485 - type: precision_at_20 value: 4.287 - type: precision_at_100 value: 1.0670000000000002 - type: precision_at_1000 value: 0.128 - type: mrr_at_1 value: 34.355799999999995 - type: mrr_at_3 value: 40.0562 - type: mrr_at_5 value: 41.8124 - type: mrr_at_10 value: 42.998799999999996 - type: mrr_at_20 value: 43.5177 - type: mrr_at_100 value: 43.8815 - type: mrr_at_1000 value: 43.928200000000004 - type: nauc_ndcg_at_1_max value: 22.8762 - type: nauc_ndcg_at_1_std value: -7.6788 - type: nauc_ndcg_at_1_diff1 value: 57.015499999999996 - type: nauc_ndcg_at_3_max value: 22.8095 - type: nauc_ndcg_at_3_std value: -5.3355 - type: nauc_ndcg_at_3_diff1 value: 49.9449 - type: nauc_ndcg_at_5_max value: 25.366100000000003 - type: nauc_ndcg_at_5_std value: -3.8400999999999996 - type: nauc_ndcg_at_5_diff1 value: 49.0563 - type: nauc_ndcg_at_10_max value: 23.7052 - type: nauc_ndcg_at_10_std value: -4.4089 - type: nauc_ndcg_at_10_diff1 value: 47.130300000000005 - type: nauc_ndcg_at_20_max value: 24.2726 - type: nauc_ndcg_at_20_std value: -3.8846 - type: nauc_ndcg_at_20_diff1 value: 47.5163 - type: nauc_ndcg_at_100_max value: 25.487 - type: nauc_ndcg_at_100_std value: -2.1590000000000003 - type: nauc_ndcg_at_100_diff1 value: 47.8372 - type: nauc_ndcg_at_1000_max value: 25.2363 - type: nauc_ndcg_at_1000_std value: -2.5404 - type: nauc_ndcg_at_1000_diff1 value: 48.7815 - type: nauc_map_at_1_max value: 18.9891 - type: nauc_map_at_1_std value: -9.9207 - type: nauc_map_at_1_diff1 value: 55.4997 - type: nauc_map_at_3_max value: 21.235699999999998 - type: nauc_map_at_3_std value: -7.048 - type: nauc_map_at_3_diff1 value: 51.2863 - type: nauc_map_at_5_max value: 23.0436 - type: nauc_map_at_5_std value: -6.1008 - type: nauc_map_at_5_diff1 value: 50.779799999999994 - type: nauc_map_at_10_max value: 22.4576 - type: nauc_map_at_10_std value: -6.3836 - type: nauc_map_at_10_diff1 value: 49.8457 - type: nauc_map_at_20_max value: 22.599800000000002 - type: nauc_map_at_20_std value: -6.2443 - type: nauc_map_at_20_diff1 value: 49.9702 - type: nauc_map_at_100_max value: 22.8352 - type: nauc_map_at_100_std value: -5.9363 - type: nauc_map_at_100_diff1 value: 50.0868 - type: nauc_map_at_1000_max value: 22.8394 - type: nauc_map_at_1000_std value: -5.934699999999999 - type: nauc_map_at_1000_diff1 value: 50.1389 - type: nauc_recall_at_1_max value: 18.9891 - type: nauc_recall_at_1_std value: -9.9207 - type: nauc_recall_at_1_diff1 value: 55.4997 - type: nauc_recall_at_3_max value: 22.3469 - type: nauc_recall_at_3_std value: -3.1021 - type: nauc_recall_at_3_diff1 value: 44.217600000000004 - type: nauc_recall_at_5_max value: 29.2041 - type: nauc_recall_at_5_std value: 1.013 - type: nauc_recall_at_5_diff1 value: 41.4239 - type: nauc_recall_at_10_max value: 23.7313 - type: nauc_recall_at_10_std value: 
0.3575 - type: nauc_recall_at_10_diff1 value: 34.661500000000004 - type: nauc_recall_at_20_max value: 25.496999999999996 - type: nauc_recall_at_20_std value: 3.1315000000000004 - type: nauc_recall_at_20_diff1 value: 34.2149 - type: nauc_recall_at_100_max value: 35.957 - type: nauc_recall_at_100_std value: 21.1095 - type: nauc_recall_at_100_diff1 value: 27.4781 - type: nauc_recall_at_1000_max value: 45.015699999999995 - type: nauc_recall_at_1000_std value: 45.8094 - type: nauc_recall_at_1000_diff1 value: 22.481499999999997 - type: nauc_precision_at_1_max value: 22.8762 - type: nauc_precision_at_1_std value: -7.6788 - type: nauc_precision_at_1_diff1 value: 57.015499999999996 - type: nauc_precision_at_3_max value: 24.8891 - type: nauc_precision_at_3_std value: -0.9313 - type: nauc_precision_at_3_diff1 value: 40.6115 - type: nauc_precision_at_5_max value: 28.7576 - type: nauc_precision_at_5_std value: 2.9669 - type: nauc_precision_at_5_diff1 value: 35.298 - type: nauc_precision_at_10_max value: 23.8354 - type: nauc_precision_at_10_std value: 3.2748 - type: nauc_precision_at_10_diff1 value: 24.2013 - type: nauc_precision_at_20_max value: 24.089199999999998 - type: nauc_precision_at_20_std value: 5.7543 - type: nauc_precision_at_20_diff1 value: 20.718 - type: nauc_precision_at_100_max value: 22.074199999999998 - type: nauc_precision_at_100_std value: 12.0253 - type: nauc_precision_at_100_diff1 value: 10.3669 - type: nauc_precision_at_1000_max value: 12.845799999999999 - type: nauc_precision_at_1000_std value: 8.9314 - type: nauc_precision_at_1000_diff1 value: 4.3847 - type: nauc_mrr_at_1_max value: 22.8762 - type: nauc_mrr_at_1_std value: -7.6788 - type: nauc_mrr_at_1_diff1 value: 57.015499999999996 - type: nauc_mrr_at_3_max value: 24.8244 - type: nauc_mrr_at_3_std value: -5.184699999999999 - type: nauc_mrr_at_3_diff1 value: 52.567 - type: nauc_mrr_at_5_max value: 25.9477 - type: nauc_mrr_at_5_std value: -4.3008999999999995 - type: nauc_mrr_at_5_diff1 value: 52.0231 - type: nauc_mrr_at_10_max value: 25.164599999999997 - type: nauc_mrr_at_10_std value: -4.3651 - type: nauc_mrr_at_10_diff1 value: 51.3857 - type: nauc_mrr_at_20_max value: 25.210500000000003 - type: nauc_mrr_at_20_std value: -4.3703 - type: nauc_mrr_at_20_diff1 value: 51.4896 - type: nauc_mrr_at_100_max value: 25.3392 - type: nauc_mrr_at_100_std value: -4.174300000000001 - type: nauc_mrr_at_100_diff1 value: 51.6015 - type: nauc_mrr_at_1000_max value: 25.3401 - type: nauc_mrr_at_1000_std value: -4.1697 - type: nauc_mrr_at_1000_diff1 value: 51.623799999999996 - type: main_score value: 45.717 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval (default) type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: ndcg_at_1 value: 26.807 - type: ndcg_at_3 value: 32.134 - type: ndcg_at_5 value: 34.37 - type: ndcg_at_10 value: 37.219 - type: ndcg_at_20 value: 39.412000000000006 - type: ndcg_at_100 value: 42.775 - type: ndcg_at_1000 value: 45.174 - type: map_at_1 value: 21.89 - type: map_at_3 value: 28.498 - type: map_at_5 value: 30.076999999999998 - type: map_at_10 value: 31.455 - type: map_at_20 value: 32.173 - type: map_at_100 value: 32.738 - type: map_at_1000 value: 32.86 - type: recall_at_1 value: 21.89 - type: recall_at_3 value: 35.674 - type: recall_at_5 value: 41.582 - type: recall_at_10 value: 49.988 - type: recall_at_20 value: 58.012 - type: recall_at_100 value: 74.357 - type: recall_at_1000 value: 91.003 - type: precision_at_1 value: 26.807 - type: 
precision_at_3 value: 15.359 - type: precision_at_5 value: 11.067 - type: precision_at_10 value: 6.912999999999999 - type: precision_at_20 value: 4.14 - type: precision_at_100 value: 1.137 - type: precision_at_1000 value: 0.152 - type: mrr_at_1 value: 26.806600000000003 - type: mrr_at_3 value: 33.276 - type: mrr_at_5 value: 34.685100000000006 - type: mrr_at_10 value: 35.8652 - type: mrr_at_20 value: 36.3975 - type: mrr_at_100 value: 36.7734 - type: mrr_at_1000 value: 36.8373 - type: nauc_ndcg_at_1_max value: 22.2824 - type: nauc_ndcg_at_1_std value: -1.1636 - type: nauc_ndcg_at_1_diff1 value: 44.0723 - type: nauc_ndcg_at_3_max value: 23.5119 - type: nauc_ndcg_at_3_std value: 0.2747 - type: nauc_ndcg_at_3_diff1 value: 37.7517 - type: nauc_ndcg_at_5_max value: 23.494200000000003 - type: nauc_ndcg_at_5_std value: 0.5172 - type: nauc_ndcg_at_5_diff1 value: 35.808800000000005 - type: nauc_ndcg_at_10_max value: 23.9137 - type: nauc_ndcg_at_10_std value: 1.2572 - type: nauc_ndcg_at_10_diff1 value: 35.3517 - type: nauc_ndcg_at_20_max value: 24.147299999999998 - type: nauc_ndcg_at_20_std value: 1.7857999999999998 - type: nauc_ndcg_at_20_diff1 value: 34.904 - type: nauc_ndcg_at_100_max value: 24.677 - type: nauc_ndcg_at_100_std value: 3.3762 - type: nauc_ndcg_at_100_diff1 value: 35.476400000000005 - type: nauc_ndcg_at_1000_max value: 24.9518 - type: nauc_ndcg_at_1000_std value: 3.3005 - type: nauc_ndcg_at_1000_diff1 value: 35.9856 - type: nauc_map_at_1_max value: 18.5395 - type: nauc_map_at_1_std value: -1.8748 - type: nauc_map_at_1_diff1 value: 43.2271 - type: nauc_map_at_3_max value: 21.956300000000002 - type: nauc_map_at_3_std value: -0.3228 - type: nauc_map_at_3_diff1 value: 39.0086 - type: nauc_map_at_5_max value: 22.2144 - type: nauc_map_at_5_std value: -0.1749 - type: nauc_map_at_5_diff1 value: 37.7466 - type: nauc_map_at_10_max value: 22.621 - type: nauc_map_at_10_std value: 0.11750000000000001 - type: nauc_map_at_10_diff1 value: 37.5604 - type: nauc_map_at_20_max value: 22.744 - type: nauc_map_at_20_std value: 0.3284 - type: nauc_map_at_20_diff1 value: 37.4046 - type: nauc_map_at_100_max value: 22.9403 - type: nauc_map_at_100_std value: 0.594 - type: nauc_map_at_100_diff1 value: 37.519999999999996 - type: nauc_map_at_1000_max value: 22.983 - type: nauc_map_at_1000_std value: 0.6118 - type: nauc_map_at_1000_diff1 value: 37.5586 - type: nauc_recall_at_1_max value: 18.5395 - type: nauc_recall_at_1_std value: -1.8748 - type: nauc_recall_at_1_diff1 value: 43.2271 - type: nauc_recall_at_3_max value: 22.7927 - type: nauc_recall_at_3_std value: 1.0538 - type: nauc_recall_at_3_diff1 value: 33.2051 - type: nauc_recall_at_5_max value: 22.7185 - type: nauc_recall_at_5_std value: 1.3141 - type: nauc_recall_at_5_diff1 value: 28.321099999999998 - type: nauc_recall_at_10_max value: 23.3274 - type: nauc_recall_at_10_std value: 3.3770000000000002 - type: nauc_recall_at_10_diff1 value: 26.0137 - type: nauc_recall_at_20_max value: 23.8623 - type: nauc_recall_at_20_std value: 5.5042 - type: nauc_recall_at_20_diff1 value: 23.5772 - type: nauc_recall_at_100_max value: 26.5351 - type: nauc_recall_at_100_std value: 17.011100000000003 - type: nauc_recall_at_100_diff1 value: 23.150399999999998 - type: nauc_recall_at_1000_max value: 35.7909 - type: nauc_recall_at_1000_std value: 33.4656 - type: nauc_recall_at_1000_diff1 value: 19.8029 - type: nauc_precision_at_1_max value: 22.2824 - type: nauc_precision_at_1_std value: -1.1636 - type: nauc_precision_at_1_diff1 value: 44.0723 - type: nauc_precision_at_3_max value: 
27.798099999999998 - type: nauc_precision_at_3_std value: 2.538 - type: nauc_precision_at_3_diff1 value: 30.9728 - type: nauc_precision_at_5_max value: 26.5049 - type: nauc_precision_at_5_std value: 2.7146 - type: nauc_precision_at_5_diff1 value: 24.1766 - type: nauc_precision_at_10_max value: 26.168799999999997 - type: nauc_precision_at_10_std value: 4.5483 - type: nauc_precision_at_10_diff1 value: 19.7263 - type: nauc_precision_at_20_max value: 24.2909 - type: nauc_precision_at_20_std value: 5.985399999999999 - type: nauc_precision_at_20_diff1 value: 14.394699999999998 - type: nauc_precision_at_100_max value: 20.945700000000002 - type: nauc_precision_at_100_std value: 9.717099999999999 - type: nauc_precision_at_100_diff1 value: 10.1707 - type: nauc_precision_at_1000_max value: 17.9958 - type: nauc_precision_at_1000_std value: 6.352399999999999 - type: nauc_precision_at_1000_diff1 value: 6.671100000000001 - type: nauc_mrr_at_1_max value: 22.2824 - type: nauc_mrr_at_1_std value: -1.1636 - type: nauc_mrr_at_1_diff1 value: 44.0723 - type: nauc_mrr_at_3_max value: 24.4906 - type: nauc_mrr_at_3_std value: 0.5277 - type: nauc_mrr_at_3_diff1 value: 39.3446 - type: nauc_mrr_at_5_max value: 24.3708 - type: nauc_mrr_at_5_std value: 0.5988 - type: nauc_mrr_at_5_diff1 value: 38.5081 - type: nauc_mrr_at_10_max value: 24.5065 - type: nauc_mrr_at_10_std value: 0.9650000000000001 - type: nauc_mrr_at_10_diff1 value: 38.4531 - type: nauc_mrr_at_20_max value: 24.577099999999998 - type: nauc_mrr_at_20_std value: 0.9927999999999999 - type: nauc_mrr_at_20_diff1 value: 38.3527 - type: nauc_mrr_at_100_max value: 24.593999999999998 - type: nauc_mrr_at_100_std value: 1.1214 - type: nauc_mrr_at_100_diff1 value: 38.4554 - type: nauc_mrr_at_1000_max value: 24.5991 - type: nauc_mrr_at_1000_std value: 1.1217 - type: nauc_mrr_at_1000_diff1 value: 38.4672 - type: main_score value: 37.219 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval (default) type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: ndcg_at_1 value: 41.884 - type: ndcg_at_3 value: 47.415 - type: ndcg_at_5 value: 50.442 - type: ndcg_at_10 value: 53.733 - type: ndcg_at_20 value: 55.527 - type: ndcg_at_100 value: 58.12199999999999 - type: ndcg_at_1000 value: 59.540000000000006 - type: map_at_1 value: 35.569 - type: map_at_3 value: 43.517 - type: map_at_5 value: 45.673 - type: map_at_10 value: 47.373 - type: map_at_20 value: 47.997 - type: map_at_100 value: 48.449999999999996 - type: map_at_1000 value: 48.524 - type: recall_at_1 value: 35.569 - type: recall_at_3 value: 51.43600000000001 - type: recall_at_5 value: 59.229 - type: recall_at_10 value: 68.675 - type: recall_at_20 value: 74.935 - type: recall_at_100 value: 87.12100000000001 - type: recall_at_1000 value: 96.389 - type: precision_at_1 value: 41.884 - type: precision_at_3 value: 21.735 - type: precision_at_5 value: 15.354000000000001 - type: precision_at_10 value: 9.207 - type: precision_at_20 value: 5.159 - type: precision_at_100 value: 1.2510000000000001 - type: precision_at_1000 value: 0.147 - type: mrr_at_1 value: 41.8843 - type: mrr_at_3 value: 48.8029 - type: mrr_at_5 value: 50.5053 - type: mrr_at_10 value: 51.6938 - type: mrr_at_20 value: 52.0562 - type: mrr_at_100 value: 52.3425 - type: mrr_at_1000 value: 52.3775 - type: nauc_ndcg_at_1_max value: 39.1822 - type: nauc_ndcg_at_1_std value: -10.6489 - type: nauc_ndcg_at_1_diff1 value: 53.662299999999995 - type: nauc_ndcg_at_3_max value: 39.4505 - type: 
nauc_ndcg_at_3_std value: -10.6853 - type: nauc_ndcg_at_3_diff1 value: 48.5749 - type: nauc_ndcg_at_5_max value: 38.7618 - type: nauc_ndcg_at_5_std value: -10.972800000000001 - type: nauc_ndcg_at_5_diff1 value: 47.846 - type: nauc_ndcg_at_10_max value: 38.9284 - type: nauc_ndcg_at_10_std value: -10.6664 - type: nauc_ndcg_at_10_diff1 value: 46.9536 - type: nauc_ndcg_at_20_max value: 39.760400000000004 - type: nauc_ndcg_at_20_std value: -9.5981 - type: nauc_ndcg_at_20_diff1 value: 47.6581 - type: nauc_ndcg_at_100_max value: 40.1241 - type: nauc_ndcg_at_100_std value: -8.7613 - type: nauc_ndcg_at_100_diff1 value: 47.8645 - type: nauc_ndcg_at_1000_max value: 39.8779 - type: nauc_ndcg_at_1000_std value: -9.0252 - type: nauc_ndcg_at_1000_diff1 value: 48.0716 - type: nauc_map_at_1_max value: 33.7108 - type: nauc_map_at_1_std value: -11.0197 - type: nauc_map_at_1_diff1 value: 51.6481 - type: nauc_map_at_3_max value: 37.4784 - type: nauc_map_at_3_std value: -11.2809 - type: nauc_map_at_3_diff1 value: 49.408 - type: nauc_map_at_5_max value: 37.6673 - type: nauc_map_at_5_std value: -11.2829 - type: nauc_map_at_5_diff1 value: 48.89 - type: nauc_map_at_10_max value: 37.9209 - type: nauc_map_at_10_std value: -11.2194 - type: nauc_map_at_10_diff1 value: 48.2784 - type: nauc_map_at_20_max value: 38.2975 - type: nauc_map_at_20_std value: -10.8997 - type: nauc_map_at_20_diff1 value: 48.547000000000004 - type: nauc_map_at_100_max value: 38.352799999999995 - type: nauc_map_at_100_std value: -10.7712 - type: nauc_map_at_100_diff1 value: 48.5685 - type: nauc_map_at_1000_max value: 38.3309 - type: nauc_map_at_1000_std value: -10.7669 - type: nauc_map_at_1000_diff1 value: 48.5663 - type: nauc_recall_at_1_max value: 33.7108 - type: nauc_recall_at_1_std value: -11.0197 - type: nauc_recall_at_1_diff1 value: 51.6481 - type: nauc_recall_at_3_max value: 37.8568 - type: nauc_recall_at_3_std value: -10.046 - type: nauc_recall_at_3_diff1 value: 44.973200000000006 - type: nauc_recall_at_5_max value: 36.5963 - type: nauc_recall_at_5_std value: -10.656 - type: nauc_recall_at_5_diff1 value: 41.8226 - type: nauc_recall_at_10_max value: 36.905300000000004 - type: nauc_recall_at_10_std value: -9.5656 - type: nauc_recall_at_10_diff1 value: 37.8973 - type: nauc_recall_at_20_max value: 40.465 - type: nauc_recall_at_20_std value: -4.2909999999999995 - type: nauc_recall_at_20_diff1 value: 40.2965 - type: nauc_recall_at_100_max value: 47.295500000000004 - type: nauc_recall_at_100_std value: 6.931900000000001 - type: nauc_recall_at_100_diff1 value: 39.684599999999996 - type: nauc_recall_at_1000_max value: 64.6766 - type: nauc_recall_at_1000_std value: 32.608399999999996 - type: nauc_recall_at_1000_diff1 value: 41.2191 - type: nauc_precision_at_1_max value: 39.1822 - type: nauc_precision_at_1_std value: -10.6489 - type: nauc_precision_at_1_diff1 value: 53.662299999999995 - type: nauc_precision_at_3_max value: 37.938 - type: nauc_precision_at_3_std value: -7.1814 - type: nauc_precision_at_3_diff1 value: 33.5813 - type: nauc_precision_at_5_max value: 33.5192 - type: nauc_precision_at_5_std value: -5.5998 - type: nauc_precision_at_5_diff1 value: 24.4701 - type: nauc_precision_at_10_max value: 27.776600000000002 - type: nauc_precision_at_10_std value: -4.016900000000001 - type: nauc_precision_at_10_diff1 value: 13.019400000000001 - type: nauc_precision_at_20_max value: 25.036199999999997 - type: nauc_precision_at_20_std value: 0.1629 - type: nauc_precision_at_20_diff1 value: 9.332 - type: nauc_precision_at_100_max value: 14.1849 - type: 
nauc_precision_at_100_std value: 6.534800000000001 - type: nauc_precision_at_100_diff1 value: -3.1784 - type: nauc_precision_at_1000_max value: 0.3891 - type: nauc_precision_at_1000_std value: 4.8176 - type: nauc_precision_at_1000_diff1 value: -13.1996 - type: nauc_mrr_at_1_max value: 39.1822 - type: nauc_mrr_at_1_std value: -10.6489 - type: nauc_mrr_at_1_diff1 value: 53.662299999999995 - type: nauc_mrr_at_3_max value: 40.5435 - type: nauc_mrr_at_3_std value: -9.9119 - type: nauc_mrr_at_3_diff1 value: 50.5792 - type: nauc_mrr_at_5_max value: 40.5036 - type: nauc_mrr_at_5_std value: -10.0048 - type: nauc_mrr_at_5_diff1 value: 50.1912 - type: nauc_mrr_at_10_max value: 40.367 - type: nauc_mrr_at_10_std value: -10.0094 - type: nauc_mrr_at_10_diff1 value: 49.914500000000004 - type: nauc_mrr_at_20_max value: 40.487 - type: nauc_mrr_at_20_std value: -9.8134 - type: nauc_mrr_at_20_diff1 value: 50.068900000000006 - type: nauc_mrr_at_100_max value: 40.4627 - type: nauc_mrr_at_100_std value: -9.7388 - type: nauc_mrr_at_100_diff1 value: 50.094300000000004 - type: nauc_mrr_at_1000_max value: 40.4524 - type: nauc_mrr_at_1000_std value: -9.748700000000001 - type: nauc_mrr_at_1000_diff1 value: 50.1065 - type: main_score value: 53.733 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval (default) type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: ndcg_at_1 value: 37.945 - type: ndcg_at_3 value: 44.157000000000004 - type: ndcg_at_5 value: 46.88 - type: ndcg_at_10 value: 50.208 - type: ndcg_at_20 value: 52.536 - type: ndcg_at_100 value: 55.711999999999996 - type: ndcg_at_1000 value: 57.340999999999994 - type: map_at_1 value: 31.174000000000003 - type: map_at_3 value: 39.391 - type: map_at_5 value: 41.333 - type: map_at_10 value: 43.246 - type: map_at_20 value: 44.21 - type: map_at_100 value: 45.013 - type: map_at_1000 value: 45.221000000000004 - type: recall_at_1 value: 31.174000000000003 - type: recall_at_3 value: 47.085 - type: recall_at_5 value: 54.237 - type: recall_at_10 value: 63.611 - type: recall_at_20 value: 72.473 - type: recall_at_100 value: 87.45100000000001 - type: recall_at_1000 value: 97.429 - type: precision_at_1 value: 37.945 - type: precision_at_3 value: 20.751 - type: precision_at_5 value: 15.02 - type: precision_at_10 value: 9.722999999999999 - type: precision_at_20 value: 5.988 - type: precision_at_100 value: 1.818 - type: precision_at_1000 value: 0.256 - type: mrr_at_1 value: 37.9447 - type: mrr_at_3 value: 45.3228 - type: mrr_at_5 value: 47.0224 - type: mrr_at_10 value: 48.234 - type: mrr_at_20 value: 48.7403 - type: mrr_at_100 value: 49.059999999999995 - type: mrr_at_1000 value: 49.0914 - type: nauc_ndcg_at_1_max value: 26.172 - type: nauc_ndcg_at_1_std value: -9.07 - type: nauc_ndcg_at_1_diff1 value: 46.664899999999996 - type: nauc_ndcg_at_3_max value: 23.9966 - type: nauc_ndcg_at_3_std value: -11.0207 - type: nauc_ndcg_at_3_diff1 value: 43.539 - type: nauc_ndcg_at_5_max value: 24.9051 - type: nauc_ndcg_at_5_std value: -9.9938 - type: nauc_ndcg_at_5_diff1 value: 44.5711 - type: nauc_ndcg_at_10_max value: 27.603 - type: nauc_ndcg_at_10_std value: -8.339599999999999 - type: nauc_ndcg_at_10_diff1 value: 45.121 - type: nauc_ndcg_at_20_max value: 27.1764 - type: nauc_ndcg_at_20_std value: -7.295400000000001 - type: nauc_ndcg_at_20_diff1 value: 43.925 - type: nauc_ndcg_at_100_max value: 28.0352 - type: nauc_ndcg_at_100_std value: -6.6677 - type: nauc_ndcg_at_100_diff1 value: 43.6903 - type: 
nauc_ndcg_at_1000_max value: 27.104400000000002 - type: nauc_ndcg_at_1000_std value: -6.9685 - type: nauc_ndcg_at_1000_diff1 value: 43.952000000000005 - type: nauc_map_at_1_max value: 20.5331 - type: nauc_map_at_1_std value: -12.358600000000001 - type: nauc_map_at_1_diff1 value: 48.4715 - type: nauc_map_at_3_max value: 21.4883 - type: nauc_map_at_3_std value: -12.6776 - type: nauc_map_at_3_diff1 value: 44.2352 - type: nauc_map_at_5_max value: 22.3393 - type: nauc_map_at_5_std value: -11.6253 - type: nauc_map_at_5_diff1 value: 44.4847 - type: nauc_map_at_10_max value: 24.371399999999998 - type: nauc_map_at_10_std value: -10.5509 - type: nauc_map_at_10_diff1 value: 45.3059 - type: nauc_map_at_20_max value: 24.4314 - type: nauc_map_at_20_std value: -10.012799999999999 - type: nauc_map_at_20_diff1 value: 45.1512 - type: nauc_map_at_100_max value: 24.672900000000002 - type: nauc_map_at_100_std value: -9.637500000000001 - type: nauc_map_at_100_diff1 value: 45.31 - type: nauc_map_at_1000_max value: 24.432499999999997 - type: nauc_map_at_1000_std value: -9.5451 - type: nauc_map_at_1000_diff1 value: 45.3162 - type: nauc_recall_at_1_max value: 20.5331 - type: nauc_recall_at_1_std value: -12.358600000000001 - type: nauc_recall_at_1_diff1 value: 48.4715 - type: nauc_recall_at_3_max value: 19.8608 - type: nauc_recall_at_3_std value: -12.6162 - type: nauc_recall_at_3_diff1 value: 39.216699999999996 - type: nauc_recall_at_5_max value: 22.131700000000002 - type: nauc_recall_at_5_std value: -9.728100000000001 - type: nauc_recall_at_5_diff1 value: 39.307900000000004 - type: nauc_recall_at_10_max value: 32.0438 - type: nauc_recall_at_10_std value: -3.6334999999999997 - type: nauc_recall_at_10_diff1 value: 39.2567 - type: nauc_recall_at_20_max value: 32.0439 - type: nauc_recall_at_20_std value: 2.7743 - type: nauc_recall_at_20_diff1 value: 32.6522 - type: nauc_recall_at_100_max value: 47.1356 - type: nauc_recall_at_100_std value: 12.581700000000001 - type: nauc_recall_at_100_diff1 value: 25.913700000000002 - type: nauc_recall_at_1000_max value: 59.09799999999999 - type: nauc_recall_at_1000_std value: 47.4747 - type: nauc_recall_at_1000_diff1 value: -1.6067999999999998 - type: nauc_precision_at_1_max value: 26.172 - type: nauc_precision_at_1_std value: -9.07 - type: nauc_precision_at_1_diff1 value: 46.664899999999996 - type: nauc_precision_at_3_max value: 24.7472 - type: nauc_precision_at_3_std value: -5.6165 - type: nauc_precision_at_3_diff1 value: 29.5543 - type: nauc_precision_at_5_max value: 26.334000000000003 - type: nauc_precision_at_5_std value: 0.8363 - type: nauc_precision_at_5_diff1 value: 26.732899999999997 - type: nauc_precision_at_10_max value: 26.837100000000003 - type: nauc_precision_at_10_std value: 8.7927 - type: nauc_precision_at_10_diff1 value: 20.8763 - type: nauc_precision_at_20_max value: 18.232200000000002 - type: nauc_precision_at_20_std value: 11.752600000000001 - type: nauc_precision_at_20_diff1 value: 11.7568 - type: nauc_precision_at_100_max value: 2.1069 - type: nauc_precision_at_100_std value: 14.2173 - type: nauc_precision_at_100_diff1 value: 0.9792000000000001 - type: nauc_precision_at_1000_max value: -12.2237 - type: nauc_precision_at_1000_std value: 9.9255 - type: nauc_precision_at_1000_diff1 value: -5.8681 - type: nauc_mrr_at_1_max value: 26.172 - type: nauc_mrr_at_1_std value: -9.07 - type: nauc_mrr_at_1_diff1 value: 46.664899999999996 - type: nauc_mrr_at_3_max value: 25.629800000000003 - type: nauc_mrr_at_3_std value: -10.238800000000001 - type: nauc_mrr_at_3_diff1 value: 
44.330799999999996 - type: nauc_mrr_at_5_max value: 26.7314 - type: nauc_mrr_at_5_std value: -9.589 - type: nauc_mrr_at_5_diff1 value: 45.0557 - type: nauc_mrr_at_10_max value: 27.4486 - type: nauc_mrr_at_10_std value: -8.8187 - type: nauc_mrr_at_10_diff1 value: 44.6457 - type: nauc_mrr_at_20_max value: 27.270100000000003 - type: nauc_mrr_at_20_std value: -8.6464 - type: nauc_mrr_at_20_diff1 value: 44.4286 - type: nauc_mrr_at_100_max value: 27.284399999999998 - type: nauc_mrr_at_100_std value: -8.664299999999999 - type: nauc_mrr_at_100_diff1 value: 44.4562 - type: nauc_mrr_at_1000_max value: 27.27 - type: nauc_mrr_at_1000_std value: -8.6626 - type: nauc_mrr_at_1000_diff1 value: 44.465900000000005 - type: main_score value: 50.208 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval (default) type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: ndcg_at_1 value: 27.911 - type: ndcg_at_3 value: 34.677 - type: ndcg_at_5 value: 38.315 - type: ndcg_at_10 value: 40.988 - type: ndcg_at_20 value: 42.99 - type: ndcg_at_100 value: 46.389 - type: ndcg_at_1000 value: 48.172 - type: map_at_1 value: 25.456 - type: map_at_3 value: 31.837 - type: map_at_5 value: 34.097 - type: map_at_10 value: 35.326 - type: map_at_20 value: 35.918 - type: map_at_100 value: 36.434 - type: map_at_1000 value: 36.513 - type: recall_at_1 value: 25.456 - type: recall_at_3 value: 39.892 - type: recall_at_5 value: 48.524 - type: recall_at_10 value: 56.254000000000005 - type: recall_at_20 value: 63.783 - type: recall_at_100 value: 81.164 - type: recall_at_1000 value: 93.89 - type: precision_at_1 value: 27.911 - type: precision_at_3 value: 14.849 - type: precision_at_5 value: 11.054 - type: precision_at_10 value: 6.543 - type: precision_at_20 value: 3.762 - type: precision_at_100 value: 0.9820000000000001 - type: precision_at_1000 value: 0.126 - type: mrr_at_1 value: 27.9113 - type: mrr_at_3 value: 34.5348 - type: mrr_at_5 value: 36.411 - type: mrr_at_10 value: 37.4528 - type: mrr_at_20 value: 37.992399999999996 - type: mrr_at_100 value: 38.4368 - type: mrr_at_1000 value: 38.4845 - type: nauc_ndcg_at_1_max value: 20.846999999999998 - type: nauc_ndcg_at_1_std value: -4.2672 - type: nauc_ndcg_at_1_diff1 value: 44.0328 - type: nauc_ndcg_at_3_max value: 22.7709 - type: nauc_ndcg_at_3_std value: -4.4297 - type: nauc_ndcg_at_3_diff1 value: 39.555099999999996 - type: nauc_ndcg_at_5_max value: 24.9005 - type: nauc_ndcg_at_5_std value: -2.1591 - type: nauc_ndcg_at_5_diff1 value: 37.3692 - type: nauc_ndcg_at_10_max value: 24.909100000000002 - type: nauc_ndcg_at_10_std value: -0.384 - type: nauc_ndcg_at_10_diff1 value: 37.2953 - type: nauc_ndcg_at_20_max value: 25.519399999999997 - type: nauc_ndcg_at_20_std value: 0.2725 - type: nauc_ndcg_at_20_diff1 value: 37.1091 - type: nauc_ndcg_at_100_max value: 25.6145 - type: nauc_ndcg_at_100_std value: 0.8262999999999999 - type: nauc_ndcg_at_100_diff1 value: 36.5502 - type: nauc_ndcg_at_1000_max value: 24.5673 - type: nauc_ndcg_at_1000_std value: 0.060899999999999996 - type: nauc_ndcg_at_1000_diff1 value: 36.9253 - type: nauc_map_at_1_max value: 19.8422 - type: nauc_map_at_1_std value: -5.319100000000001 - type: nauc_map_at_1_diff1 value: 44.1229 - type: nauc_map_at_3_max value: 21.9723 - type: nauc_map_at_3_std value: -5.1189 - type: nauc_map_at_3_diff1 value: 40.771 - type: nauc_map_at_5_max value: 23.4629 - type: nauc_map_at_5_std value: -3.5612 - type: nauc_map_at_5_diff1 value: 39.307700000000004 - 
type: nauc_map_at_10_max value: 23.519499999999997 - type: nauc_map_at_10_std value: -2.8228 - type: nauc_map_at_10_diff1 value: 39.4316 - type: nauc_map_at_20_max value: 23.6993 - type: nauc_map_at_20_std value: -2.5308 - type: nauc_map_at_20_diff1 value: 39.2955 - type: nauc_map_at_100_max value: 23.674799999999998 - type: nauc_map_at_100_std value: -2.4657999999999998 - type: nauc_map_at_100_diff1 value: 39.1997 - type: nauc_map_at_1000_max value: 23.629 - type: nauc_map_at_1000_std value: -2.4773 - type: nauc_map_at_1000_diff1 value: 39.1866 - type: nauc_recall_at_1_max value: 19.8422 - type: nauc_recall_at_1_std value: -5.319100000000001 - type: nauc_recall_at_1_diff1 value: 44.1229 - type: nauc_recall_at_3_max value: 23.5368 - type: nauc_recall_at_3_std value: -4.4474 - type: nauc_recall_at_3_diff1 value: 36.3819 - type: nauc_recall_at_5_max value: 28.0457 - type: nauc_recall_at_5_std value: 0.7798 - type: nauc_recall_at_5_diff1 value: 31.097599999999996 - type: nauc_recall_at_10_max value: 27.5608 - type: nauc_recall_at_10_std value: 5.9596 - type: nauc_recall_at_10_diff1 value: 29.6752 - type: nauc_recall_at_20_max value: 30.1434 - type: nauc_recall_at_20_std value: 8.7057 - type: nauc_recall_at_20_diff1 value: 28.402500000000003 - type: nauc_recall_at_100_max value: 35.001 - type: nauc_recall_at_100_std value: 18.8733 - type: nauc_recall_at_100_diff1 value: 18.171499999999998 - type: nauc_recall_at_1000_max value: 24.1775 - type: nauc_recall_at_1000_std value: 23.6246 - type: nauc_recall_at_1000_diff1 value: 9.8065 - type: nauc_precision_at_1_max value: 20.846999999999998 - type: nauc_precision_at_1_std value: -4.2672 - type: nauc_precision_at_1_diff1 value: 44.0328 - type: nauc_precision_at_3_max value: 25.306600000000003 - type: nauc_precision_at_3_std value: -1.959 - type: nauc_precision_at_3_diff1 value: 36.350500000000004 - type: nauc_precision_at_5_max value: 28.2705 - type: nauc_precision_at_5_std value: 5.4924 - type: nauc_precision_at_5_diff1 value: 28.198099999999997 - type: nauc_precision_at_10_max value: 26.6247 - type: nauc_precision_at_10_std value: 11.3267 - type: nauc_precision_at_10_diff1 value: 25.2188 - type: nauc_precision_at_20_max value: 27.254499999999997 - type: nauc_precision_at_20_std value: 15.3152 - type: nauc_precision_at_20_diff1 value: 19.916 - type: nauc_precision_at_100_max value: 20.3749 - type: nauc_precision_at_100_std value: 20.8664 - type: nauc_precision_at_100_diff1 value: 3.8397 - type: nauc_precision_at_1000_max value: -12.1998 - type: nauc_precision_at_1000_std value: 2.7227 - type: nauc_precision_at_1000_diff1 value: -18.4254 - type: nauc_mrr_at_1_max value: 20.846999999999998 - type: nauc_mrr_at_1_std value: -4.2672 - type: nauc_mrr_at_1_diff1 value: 44.0328 - type: nauc_mrr_at_3_max value: 22.907 - type: nauc_mrr_at_3_std value: -3.8749 - type: nauc_mrr_at_3_diff1 value: 40.1759 - type: nauc_mrr_at_5_max value: 23.819499999999998 - type: nauc_mrr_at_5_std value: -2.5065 - type: nauc_mrr_at_5_diff1 value: 39.2975 - type: nauc_mrr_at_10_max value: 23.8817 - type: nauc_mrr_at_10_std value: -1.6466999999999998 - type: nauc_mrr_at_10_diff1 value: 39.1727 - type: nauc_mrr_at_20_max value: 24 - type: nauc_mrr_at_20_std value: -1.5741 - type: nauc_mrr_at_20_diff1 value: 39.1967 - type: nauc_mrr_at_100_max value: 23.811799999999998 - type: nauc_mrr_at_100_std value: -1.6327 - type: nauc_mrr_at_100_diff1 value: 39.0917 - type: nauc_mrr_at_1000_max value: 23.7897 - type: nauc_mrr_at_1000_std value: -1.6494000000000002 - type: 
nauc_mrr_at_1000_diff1 value: 39.1019 - type: main_score value: 40.988 - task: type: Retrieval dataset: name: MTEB ClimateFEVER (default) type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: ndcg_at_1 value: 45.668 - type: ndcg_at_3 value: 38.864 - type: ndcg_at_5 value: 41.327000000000005 - type: ndcg_at_10 value: 45.04 - type: ndcg_at_20 value: 47.542 - type: ndcg_at_100 value: 50.183 - type: ndcg_at_1000 value: 52.129000000000005 - type: map_at_1 value: 20.186 - type: map_at_3 value: 29.237000000000002 - type: map_at_5 value: 32.458999999999996 - type: map_at_10 value: 34.713 - type: map_at_20 value: 35.759 - type: map_at_100 value: 36.351 - type: map_at_1000 value: 36.455 - type: recall_at_1 value: 20.186 - type: recall_at_3 value: 34.772 - type: recall_at_5 value: 42.491 - type: recall_at_10 value: 50.611 - type: recall_at_20 value: 57.595 - type: recall_at_100 value: 67.374 - type: recall_at_1000 value: 78.244 - type: precision_at_1 value: 45.668 - type: precision_at_3 value: 29.316 - type: precision_at_5 value: 22.306 - type: precision_at_10 value: 13.668 - type: precision_at_20 value: 7.925 - type: precision_at_100 value: 1.9109999999999998 - type: precision_at_1000 value: 0.22899999999999998 - type: mrr_at_1 value: 45.6678 - type: mrr_at_3 value: 55.7438 - type: mrr_at_5 value: 57.3398 - type: mrr_at_10 value: 58.032799999999995 - type: mrr_at_20 value: 58.3472 - type: mrr_at_100 value: 58.4846 - type: mrr_at_1000 value: 58.504400000000004 - type: nauc_ndcg_at_1_max value: 39.312599999999996 - type: nauc_ndcg_at_1_std value: 13.444600000000001 - type: nauc_ndcg_at_1_diff1 value: 31.551499999999997 - type: nauc_ndcg_at_3_max value: 40.7886 - type: nauc_ndcg_at_3_std value: 11.7545 - type: nauc_ndcg_at_3_diff1 value: 24.758399999999998 - type: nauc_ndcg_at_5_max value: 41.4458 - type: nauc_ndcg_at_5_std value: 12.7212 - type: nauc_ndcg_at_5_diff1 value: 23.8522 - type: nauc_ndcg_at_10_max value: 41.6993 - type: nauc_ndcg_at_10_std value: 14.6038 - type: nauc_ndcg_at_10_diff1 value: 23.8755 - type: nauc_ndcg_at_20_max value: 41.4782 - type: nauc_ndcg_at_20_std value: 17.1696 - type: nauc_ndcg_at_20_diff1 value: 23.877200000000002 - type: nauc_ndcg_at_100_max value: 41.652499999999996 - type: nauc_ndcg_at_100_std value: 19.2863 - type: nauc_ndcg_at_100_diff1 value: 23.9355 - type: nauc_ndcg_at_1000_max value: 41.7572 - type: nauc_ndcg_at_1000_std value: 19.889200000000002 - type: nauc_ndcg_at_1000_diff1 value: 24.0865 - type: nauc_map_at_1_max value: 34.5948 - type: nauc_map_at_1_std value: 9.3331 - type: nauc_map_at_1_diff1 value: 33.4788 - type: nauc_map_at_3_max value: 39.2329 - type: nauc_map_at_3_std value: 11.0441 - type: nauc_map_at_3_diff1 value: 26.2025 - type: nauc_map_at_5_max value: 40.1248 - type: nauc_map_at_5_std value: 12.484 - type: nauc_map_at_5_diff1 value: 24.7156 - type: nauc_map_at_10_max value: 40.6486 - type: nauc_map_at_10_std value: 13.386400000000002 - type: nauc_map_at_10_diff1 value: 24.726100000000002 - type: nauc_map_at_20_max value: 40.6126 - type: nauc_map_at_20_std value: 14.5582 - type: nauc_map_at_20_diff1 value: 24.6569 - type: nauc_map_at_100_max value: 40.7502 - type: nauc_map_at_100_std value: 15.082899999999999 - type: nauc_map_at_100_diff1 value: 24.5925 - type: nauc_map_at_1000_max value: 40.745 - type: nauc_map_at_1000_std value: 15.1392 - type: nauc_map_at_1000_diff1 value: 24.6006 - type: nauc_recall_at_1_max value: 34.5948 - type: nauc_recall_at_1_std value: 9.3331 - 
type: nauc_recall_at_1_diff1 value: 33.4788 - type: nauc_recall_at_3_max value: 38.5191 - type: nauc_recall_at_3_std value: 9.8077 - type: nauc_recall_at_3_diff1 value: 21.4604 - type: nauc_recall_at_5_max value: 38.1356 - type: nauc_recall_at_5_std value: 11.158 - type: nauc_recall_at_5_diff1 value: 17.6417 - type: nauc_recall_at_10_max value: 36.6836 - type: nauc_recall_at_10_std value: 14.6125 - type: nauc_recall_at_10_diff1 value: 16.9109 - type: nauc_recall_at_20_max value: 34.7404 - type: nauc_recall_at_20_std value: 20.89 - type: nauc_recall_at_20_diff1 value: 16.233 - type: nauc_recall_at_100_max value: 33.6466 - type: nauc_recall_at_100_std value: 28.839399999999998 - type: nauc_recall_at_100_diff1 value: 15.2031 - type: nauc_recall_at_1000_max value: 33.4333 - type: nauc_recall_at_1000_std value: 35.3876 - type: nauc_recall_at_1000_diff1 value: 14.2567 - type: nauc_precision_at_1_max value: 39.312599999999996 - type: nauc_precision_at_1_std value: 13.444600000000001 - type: nauc_precision_at_1_diff1 value: 31.551499999999997 - type: nauc_precision_at_3_max value: 38.6969 - type: nauc_precision_at_3_std value: 11.604000000000001 - type: nauc_precision_at_3_diff1 value: 12.2982 - type: nauc_precision_at_5_max value: 34.0346 - type: nauc_precision_at_5_std value: 13.222700000000001 - type: nauc_precision_at_5_diff1 value: 7.2342 - type: nauc_precision_at_10_max value: 29.3584 - type: nauc_precision_at_10_std value: 16.1479 - type: nauc_precision_at_10_diff1 value: 5.3597 - type: nauc_precision_at_20_max value: 23.502799999999997 - type: nauc_precision_at_20_std value: 21.465799999999998 - type: nauc_precision_at_20_diff1 value: 2.835 - type: nauc_precision_at_100_max value: 16.001 - type: nauc_precision_at_100_std value: 26.1729 - type: nauc_precision_at_100_diff1 value: -1.1341999999999999 - type: nauc_precision_at_1000_max value: 6.7147 - type: nauc_precision_at_1000_std value: 25.3562 - type: nauc_precision_at_1000_diff1 value: -5.8931 - type: nauc_mrr_at_1_max value: 39.312599999999996 - type: nauc_mrr_at_1_std value: 13.444600000000001 - type: nauc_mrr_at_1_diff1 value: 31.551499999999997 - type: nauc_mrr_at_3_max value: 41.599799999999995 - type: nauc_mrr_at_3_std value: 13.084499999999998 - type: nauc_mrr_at_3_diff1 value: 27.8827 - type: nauc_mrr_at_5_max value: 41.7667 - type: nauc_mrr_at_5_std value: 13.2025 - type: nauc_mrr_at_5_diff1 value: 27.8692 - type: nauc_mrr_at_10_max value: 41.6294 - type: nauc_mrr_at_10_std value: 13.9039 - type: nauc_mrr_at_10_diff1 value: 27.9569 - type: nauc_mrr_at_20_max value: 41.6353 - type: nauc_mrr_at_20_std value: 13.9752 - type: nauc_mrr_at_20_diff1 value: 28.0767 - type: nauc_mrr_at_100_max value: 41.6002 - type: nauc_mrr_at_100_std value: 14.0432 - type: nauc_mrr_at_100_diff1 value: 28.1348 - type: nauc_mrr_at_1000_max value: 41.5999 - type: nauc_mrr_at_1000_std value: 14.043 - type: nauc_mrr_at_1000_diff1 value: 28.1343 - type: main_score value: 45.04 - task: type: Retrieval dataset: name: MTEB DBPedia (default) type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: ndcg_at_1 value: 65.625 - type: ndcg_at_3 value: 57.938 - type: ndcg_at_5 value: 55.498999999999995 - type: ndcg_at_10 value: 52.577 - type: ndcg_at_20 value: 52.566 - type: ndcg_at_100 value: 58.352000000000004 - type: ndcg_at_1000 value: 64.887 - type: map_at_1 value: 10.327 - type: map_at_3 value: 17.702 - type: map_at_5 value: 21.409 - type: map_at_10 value: 25.832 - type: map_at_20 value: 31.006 - type: 
map_at_100 value: 38.357 - type: map_at_1000 value: 40.194 - type: recall_at_1 value: 10.327 - type: recall_at_3 value: 18.999 - type: recall_at_5 value: 24.297 - type: recall_at_10 value: 31.435000000000002 - type: recall_at_20 value: 41.801 - type: recall_at_100 value: 64.751 - type: recall_at_1000 value: 86.043 - type: precision_at_1 value: 76.5 - type: precision_at_3 value: 61.833000000000006 - type: precision_at_5 value: 53.55 - type: precision_at_10 value: 41.8 - type: precision_at_20 value: 32.963 - type: precision_at_100 value: 13.498 - type: precision_at_1000 value: 2.357 - type: mrr_at_1 value: 76.5 - type: mrr_at_3 value: 82.8333 - type: mrr_at_5 value: 83.4458 - type: mrr_at_10 value: 83.6805 - type: mrr_at_20 value: 83.7449 - type: mrr_at_100 value: 83.8219 - type: mrr_at_1000 value: 83.8283 - type: nauc_ndcg_at_1_max value: 51.568400000000004 - type: nauc_ndcg_at_1_std value: 30.5435 - type: nauc_ndcg_at_1_diff1 value: 49.4987 - type: nauc_ndcg_at_3_max value: 43.4883 - type: nauc_ndcg_at_3_std value: 31.5687 - type: nauc_ndcg_at_3_diff1 value: 30.1514 - type: nauc_ndcg_at_5_max value: 42.2335 - type: nauc_ndcg_at_5_std value: 32.3902 - type: nauc_ndcg_at_5_diff1 value: 31.9359 - type: nauc_ndcg_at_10_max value: 42.0877 - type: nauc_ndcg_at_10_std value: 31.6409 - type: nauc_ndcg_at_10_diff1 value: 34.9684 - type: nauc_ndcg_at_20_max value: 39.1372 - type: nauc_ndcg_at_20_std value: 27.4368 - type: nauc_ndcg_at_20_diff1 value: 34.865899999999996 - type: nauc_ndcg_at_100_max value: 42.838300000000004 - type: nauc_ndcg_at_100_std value: 35.3636 - type: nauc_ndcg_at_100_diff1 value: 36.2467 - type: nauc_ndcg_at_1000_max value: 48.1669 - type: nauc_ndcg_at_1000_std value: 43.3838 - type: nauc_ndcg_at_1000_diff1 value: 36.2397 - type: nauc_map_at_1_max value: -4.0852 - type: nauc_map_at_1_std value: -20.336299999999998 - type: nauc_map_at_1_diff1 value: 37.5075 - type: nauc_map_at_3_max value: 5.606 - type: nauc_map_at_3_std value: -15.477599999999999 - type: nauc_map_at_3_diff1 value: 30.1676 - type: nauc_map_at_5_max value: 9.9675 - type: nauc_map_at_5_std value: -10.4882 - type: nauc_map_at_5_diff1 value: 29.8808 - type: nauc_map_at_10_max value: 16.0247 - type: nauc_map_at_10_std value: -1.3446 - type: nauc_map_at_10_diff1 value: 30.4367 - type: nauc_map_at_20_max value: 23.0361 - type: nauc_map_at_20_std value: 8.992899999999999 - type: nauc_map_at_20_diff1 value: 30.1643 - type: nauc_map_at_100_max value: 31.816699999999997 - type: nauc_map_at_100_std value: 25.555099999999996 - type: nauc_map_at_100_diff1 value: 30.549 - type: nauc_map_at_1000_max value: 33.242399999999996 - type: nauc_map_at_1000_std value: 28.1767 - type: nauc_map_at_1000_diff1 value: 30.0242 - type: nauc_recall_at_1_max value: -4.0852 - type: nauc_recall_at_1_std value: -20.336299999999998 - type: nauc_recall_at_1_diff1 value: 37.5075 - type: nauc_recall_at_3_max value: 2.3935 - type: nauc_recall_at_3_std value: -16.4596 - type: nauc_recall_at_3_diff1 value: 26.9506 - type: nauc_recall_at_5_max value: 5.1899 - type: nauc_recall_at_5_std value: -12.879399999999999 - type: nauc_recall_at_5_diff1 value: 25.2065 - type: nauc_recall_at_10_max value: 11.216 - type: nauc_recall_at_10_std value: -5.339 - type: nauc_recall_at_10_diff1 value: 26.0229 - type: nauc_recall_at_20_max value: 17.707800000000002 - type: nauc_recall_at_20_std value: 3.9654000000000003 - type: nauc_recall_at_20_diff1 value: 27.145200000000003 - type: nauc_recall_at_100_max value: 31.8321 - type: nauc_recall_at_100_std value: 
31.219599999999996 - type: nauc_recall_at_100_diff1 value: 27.9692 - type: nauc_recall_at_1000_max value: 52.7876 - type: nauc_recall_at_1000_std value: 52.9031 - type: nauc_recall_at_1000_diff1 value: 33.1839 - type: nauc_precision_at_1_max value: 61.8036 - type: nauc_precision_at_1_std value: 44.4747 - type: nauc_precision_at_1_diff1 value: 53.412800000000004 - type: nauc_precision_at_3_max value: 43.5783 - type: nauc_precision_at_3_std value: 43.266799999999996 - type: nauc_precision_at_3_diff1 value: 8.7252 - type: nauc_precision_at_5_max value: 41.7952 - type: nauc_precision_at_5_std value: 45.880900000000004 - type: nauc_precision_at_5_diff1 value: 7.077400000000001 - type: nauc_precision_at_10_max value: 38.8324 - type: nauc_precision_at_10_std value: 50.418099999999995 - type: nauc_precision_at_10_diff1 value: 4.1962 - type: nauc_precision_at_20_max value: 35.4474 - type: nauc_precision_at_20_std value: 49.4221 - type: nauc_precision_at_20_diff1 value: 1.1421000000000001 - type: nauc_precision_at_100_max value: 26.096700000000002 - type: nauc_precision_at_100_std value: 43.0639 - type: nauc_precision_at_100_diff1 value: -4.6077 - type: nauc_precision_at_1000_max value: 4.3174 - type: nauc_precision_at_1000_std value: 19.775599999999997 - type: nauc_precision_at_1000_diff1 value: -15.1778 - type: nauc_mrr_at_1_max value: 61.8036 - type: nauc_mrr_at_1_std value: 44.4747 - type: nauc_mrr_at_1_diff1 value: 53.412800000000004 - type: nauc_mrr_at_3_max value: 61.1576 - type: nauc_mrr_at_3_std value: 49.4501 - type: nauc_mrr_at_3_diff1 value: 48.682900000000004 - type: nauc_mrr_at_5_max value: 60.728 - type: nauc_mrr_at_5_std value: 48.776399999999995 - type: nauc_mrr_at_5_diff1 value: 48.9195 - type: nauc_mrr_at_10_max value: 60.7957 - type: nauc_mrr_at_10_std value: 48.849199999999996 - type: nauc_mrr_at_10_diff1 value: 48.6244 - type: nauc_mrr_at_20_max value: 60.879099999999994 - type: nauc_mrr_at_20_std value: 48.715599999999995 - type: nauc_mrr_at_20_diff1 value: 48.6482 - type: nauc_mrr_at_100_max value: 60.7809 - type: nauc_mrr_at_100_std value: 48.5439 - type: nauc_mrr_at_100_diff1 value: 48.869099999999996 - type: nauc_mrr_at_1000_max value: 60.7977 - type: nauc_mrr_at_1000_std value: 48.5617 - type: nauc_mrr_at_1000_diff1 value: 48.875099999999996 - type: main_score value: 52.577 - task: type: Classification dataset: name: MTEB EmotionClassification (default) type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 92.855 - type: f1 value: 89.1999 - type: f1_weighted value: 92.9881 - type: main_score value: 92.855 - task: type: Retrieval dataset: name: MTEB FEVER (default) type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: ndcg_at_1 value: 91.089 - type: ndcg_at_3 value: 92.536 - type: ndcg_at_5 value: 93.135 - type: ndcg_at_10 value: 93.57900000000001 - type: ndcg_at_20 value: 93.828 - type: ndcg_at_100 value: 94.072 - type: ndcg_at_1000 value: 94.195 - type: map_at_1 value: 84.598 - type: map_at_3 value: 90.347 - type: map_at_5 value: 90.928 - type: map_at_10 value: 91.25 - type: map_at_20 value: 91.36800000000001 - type: map_at_100 value: 91.432 - type: map_at_1000 value: 91.44 - type: recall_at_1 value: 84.598 - type: recall_at_3 value: 94.30199999999999 - type: recall_at_5 value: 95.86099999999999 - type: recall_at_10 value: 97.07900000000001 - type: recall_at_20 value: 97.816 - type: recall_at_100 value: 98.775 - type: recall_at_1000 
value: 99.49 - type: precision_at_1 value: 91.089 - type: precision_at_3 value: 34.833 - type: precision_at_5 value: 21.482 - type: precision_at_10 value: 11.020000000000001 - type: precision_at_20 value: 5.614 - type: precision_at_100 value: 1.151 - type: precision_at_1000 value: 0.117 - type: mrr_at_1 value: 91.0891 - type: mrr_at_3 value: 94.56700000000001 - type: mrr_at_5 value: 94.7537 - type: mrr_at_10 value: 94.8075 - type: mrr_at_20 value: 94.8157 - type: mrr_at_100 value: 94.8214 - type: mrr_at_1000 value: 94.82169999999999 - type: nauc_ndcg_at_1_max value: 27.069399999999998 - type: nauc_ndcg_at_1_std value: -14.5981 - type: nauc_ndcg_at_1_diff1 value: 77.91120000000001 - type: nauc_ndcg_at_3_max value: 21.5811 - type: nauc_ndcg_at_3_std value: -4.1468 - type: nauc_ndcg_at_3_diff1 value: 48.83 - type: nauc_ndcg_at_5_max value: 20.523 - type: nauc_ndcg_at_5_std value: -3.3154999999999997 - type: nauc_ndcg_at_5_diff1 value: 47.5873 - type: nauc_ndcg_at_10_max value: 20.2836 - type: nauc_ndcg_at_10_std value: -2.5668 - type: nauc_ndcg_at_10_diff1 value: 48.6967 - type: nauc_ndcg_at_20_max value: 21.810499999999998 - type: nauc_ndcg_at_20_std value: -2.731 - type: nauc_ndcg_at_20_diff1 value: 50.4818 - type: nauc_ndcg_at_100_max value: 22.7895 - type: nauc_ndcg_at_100_std value: -3.3550000000000004 - type: nauc_ndcg_at_100_diff1 value: 52.141099999999994 - type: nauc_ndcg_at_1000_max value: 22.8887 - type: nauc_ndcg_at_1000_std value: -3.8968000000000003 - type: nauc_ndcg_at_1000_diff1 value: 53.1322 - type: nauc_map_at_1_max value: 17.4165 - type: nauc_map_at_1_std value: -13.8024 - type: nauc_map_at_1_diff1 value: 55.0895 - type: nauc_map_at_3_max value: 18.6504 - type: nauc_map_at_3_std value: -5.1091999999999995 - type: nauc_map_at_3_diff1 value: 46.7271 - type: nauc_map_at_5_max value: 18.9415 - type: nauc_map_at_5_std value: -4.5544 - type: nauc_map_at_5_diff1 value: 47.0325 - type: nauc_map_at_10_max value: 19.3631 - type: nauc_map_at_10_std value: -4.2128 - type: nauc_map_at_10_diff1 value: 47.8632 - type: nauc_map_at_20_max value: 19.9518 - type: nauc_map_at_20_std value: -4.1866 - type: nauc_map_at_20_diff1 value: 48.464600000000004 - type: nauc_map_at_100_max value: 20.1926 - type: nauc_map_at_100_std value: -4.2646999999999995 - type: nauc_map_at_100_diff1 value: 48.7761 - type: nauc_map_at_1000_max value: 20.2031 - type: nauc_map_at_1000_std value: -4.2917 - type: nauc_map_at_1000_diff1 value: 48.8186 - type: nauc_recall_at_1_max value: 17.4165 - type: nauc_recall_at_1_std value: -13.8024 - type: nauc_recall_at_1_diff1 value: 55.0895 - type: nauc_recall_at_3_max value: 13.7634 - type: nauc_recall_at_3_std value: 4.8161000000000005 - type: nauc_recall_at_3_diff1 value: 23.3279 - type: nauc_recall_at_5_max value: 11.2744 - type: nauc_recall_at_5_std value: 9.3473 - type: nauc_recall_at_5_diff1 value: 13.1573 - type: nauc_recall_at_10_max value: 7.927199999999999 - type: nauc_recall_at_10_std value: 16.963900000000002 - type: nauc_recall_at_10_diff1 value: 7.453 - type: nauc_recall_at_20_max value: 15.133 - type: nauc_recall_at_20_std value: 22.0635 - type: nauc_recall_at_20_diff1 value: 8.630799999999999 - type: nauc_recall_at_100_max value: 24.5063 - type: nauc_recall_at_100_std value: 29.017799999999998 - type: nauc_recall_at_100_diff1 value: 7.1233 - type: nauc_recall_at_1000_max value: 29.046 - type: nauc_recall_at_1000_std value: 41.5053 - type: nauc_recall_at_1000_diff1 value: 8.9752 - type: nauc_precision_at_1_max value: 27.069399999999998 - type: 
nauc_precision_at_1_std value: -14.5981 - type: nauc_precision_at_1_diff1 value: 77.91120000000001 - type: nauc_precision_at_3_max value: 4.7452000000000005 - type: nauc_precision_at_3_std value: 18.5957 - type: nauc_precision_at_3_diff1 value: -11.627 - type: nauc_precision_at_5_max value: 2.5 - type: nauc_precision_at_5_std value: 17.3486 - type: nauc_precision_at_5_diff1 value: -16.4117 - type: nauc_precision_at_10_max value: 2.2216 - type: nauc_precision_at_10_std value: 15.543899999999999 - type: nauc_precision_at_10_diff1 value: -15.697700000000001 - type: nauc_precision_at_20_max value: 4.5785 - type: nauc_precision_at_20_std value: 13.3715 - type: nauc_precision_at_20_diff1 value: -13.305900000000001 - type: nauc_precision_at_100_max value: 5.5239 - type: nauc_precision_at_100_std value: 10.3968 - type: nauc_precision_at_100_diff1 value: -11.649700000000001 - type: nauc_precision_at_1000_max value: 4.2727 - type: nauc_precision_at_1000_std value: 7.7141 - type: nauc_precision_at_1000_diff1 value: -10.2325 - type: nauc_mrr_at_1_max value: 27.069399999999998 - type: nauc_mrr_at_1_std value: -14.5981 - type: nauc_mrr_at_1_diff1 value: 77.91120000000001 - type: nauc_mrr_at_3_max value: 30.462600000000002 - type: nauc_mrr_at_3_std value: -10.8943 - type: nauc_mrr_at_3_diff1 value: 76.82 - type: nauc_mrr_at_5_max value: 30.1114 - type: nauc_mrr_at_5_std value: -11.483799999999999 - type: nauc_mrr_at_5_diff1 value: 76.5938 - type: nauc_mrr_at_10_max value: 29.8093 - type: nauc_mrr_at_10_std value: -11.4619 - type: nauc_mrr_at_10_diff1 value: 76.7031 - type: nauc_mrr_at_20_max value: 29.817700000000002 - type: nauc_mrr_at_20_std value: -11.5811 - type: nauc_mrr_at_20_diff1 value: 76.7699 - type: nauc_mrr_at_100_max value: 29.8109 - type: nauc_mrr_at_100_std value: -11.6356 - type: nauc_mrr_at_100_diff1 value: 76.7814 - type: nauc_mrr_at_1000_max value: 29.810599999999997 - type: nauc_mrr_at_1000_std value: -11.638 - type: nauc_mrr_at_1000_diff1 value: 76.7821 - type: main_score value: 93.57900000000001 - task: type: Retrieval dataset: name: MTEB FiQA2018 (default) type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: ndcg_at_1 value: 58.48799999999999 - type: ndcg_at_3 value: 56.16100000000001 - type: ndcg_at_5 value: 57.511 - type: ndcg_at_10 value: 60.284000000000006 - type: ndcg_at_20 value: 63.104000000000006 - type: ndcg_at_100 value: 66.61399999999999 - type: ndcg_at_1000 value: 68.08 - type: map_at_1 value: 31.047000000000004 - type: map_at_3 value: 45.858 - type: map_at_5 value: 49.452 - type: map_at_10 value: 52.19200000000001 - type: map_at_20 value: 53.488 - type: map_at_100 value: 54.367 - type: map_at_1000 value: 54.484 - type: recall_at_1 value: 31.047000000000004 - type: recall_at_3 value: 51.278 - type: recall_at_5 value: 58.619 - type: recall_at_10 value: 67.388 - type: recall_at_20 value: 76.058 - type: recall_at_100 value: 89.872 - type: recall_at_1000 value: 98.104 - type: precision_at_1 value: 58.48799999999999 - type: precision_at_3 value: 37.397000000000006 - type: precision_at_5 value: 27.315 - type: precision_at_10 value: 16.636 - type: precision_at_20 value: 9.506 - type: precision_at_100 value: 2.31 - type: precision_at_1000 value: 0.258 - type: mrr_at_1 value: 58.4877 - type: mrr_at_3 value: 65.3035 - type: mrr_at_5 value: 66.5381 - type: mrr_at_10 value: 67.3128 - type: mrr_at_20 value: 67.6732 - type: mrr_at_100 value: 67.8703 - type: mrr_at_1000 value: 67.8843 - type: nauc_ndcg_at_1_max value: 22.2154 
- type: nauc_ndcg_at_1_std value: -6.8969000000000005 - type: nauc_ndcg_at_1_diff1 value: 63.343 - type: nauc_ndcg_at_3_max value: 18.290100000000002 - type: nauc_ndcg_at_3_std value: -4.3137 - type: nauc_ndcg_at_3_diff1 value: 49.6392 - type: nauc_ndcg_at_5_max value: 15.2734 - type: nauc_ndcg_at_5_std value: -4.8328999999999995 - type: nauc_ndcg_at_5_diff1 value: 50.128099999999996 - type: nauc_ndcg_at_10_max value: 14.333499999999999 - type: nauc_ndcg_at_10_std value: -4.4392000000000005 - type: nauc_ndcg_at_10_diff1 value: 50.4035 - type: nauc_ndcg_at_20_max value: 16.0761 - type: nauc_ndcg_at_20_std value: -1.917 - type: nauc_ndcg_at_20_diff1 value: 51.334900000000005 - type: nauc_ndcg_at_100_max value: 18.3939 - type: nauc_ndcg_at_100_std value: -0.16199999999999998 - type: nauc_ndcg_at_100_diff1 value: 51.565099999999994 - type: nauc_ndcg_at_1000_max value: 19.3296 - type: nauc_ndcg_at_1000_std value: -2.0654 - type: nauc_ndcg_at_1000_diff1 value: 51.78620000000001 - type: nauc_map_at_1_max value: 1.4908 - type: nauc_map_at_1_std value: -9.4582 - type: nauc_map_at_1_diff1 value: 53.4035 - type: nauc_map_at_3_max value: 8.225100000000001 - type: nauc_map_at_3_std value: -8.0511 - type: nauc_map_at_3_diff1 value: 49.9005 - type: nauc_map_at_5_max value: 11.188099999999999 - type: nauc_map_at_5_std value: -7.1714 - type: nauc_map_at_5_diff1 value: 49.3836 - type: nauc_map_at_10_max value: 12.885299999999999 - type: nauc_map_at_10_std value: -6.292000000000001 - type: nauc_map_at_10_diff1 value: 49.1492 - type: nauc_map_at_20_max value: 13.8849 - type: nauc_map_at_20_std value: -5.256 - type: nauc_map_at_20_diff1 value: 49.5846 - type: nauc_map_at_100_max value: 14.6337 - type: nauc_map_at_100_std value: -4.7753 - type: nauc_map_at_100_diff1 value: 49.6103 - type: nauc_map_at_1000_max value: 14.6885 - type: nauc_map_at_1000_std value: -4.8452 - type: nauc_map_at_1000_diff1 value: 49.6053 - type: nauc_recall_at_1_max value: 1.4908 - type: nauc_recall_at_1_std value: -9.4582 - type: nauc_recall_at_1_diff1 value: 53.4035 - type: nauc_recall_at_3_max value: 4.301 - type: nauc_recall_at_3_std value: -5.7848999999999995 - type: nauc_recall_at_3_diff1 value: 43.4693 - type: nauc_recall_at_5_max value: 5.289 - type: nauc_recall_at_5_std value: -4.2011 - type: nauc_recall_at_5_diff1 value: 41.1386 - type: nauc_recall_at_10_max value: 4.936999999999999 - type: nauc_recall_at_10_std value: -2.048 - type: nauc_recall_at_10_diff1 value: 39.4644 - type: nauc_recall_at_20_max value: 7.1711 - type: nauc_recall_at_20_std value: 8.978800000000001 - type: nauc_recall_at_20_diff1 value: 40.2059 - type: nauc_recall_at_100_max value: 10.020199999999999 - type: nauc_recall_at_100_std value: 37.0448 - type: nauc_recall_at_100_diff1 value: 34.5356 - type: nauc_recall_at_1000_max value: 30.9022 - type: nauc_recall_at_1000_std value: 42.3465 - type: nauc_recall_at_1000_diff1 value: 34.7997 - type: nauc_precision_at_1_max value: 22.2154 - type: nauc_precision_at_1_std value: -6.8969000000000005 - type: nauc_precision_at_1_diff1 value: 63.343 - type: nauc_precision_at_3_max value: 27.120499999999996 - type: nauc_precision_at_3_std value: 2.8301 - type: nauc_precision_at_3_diff1 value: 21.6329 - type: nauc_precision_at_5_max value: 28.3782 - type: nauc_precision_at_5_std value: 4.8704 - type: nauc_precision_at_5_diff1 value: 12.8683 - type: nauc_precision_at_10_max value: 27.403899999999997 - type: nauc_precision_at_10_std value: 8.1265 - type: nauc_precision_at_10_diff1 value: 5.0926 - type: 
nauc_precision_at_20_max value: 29.383300000000002 - type: nauc_precision_at_20_std value: 12.908100000000001 - type: nauc_precision_at_20_diff1 value: 0.6472 - type: nauc_precision_at_100_max value: 30.294500000000003 - type: nauc_precision_at_100_std value: 15.93 - type: nauc_precision_at_100_diff1 value: -8.704 - type: nauc_precision_at_1000_max value: 29.9313 - type: nauc_precision_at_1000_std value: 10.1372 - type: nauc_precision_at_1000_diff1 value: -13.424800000000001 - type: nauc_mrr_at_1_max value: 22.2154 - type: nauc_mrr_at_1_std value: -6.8969000000000005 - type: nauc_mrr_at_1_diff1 value: 63.343 - type: nauc_mrr_at_3_max value: 23.3901 - type: nauc_mrr_at_3_std value: -4.6844 - type: nauc_mrr_at_3_diff1 value: 60.8869 - type: nauc_mrr_at_5_max value: 22.615299999999998 - type: nauc_mrr_at_5_std value: -4.5552 - type: nauc_mrr_at_5_diff1 value: 60.522 - type: nauc_mrr_at_10_max value: 22.7886 - type: nauc_mrr_at_10_std value: -4.4885 - type: nauc_mrr_at_10_diff1 value: 60.4902 - type: nauc_mrr_at_20_max value: 22.9083 - type: nauc_mrr_at_20_std value: -4.1969 - type: nauc_mrr_at_20_diff1 value: 60.547799999999995 - type: nauc_mrr_at_100_max value: 23.0224 - type: nauc_mrr_at_100_std value: -4.151 - type: nauc_mrr_at_100_diff1 value: 60.581399999999995 - type: nauc_mrr_at_1000_max value: 23.0223 - type: nauc_mrr_at_1000_std value: -4.1821 - type: nauc_mrr_at_1000_diff1 value: 60.5878 - type: main_score value: 60.284000000000006 - task: type: Retrieval dataset: name: MTEB HotpotQA (default) type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: ndcg_at_1 value: 92.086 - type: ndcg_at_3 value: 84.129 - type: ndcg_at_5 value: 86.128 - type: ndcg_at_10 value: 87.473 - type: ndcg_at_20 value: 88.273 - type: ndcg_at_100 value: 89.067 - type: ndcg_at_1000 value: 89.467 - type: map_at_1 value: 46.043 - type: map_at_3 value: 79.89 - type: map_at_5 value: 81.625 - type: map_at_10 value: 82.485 - type: map_at_20 value: 82.83 - type: map_at_100 value: 83.00699999999999 - type: map_at_1000 value: 83.03 - type: recall_at_1 value: 46.043 - type: recall_at_3 value: 83.011 - type: recall_at_5 value: 86.935 - type: recall_at_10 value: 90.304 - type: recall_at_20 value: 92.86999999999999 - type: recall_at_100 value: 96.32 - type: recall_at_1000 value: 98.933 - type: precision_at_1 value: 92.086 - type: precision_at_3 value: 55.340999999999994 - type: precision_at_5 value: 34.774 - type: precision_at_10 value: 18.061 - type: precision_at_20 value: 9.286999999999999 - type: precision_at_100 value: 1.926 - type: precision_at_1000 value: 0.198 - type: mrr_at_1 value: 92.0864 - type: mrr_at_3 value: 94.4452 - type: mrr_at_5 value: 94.6255 - type: mrr_at_10 value: 94.7333 - type: mrr_at_20 value: 94.76440000000001 - type: mrr_at_100 value: 94.7801 - type: mrr_at_1000 value: 94.7809 - type: nauc_ndcg_at_1_max value: 37.6408 - type: nauc_ndcg_at_1_std value: -7.9706 - type: nauc_ndcg_at_1_diff1 value: 64.2193 - type: nauc_ndcg_at_3_max value: 35.579 - type: nauc_ndcg_at_3_std value: 4.5917 - type: nauc_ndcg_at_3_diff1 value: -7.6203 - type: nauc_ndcg_at_5_max value: 37.7564 - type: nauc_ndcg_at_5_std value: 7.9123 - type: nauc_ndcg_at_5_diff1 value: -6.2265 - type: nauc_ndcg_at_10_max value: 38.8436 - type: nauc_ndcg_at_10_std value: 9.86 - type: nauc_ndcg_at_10_diff1 value: -5.3233999999999995 - type: nauc_ndcg_at_20_max value: 39.0612 - type: nauc_ndcg_at_20_std value: 11.0778 - type: nauc_ndcg_at_20_diff1 value: -4.0485 - type: 
nauc_ndcg_at_100_max value: 38.9758 - type: nauc_ndcg_at_100_std value: 10.9792 - type: nauc_ndcg_at_100_diff1 value: -2.8198999999999996 - type: nauc_ndcg_at_1000_max value: 38.366099999999996 - type: nauc_ndcg_at_1000_std value: 9.4395 - type: nauc_ndcg_at_1000_diff1 value: -2.1656 - type: nauc_map_at_1_max value: 37.6408 - type: nauc_map_at_1_std value: -7.9706 - type: nauc_map_at_1_diff1 value: 64.2193 - type: nauc_map_at_3_max value: 33.882 - type: nauc_map_at_3_std value: 3.9527 - type: nauc_map_at_3_diff1 value: -12.516 - type: nauc_map_at_5_max value: 35.452099999999994 - type: nauc_map_at_5_std value: 6.228899999999999 - type: nauc_map_at_5_diff1 value: -11.5097 - type: nauc_map_at_10_max value: 35.961999999999996 - type: nauc_map_at_10_std value: 7.000000000000001 - type: nauc_map_at_10_diff1 value: -11.0337 - type: nauc_map_at_20_max value: 35.9944 - type: nauc_map_at_20_std value: 7.3074 - type: nauc_map_at_20_diff1 value: -10.6965 - type: nauc_map_at_100_max value: 35.970600000000005 - type: nauc_map_at_100_std value: 7.279299999999999 - type: nauc_map_at_100_diff1 value: -10.5362 - type: nauc_map_at_1000_max value: 35.9476 - type: nauc_map_at_1000_std value: 7.2231000000000005 - type: nauc_map_at_1000_diff1 value: -10.5154 - type: nauc_recall_at_1_max value: 37.6408 - type: nauc_recall_at_1_std value: -7.9706 - type: nauc_recall_at_1_diff1 value: 64.2193 - type: nauc_recall_at_3_max value: 35.9731 - type: nauc_recall_at_3_std value: 8.0627 - type: nauc_recall_at_3_diff1 value: -18.9248 - type: nauc_recall_at_5_max value: 40.184799999999996 - type: nauc_recall_at_5_std value: 15.5623 - type: nauc_recall_at_5_diff1 value: -18.8156 - type: nauc_recall_at_10_max value: 43.8976 - type: nauc_recall_at_10_std value: 23.7287 - type: nauc_recall_at_10_diff1 value: -19.8106 - type: nauc_recall_at_20_max value: 46.7029 - type: nauc_recall_at_20_std value: 34.2093 - type: nauc_recall_at_20_diff1 value: -18.305 - type: nauc_recall_at_100_max value: 53.403999999999996 - type: nauc_recall_at_100_std value: 53.4122 - type: nauc_recall_at_100_diff1 value: -16.8661 - type: nauc_recall_at_1000_max value: 56.882299999999994 - type: nauc_recall_at_1000_std value: 70.0182 - type: nauc_recall_at_1000_diff1 value: -17.042099999999998 - type: nauc_precision_at_1_max value: 37.6408 - type: nauc_precision_at_1_std value: -7.9706 - type: nauc_precision_at_1_diff1 value: 64.2193 - type: nauc_precision_at_3_max value: 35.9731 - type: nauc_precision_at_3_std value: 8.0627 - type: nauc_precision_at_3_diff1 value: -18.9248 - type: nauc_precision_at_5_max value: 40.184799999999996 - type: nauc_precision_at_5_std value: 15.5623 - type: nauc_precision_at_5_diff1 value: -18.8156 - type: nauc_precision_at_10_max value: 43.8976 - type: nauc_precision_at_10_std value: 23.7287 - type: nauc_precision_at_10_diff1 value: -19.8106 - type: nauc_precision_at_20_max value: 46.7029 - type: nauc_precision_at_20_std value: 34.2093 - type: nauc_precision_at_20_diff1 value: -18.305 - type: nauc_precision_at_100_max value: 53.403999999999996 - type: nauc_precision_at_100_std value: 53.4122 - type: nauc_precision_at_100_diff1 value: -16.8661 - type: nauc_precision_at_1000_max value: 56.882299999999994 - type: nauc_precision_at_1000_std value: 70.0182 - type: nauc_precision_at_1000_diff1 value: -17.042099999999998 - type: nauc_mrr_at_1_max value: 37.6408 - type: nauc_mrr_at_1_std value: -7.9706 - type: nauc_mrr_at_1_diff1 value: 64.2193 - type: nauc_mrr_at_3_max value: 43.0267 - type: nauc_mrr_at_3_std value: -3.9602 - type: 
nauc_mrr_at_3_diff1 value: 64.7898 - type: nauc_mrr_at_5_max value: 42.548700000000004 - type: nauc_mrr_at_5_std value: -4.1829 - type: nauc_mrr_at_5_diff1 value: 64.81989999999999 - type: nauc_mrr_at_10_max value: 42.5037 - type: nauc_mrr_at_10_std value: -3.8122000000000003 - type: nauc_mrr_at_10_diff1 value: 64.84440000000001 - type: nauc_mrr_at_20_max value: 42.4425 - type: nauc_mrr_at_20_std value: -3.8257 - type: nauc_mrr_at_20_diff1 value: 64.8602 - type: nauc_mrr_at_100_max value: 42.3146 - type: nauc_mrr_at_100_std value: -3.9995999999999996 - type: nauc_mrr_at_100_diff1 value: 64.81660000000001 - type: nauc_mrr_at_1000_max value: 42.3073 - type: nauc_mrr_at_1000_std value: -4.0055 - type: nauc_mrr_at_1000_diff1 value: 64.81360000000001 - type: main_score value: 87.473 - task: type: Classification dataset: name: MTEB ImdbClassification (default) type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 97.07679999999999 - type: f1 value: 97.07639999999999 - type: f1_weighted value: 97.07639999999999 - type: ap value: 95.4623 - type: ap_weighted value: 95.4623 - type: main_score value: 97.07679999999999 - task: type: Retrieval dataset: name: MTEB MSMARCO (default) type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: ndcg_at_1 value: 27.12 - type: ndcg_at_3 value: 39.287 - type: ndcg_at_5 value: 43.478 - type: ndcg_at_10 value: 47.396 - type: ndcg_at_20 value: 49.915 - type: ndcg_at_100 value: 52.410000000000004 - type: ndcg_at_1000 value: 53.20700000000001 - type: map_at_1 value: 26.391 - type: map_at_3 value: 36.016999999999996 - type: map_at_5 value: 38.385999999999996 - type: map_at_10 value: 40.058 - type: map_at_20 value: 40.772999999999996 - type: map_at_100 value: 41.15 - type: map_at_1000 value: 41.185 - type: recall_at_1 value: 26.391 - type: recall_at_3 value: 48.025 - type: recall_at_5 value: 58.036 - type: recall_at_10 value: 69.852 - type: recall_at_20 value: 79.605 - type: recall_at_100 value: 92.499 - type: recall_at_1000 value: 98.446 - type: precision_at_1 value: 27.12 - type: precision_at_3 value: 16.608999999999998 - type: precision_at_5 value: 12.089 - type: precision_at_10 value: 7.314 - type: precision_at_20 value: 4.18 - type: precision_at_100 value: 0.9780000000000001 - type: precision_at_1000 value: 0.105 - type: mrr_at_1 value: 27.120300000000004 - type: mrr_at_3 value: 36.728699999999996 - type: mrr_at_5 value: 39.0518 - type: mrr_at_10 value: 40.6561 - type: mrr_at_20 value: 41.3384 - type: mrr_at_100 value: 41.6825 - type: mrr_at_1000 value: 41.7118 - type: nauc_ndcg_at_1_max value: -1.7341 - type: nauc_ndcg_at_1_std value: -22.0897 - type: nauc_ndcg_at_1_diff1 value: 43.9115 - type: nauc_ndcg_at_3_max value: -2.2762000000000002 - type: nauc_ndcg_at_3_std value: -24.0353 - type: nauc_ndcg_at_3_diff1 value: 40.4042 - type: nauc_ndcg_at_5_max value: -2.1643 - type: nauc_ndcg_at_5_std value: -23.5777 - type: nauc_ndcg_at_5_diff1 value: 40.1946 - type: nauc_ndcg_at_10_max value: -1.6878 - type: nauc_ndcg_at_10_std value: -22.9484 - type: nauc_ndcg_at_10_diff1 value: 40.5053 - type: nauc_ndcg_at_20_max value: -1.0808 - type: nauc_ndcg_at_20_std value: -20.8231 - type: nauc_ndcg_at_20_diff1 value: 40.4996 - type: nauc_ndcg_at_100_max value: -1.387 - type: nauc_ndcg_at_100_std value: -19.6544 - type: nauc_ndcg_at_100_diff1 value: 40.808499999999995 - type: nauc_ndcg_at_1000_max value: -1.3396 - type: nauc_ndcg_at_1000_std value: 
-20.7437 - type: nauc_ndcg_at_1000_diff1 value: 40.8921 - type: nauc_map_at_1_max value: -1.7507000000000001 - type: nauc_map_at_1_std value: -22.192899999999998 - type: nauc_map_at_1_diff1 value: 44.0719 - type: nauc_map_at_3_max value: -2.1371 - type: nauc_map_at_3_std value: -23.7158 - type: nauc_map_at_3_diff1 value: 41.351 - type: nauc_map_at_5_max value: -2.1025 - type: nauc_map_at_5_std value: -23.5251 - type: nauc_map_at_5_diff1 value: 41.255399999999995 - type: nauc_map_at_10_max value: -1.9206 - type: nauc_map_at_10_std value: -23.2697 - type: nauc_map_at_10_diff1 value: 41.4134 - type: nauc_map_at_20_max value: -1.7760000000000002 - type: nauc_map_at_20_std value: -22.7164 - type: nauc_map_at_20_diff1 value: 41.4186 - type: nauc_map_at_100_max value: -1.8270000000000002 - type: nauc_map_at_100_std value: -22.551 - type: nauc_map_at_100_diff1 value: 41.4761 - type: nauc_map_at_1000_max value: -1.8245 - type: nauc_map_at_1000_std value: -22.5827 - type: nauc_map_at_1000_diff1 value: 41.4796 - type: nauc_recall_at_1_max value: -1.7507000000000001 - type: nauc_recall_at_1_std value: -22.192899999999998 - type: nauc_recall_at_1_diff1 value: 44.0719 - type: nauc_recall_at_3_max value: -2.5709 - type: nauc_recall_at_3_std value: -24.9526 - type: nauc_recall_at_3_diff1 value: 37.6496 - type: nauc_recall_at_5_max value: -2.2352 - type: nauc_recall_at_5_std value: -23.7151 - type: nauc_recall_at_5_diff1 value: 36.7421 - type: nauc_recall_at_10_max value: -0.4821 - type: nauc_recall_at_10_std value: -21.5386 - type: nauc_recall_at_10_diff1 value: 37.1132 - type: nauc_recall_at_20_max value: 3.5499 - type: nauc_recall_at_20_std value: -8.5039 - type: nauc_recall_at_20_diff1 value: 35.985299999999995 - type: nauc_recall_at_100_max value: 4.6888 - type: nauc_recall_at_100_std value: 30.0406 - type: nauc_recall_at_100_diff1 value: 34.8416 - type: nauc_recall_at_1000_max value: 30.544300000000003 - type: nauc_recall_at_1000_std value: 72.42269999999999 - type: nauc_recall_at_1000_diff1 value: 26.676299999999998 - type: nauc_precision_at_1_max value: -1.7341 - type: nauc_precision_at_1_std value: -22.0897 - type: nauc_precision_at_1_diff1 value: 43.9115 - type: nauc_precision_at_3_max value: -2.7643 - type: nauc_precision_at_3_std value: -24.537100000000002 - type: nauc_precision_at_3_diff1 value: 36.9028 - type: nauc_precision_at_5_max value: -2.4927 - type: nauc_precision_at_5_std value: -22.6954 - type: nauc_precision_at_5_diff1 value: 35.0569 - type: nauc_precision_at_10_max value: -1.3371 - type: nauc_precision_at_10_std value: -19.017 - type: nauc_precision_at_10_diff1 value: 33.0978 - type: nauc_precision_at_20_max value: 1.9426999999999999 - type: nauc_precision_at_20_std value: -5.3872 - type: nauc_precision_at_20_diff1 value: 28.509400000000003 - type: nauc_precision_at_100_max value: 2.8586 - type: nauc_precision_at_100_std value: 20.869 - type: nauc_precision_at_100_diff1 value: 13.559899999999999 - type: nauc_precision_at_1000_max value: 6.1333 - type: nauc_precision_at_1000_std value: 15.551400000000001 - type: nauc_precision_at_1000_diff1 value: -3.4235 - type: nauc_mrr_at_1_max value: -1.7341 - type: nauc_mrr_at_1_std value: -22.0897 - type: nauc_mrr_at_1_diff1 value: 43.9115 - type: nauc_mrr_at_3_max value: -2.1852 - type: nauc_mrr_at_3_std value: -23.5165 - type: nauc_mrr_at_3_diff1 value: 41.1678 - type: nauc_mrr_at_5_max value: -2.1132999999999997 - type: nauc_mrr_at_5_std value: -23.1653 - type: nauc_mrr_at_5_diff1 value: 41.0944 - type: nauc_mrr_at_10_max value: -1.8908 - 
type: nauc_mrr_at_10_std value: -22.8918 - type: nauc_mrr_at_10_diff1 value: 41.1907 - type: nauc_mrr_at_20_max value: -1.7221 - type: nauc_mrr_at_20_std value: -22.375 - type: nauc_mrr_at_20_diff1 value: 41.2234 - type: nauc_mrr_at_100_max value: -1.7874999999999999 - type: nauc_mrr_at_100_std value: -22.2616 - type: nauc_mrr_at_100_diff1 value: 41.286899999999996 - type: nauc_mrr_at_1000_max value: -1.7856 - type: nauc_mrr_at_1000_std value: -22.2926 - type: nauc_mrr_at_1000_diff1 value: 41.2906 - type: main_score value: 47.396 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 99.2157 - type: f1 value: 99.1286 - type: f1_weighted value: 99.21640000000001 - type: main_score value: 99.2157 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 94.5372 - type: f1 value: 78.7627 - type: f1_weighted value: 95.2685 - type: main_score value: 94.5372 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 82.0646 - type: f1 value: 80.2035 - type: f1_weighted value: 80.8017 - type: main_score value: 82.0646 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 87.5723 - type: f1 value: 86.2565 - type: f1_weighted value: 86.92020000000001 - type: main_score value: 87.5723 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P (default) type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 47.488200000000006 - type: v_measure_std value: 1.2606 - type: main_score value: 47.488200000000006 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S (default) type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 45.0597 - type: v_measure_std value: 1.5357 - type: main_score value: 45.0597 - task: type: Reranking dataset: name: MTEB MindSmallReranking (default) type: mteb/mind_small config: default split: test revision: 59042f120c80e8afa9cdbb224f67076cec0fc9a7 metrics: - type: map value: 30.8519 - type: mrr value: 32.1466 - type: nAUC_map_max value: -16.602800000000002 - type: nAUC_map_std value: -8.7712 - type: nAUC_map_diff1 value: 8.7311 - type: nAUC_mrr_max value: -11.0311 - type: nAUC_mrr_std value: -5.2932 - type: nAUC_mrr_diff1 value: 8.7991 - type: main_score value: 30.8519 - task: type: Retrieval dataset: name: MTEB NFCorpus (default) type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: ndcg_at_1 value: 50.773999999999994 - type: ndcg_at_3 value: 46.766000000000005 - type: ndcg_at_5 value: 44.401 - type: ndcg_at_10 value: 40.955000000000005 - type: ndcg_at_20 value: 38.436 - type: ndcg_at_100 value: 37.101 - type: ndcg_at_1000 value: 45.458999999999996 - type: map_at_1 value: 6.7860000000000005 - type: map_at_3 value: 11.305 - type: map_at_5 value: 13.355 - type: map_at_10 value: 15.841 - type: 
map_at_20 value: 17.724 - type: map_at_100 value: 20.146 - type: map_at_1000 value: 21.664 - type: recall_at_1 value: 6.7860000000000005 - type: recall_at_3 value: 12.848 - type: recall_at_5 value: 16.059 - type: recall_at_10 value: 20.699 - type: recall_at_20 value: 25.349 - type: recall_at_100 value: 37.377 - type: recall_at_1000 value: 68.326 - type: precision_at_1 value: 52.322 - type: precision_at_3 value: 43.963 - type: precision_at_5 value: 38.7 - type: precision_at_10 value: 30.402 - type: precision_at_20 value: 22.415 - type: precision_at_100 value: 9.074 - type: precision_at_1000 value: 2.141 - type: mrr_at_1 value: 52.322 - type: mrr_at_3 value: 60.2167 - type: mrr_at_5 value: 61.161 - type: mrr_at_10 value: 61.6213 - type: mrr_at_20 value: 61.9851 - type: mrr_at_100 value: 62.1286 - type: mrr_at_1000 value: 62.16159999999999 - type: nauc_ndcg_at_1_max value: 47.306 - type: nauc_ndcg_at_1_std value: 15.371299999999998 - type: nauc_ndcg_at_1_diff1 value: 34.3673 - type: nauc_ndcg_at_3_max value: 49.5171 - type: nauc_ndcg_at_3_std value: 21.7163 - type: nauc_ndcg_at_3_diff1 value: 24.3249 - type: nauc_ndcg_at_5_max value: 50.1667 - type: nauc_ndcg_at_5_std value: 25.496799999999997 - type: nauc_ndcg_at_5_diff1 value: 21.0998 - type: nauc_ndcg_at_10_max value: 48.174499999999995 - type: nauc_ndcg_at_10_std value: 25.674799999999998 - type: nauc_ndcg_at_10_diff1 value: 19.2271 - type: nauc_ndcg_at_20_max value: 46.451100000000004 - type: nauc_ndcg_at_20_std value: 26.3454 - type: nauc_ndcg_at_20_diff1 value: 19.6892 - type: nauc_ndcg_at_100_max value: 47.394 - type: nauc_ndcg_at_100_std value: 29.3957 - type: nauc_ndcg_at_100_diff1 value: 22.639 - type: nauc_ndcg_at_1000_max value: 48.8094 - type: nauc_ndcg_at_1000_std value: 33.6209 - type: nauc_ndcg_at_1000_diff1 value: 24.0513 - type: nauc_map_at_1_max value: 22.2337 - type: nauc_map_at_1_std value: -15.3141 - type: nauc_map_at_1_diff1 value: 46.8412 - type: nauc_map_at_3_max value: 31.1176 - type: nauc_map_at_3_std value: -6.8641 - type: nauc_map_at_3_diff1 value: 38.2225 - type: nauc_map_at_5_max value: 34.1685 - type: nauc_map_at_5_std value: -2.7371 - type: nauc_map_at_5_diff1 value: 33.8161 - type: nauc_map_at_10_max value: 38.3438 - type: nauc_map_at_10_std value: 2.4334000000000002 - type: nauc_map_at_10_diff1 value: 29.9155 - type: nauc_map_at_20_max value: 41.6186 - type: nauc_map_at_20_std value: 8.1891 - type: nauc_map_at_20_diff1 value: 28.083999999999996 - type: nauc_map_at_100_max value: 43.8986 - type: nauc_map_at_100_std value: 14.971699999999998 - type: nauc_map_at_100_diff1 value: 25.7392 - type: nauc_map_at_1000_max value: 43.7337 - type: nauc_map_at_1000_std value: 17.3602 - type: nauc_map_at_1000_diff1 value: 24.3521 - type: nauc_recall_at_1_max value: 22.2337 - type: nauc_recall_at_1_std value: -15.3141 - type: nauc_recall_at_1_diff1 value: 46.8412 - type: nauc_recall_at_3_max value: 27.4814 - type: nauc_recall_at_3_std value: -6.2251 - type: nauc_recall_at_3_diff1 value: 33.189099999999996 - type: nauc_recall_at_5_max value: 27.6656 - type: nauc_recall_at_5_std value: -1.3779 - type: nauc_recall_at_5_diff1 value: 26.5088 - type: nauc_recall_at_10_max value: 29.8338 - type: nauc_recall_at_10_std value: 0.6765 - type: nauc_recall_at_10_diff1 value: 19.3518 - type: nauc_recall_at_20_max value: 29.566300000000002 - type: nauc_recall_at_20_std value: 6.649299999999999 - type: nauc_recall_at_20_diff1 value: 16.3787 - type: nauc_recall_at_100_max value: 29.775299999999998 - type: nauc_recall_at_100_std value: 
19.5727 - type: nauc_recall_at_100_diff1 value: 13.4263 - type: nauc_recall_at_1000_max value: 15.575800000000001 - type: nauc_recall_at_1000_std value: 16.5073 - type: nauc_recall_at_1000_diff1 value: 9.413 - type: nauc_precision_at_1_max value: 47.6567 - type: nauc_precision_at_1_std value: 16.1159 - type: nauc_precision_at_1_diff1 value: 35.7474 - type: nauc_precision_at_3_max value: 45.9337 - type: nauc_precision_at_3_std value: 28.306700000000003 - type: nauc_precision_at_3_diff1 value: 12.9558 - type: nauc_precision_at_5_max value: 45.3828 - type: nauc_precision_at_5_std value: 34.0723 - type: nauc_precision_at_5_diff1 value: 3.936 - type: nauc_precision_at_10_max value: 40.2787 - type: nauc_precision_at_10_std value: 36.1164 - type: nauc_precision_at_10_diff1 value: -1.9665 - type: nauc_precision_at_20_max value: 33.8095 - type: nauc_precision_at_20_std value: 37.288 - type: nauc_precision_at_20_diff1 value: -4.3394 - type: nauc_precision_at_100_max value: 19.880200000000002 - type: nauc_precision_at_100_std value: 35.8879 - type: nauc_precision_at_100_diff1 value: -11.5763 - type: nauc_precision_at_1000_max value: 2.9351 - type: nauc_precision_at_1000_std value: 17.5752 - type: nauc_precision_at_1000_diff1 value: -13.2391 - type: nauc_mrr_at_1_max value: 47.6567 - type: nauc_mrr_at_1_std value: 16.1159 - type: nauc_mrr_at_1_diff1 value: 35.7474 - type: nauc_mrr_at_3_max value: 51.1154 - type: nauc_mrr_at_3_std value: 22.6976 - type: nauc_mrr_at_3_diff1 value: 35.0163 - type: nauc_mrr_at_5_max value: 50.6561 - type: nauc_mrr_at_5_std value: 23.716 - type: nauc_mrr_at_5_diff1 value: 34.965 - type: nauc_mrr_at_10_max value: 50.6931 - type: nauc_mrr_at_10_std value: 24.0343 - type: nauc_mrr_at_10_diff1 value: 34.5146 - type: nauc_mrr_at_20_max value: 50.7143 - type: nauc_mrr_at_20_std value: 24.1366 - type: nauc_mrr_at_20_diff1 value: 34.819 - type: nauc_mrr_at_100_max value: 50.76500000000001 - type: nauc_mrr_at_100_std value: 24.1494 - type: nauc_mrr_at_100_diff1 value: 34.7759 - type: nauc_mrr_at_1000_max value: 50.7421 - type: nauc_mrr_at_1000_std value: 24.110300000000002 - type: nauc_mrr_at_1000_diff1 value: 34.7687 - type: main_score value: 40.955000000000005 - task: type: Retrieval dataset: name: MTEB NQ (default) type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: ndcg_at_1 value: 56.518 - type: ndcg_at_3 value: 68.057 - type: ndcg_at_5 value: 71.497 - type: ndcg_at_10 value: 73.91499999999999 - type: ndcg_at_20 value: 74.994 - type: ndcg_at_100 value: 75.804 - type: ndcg_at_1000 value: 75.917 - type: map_at_1 value: 50.739000000000004 - type: map_at_3 value: 63.958000000000006 - type: map_at_5 value: 66.194 - type: map_at_10 value: 67.375 - type: map_at_20 value: 67.74 - type: map_at_100 value: 67.887 - type: map_at_1000 value: 67.893 - type: recall_at_1 value: 50.739000000000004 - type: recall_at_3 value: 76.364 - type: recall_at_5 value: 84.11800000000001 - type: recall_at_10 value: 91.037 - type: recall_at_20 value: 94.914 - type: recall_at_100 value: 98.84100000000001 - type: recall_at_1000 value: 99.643 - type: precision_at_1 value: 56.518 - type: precision_at_3 value: 29.809 - type: precision_at_5 value: 20.023 - type: precision_at_10 value: 10.943999999999999 - type: precision_at_20 value: 5.7459999999999996 - type: precision_at_100 value: 1.202 - type: precision_at_1000 value: 0.121 - type: mrr_at_1 value: 56.518 - type: mrr_at_3 value: 67.4392 - type: mrr_at_5 value: 68.9064 - type: mrr_at_10 value: 69.6792 - 
type: mrr_at_20 value: 69.8936 - type: mrr_at_100 value: 69.9803 - type: mrr_at_1000 value: 69.9841 - type: nauc_ndcg_at_1_max value: 21.104899999999997 - type: nauc_ndcg_at_1_std value: -8.8061 - type: nauc_ndcg_at_1_diff1 value: 51.5617 - type: nauc_ndcg_at_3_max value: 25.8231 - type: nauc_ndcg_at_3_std value: -11.3673 - type: nauc_ndcg_at_3_diff1 value: 48.4532 - type: nauc_ndcg_at_5_max value: 27.3793 - type: nauc_ndcg_at_5_std value: -10.9771 - type: nauc_ndcg_at_5_diff1 value: 48.3739 - type: nauc_ndcg_at_10_max value: 27.019 - type: nauc_ndcg_at_10_std value: -9.5004 - type: nauc_ndcg_at_10_diff1 value: 48.762 - type: nauc_ndcg_at_20_max value: 26.8793 - type: nauc_ndcg_at_20_std value: -9.1081 - type: nauc_ndcg_at_20_diff1 value: 48.971599999999995 - type: nauc_ndcg_at_100_max value: 26.188200000000002 - type: nauc_ndcg_at_100_std value: -8.8193 - type: nauc_ndcg_at_100_diff1 value: 49.160900000000005 - type: nauc_ndcg_at_1000_max value: 25.976 - type: nauc_ndcg_at_1000_std value: -9.037 - type: nauc_ndcg_at_1000_diff1 value: 49.032 - type: nauc_map_at_1_max value: 19.5507 - type: nauc_map_at_1_std value: -10.5558 - type: nauc_map_at_1_diff1 value: 51.809099999999994 - type: nauc_map_at_3_max value: 24.3671 - type: nauc_map_at_3_std value: -11.4169 - type: nauc_map_at_3_diff1 value: 49.2235 - type: nauc_map_at_5_max value: 25.221 - type: nauc_map_at_5_std value: -11.1358 - type: nauc_map_at_5_diff1 value: 49.161500000000004 - type: nauc_map_at_10_max value: 25.0963 - type: nauc_map_at_10_std value: -10.516300000000001 - type: nauc_map_at_10_diff1 value: 49.239 - type: nauc_map_at_20_max value: 25.065900000000003 - type: nauc_map_at_20_std value: -10.3531 - type: nauc_map_at_20_diff1 value: 49.278 - type: nauc_map_at_100_max value: 24.9721 - type: nauc_map_at_100_std value: -10.2936 - type: nauc_map_at_100_diff1 value: 49.2973 - type: nauc_map_at_1000_max value: 24.9646 - type: nauc_map_at_1000_std value: -10.3019 - type: nauc_map_at_1000_diff1 value: 49.2939 - type: nauc_recall_at_1_max value: 19.5507 - type: nauc_recall_at_1_std value: -10.5558 - type: nauc_recall_at_1_diff1 value: 51.809099999999994 - type: nauc_recall_at_3_max value: 29.2624 - type: nauc_recall_at_3_std value: -13.894400000000001 - type: nauc_recall_at_3_diff1 value: 44.7434 - type: nauc_recall_at_5_max value: 36.0211 - type: nauc_recall_at_5_std value: -14.130999999999998 - type: nauc_recall_at_5_diff1 value: 43.3309 - type: nauc_recall_at_10_max value: 39.385799999999996 - type: nauc_recall_at_10_std value: -6.685199999999999 - type: nauc_recall_at_10_diff1 value: 44.2087 - type: nauc_recall_at_20_max value: 47.641600000000004 - type: nauc_recall_at_20_std value: -0.281 - type: nauc_recall_at_20_diff1 value: 47.0697 - type: nauc_recall_at_100_max value: 64.6308 - type: nauc_recall_at_100_std value: 45.0589 - type: nauc_recall_at_100_diff1 value: 65.0598 - type: nauc_recall_at_1000_max value: 68.5287 - type: nauc_recall_at_1000_std value: 77.1208 - type: nauc_recall_at_1000_diff1 value: 49.7482 - type: nauc_precision_at_1_max value: 21.104899999999997 - type: nauc_precision_at_1_std value: -8.8061 - type: nauc_precision_at_1_diff1 value: 51.5617 - type: nauc_precision_at_3_max value: 21.184 - type: nauc_precision_at_3_std value: -3.5241000000000002 - type: nauc_precision_at_3_diff1 value: 19.3059 - type: nauc_precision_at_5_max value: 18.4921 - type: nauc_precision_at_5_std value: 1.0416999999999998 - type: nauc_precision_at_5_diff1 value: 7.2985999999999995 - type: nauc_precision_at_10_max value: 12.1251 - 
type: nauc_precision_at_10_std value: 7.9022 - type: nauc_precision_at_10_diff1 value: -3.3798000000000004 - type: nauc_precision_at_20_max value: 8.2779 - type: nauc_precision_at_20_std value: 10.8969 - type: nauc_precision_at_20_diff1 value: -10.1609 - type: nauc_precision_at_100_max value: 2.0527 - type: nauc_precision_at_100_std value: 14.127799999999999 - type: nauc_precision_at_100_diff1 value: -17.0174 - type: nauc_precision_at_1000_max value: 0.0936 - type: nauc_precision_at_1000_std value: 13.403 - type: nauc_precision_at_1000_diff1 value: -19.3205 - type: nauc_mrr_at_1_max value: 21.104899999999997 - type: nauc_mrr_at_1_std value: -8.8061 - type: nauc_mrr_at_1_diff1 value: 51.5617 - type: nauc_mrr_at_3_max value: 24.9568 - type: nauc_mrr_at_3_std value: -8.7933 - type: nauc_mrr_at_3_diff1 value: 48.821799999999996 - type: nauc_mrr_at_5_max value: 25.3627 - type: nauc_mrr_at_5_std value: -8.7224 - type: nauc_mrr_at_5_diff1 value: 48.9393 - type: nauc_mrr_at_10_max value: 25.1135 - type: nauc_mrr_at_10_std value: -8.3704 - type: nauc_mrr_at_10_diff1 value: 49.132999999999996 - type: nauc_mrr_at_20_max value: 25.015700000000002 - type: nauc_mrr_at_20_std value: -8.4009 - type: nauc_mrr_at_20_diff1 value: 49.2012 - type: nauc_mrr_at_100_max value: 24.9285 - type: nauc_mrr_at_100_std value: -8.3989 - type: nauc_mrr_at_100_diff1 value: 49.223099999999995 - type: nauc_mrr_at_1000_max value: 24.921599999999998 - type: nauc_mrr_at_1000_std value: -8.4031 - type: nauc_mrr_at_1000_diff1 value: 49.2186 - type: main_score value: 73.91499999999999 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval (default) type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: ndcg_at_1 value: 84.99 - type: ndcg_at_3 value: 88.71900000000001 - type: ndcg_at_5 value: 89.997 - type: ndcg_at_10 value: 91.019 - type: ndcg_at_20 value: 91.532 - type: ndcg_at_100 value: 91.92399999999999 - type: ndcg_at_1000 value: 91.977 - type: map_at_1 value: 73.833 - type: map_at_3 value: 85.117 - type: map_at_5 value: 86.85000000000001 - type: map_at_10 value: 87.875 - type: map_at_20 value: 88.256 - type: map_at_100 value: 88.44300000000001 - type: map_at_1000 value: 88.455 - type: recall_at_1 value: 73.833 - type: recall_at_3 value: 89.934 - type: recall_at_5 value: 93.795 - type: recall_at_10 value: 96.799 - type: recall_at_20 value: 98.458 - type: recall_at_100 value: 99.79299999999999 - type: recall_at_1000 value: 99.98899999999999 - type: precision_at_1 value: 84.99 - type: precision_at_3 value: 38.897 - type: precision_at_5 value: 25.407999999999998 - type: precision_at_10 value: 13.766 - type: precision_at_20 value: 7.255000000000001 - type: precision_at_100 value: 1.543 - type: precision_at_1000 value: 0.157 - type: mrr_at_1 value: 85 - type: mrr_at_3 value: 89.485 - type: mrr_at_5 value: 90.0545 - type: mrr_at_10 value: 90.256 - type: mrr_at_20 value: 90.307 - type: mrr_at_100 value: 90.3212 - type: mrr_at_1000 value: 90.3214 - type: nauc_ndcg_at_1_max value: 33.0127 - type: nauc_ndcg_at_1_std value: -59.3688 - type: nauc_ndcg_at_1_diff1 value: 81.11880000000001 - type: nauc_ndcg_at_3_max value: 29.525800000000004 - type: nauc_ndcg_at_3_std value: -71.4482 - type: nauc_ndcg_at_3_diff1 value: 78.98849999999999 - type: nauc_ndcg_at_5_max value: 30.3419 - type: nauc_ndcg_at_5_std value: -73.92490000000001 - type: nauc_ndcg_at_5_diff1 value: 79.8099 - type: nauc_ndcg_at_10_max value: 31.262800000000002 - type: nauc_ndcg_at_10_std value: -71.8798 - type: 
nauc_ndcg_at_10_diff1 value: 80.01310000000001 - type: nauc_ndcg_at_20_max value: 31.8336 - type: nauc_ndcg_at_20_std value: -69.852 - type: nauc_ndcg_at_20_diff1 value: 79.9131 - type: nauc_ndcg_at_100_max value: 32.351400000000005 - type: nauc_ndcg_at_100_std value: -67.02420000000001 - type: nauc_ndcg_at_100_diff1 value: 79.8222 - type: nauc_ndcg_at_1000_max value: 32.3924 - type: nauc_ndcg_at_1000_std value: -66.57690000000001 - type: nauc_ndcg_at_1000_diff1 value: 79.8063 - type: nauc_map_at_1_max value: 21.4243 - type: nauc_map_at_1_std value: -57.04900000000001 - type: nauc_map_at_1_diff1 value: 83.3378 - type: nauc_map_at_3_max value: 27.078799999999998 - type: nauc_map_at_3_std value: -73.0069 - type: nauc_map_at_3_diff1 value: 80.437 - type: nauc_map_at_5_max value: 28.931600000000003 - type: nauc_map_at_5_std value: -73.7017 - type: nauc_map_at_5_diff1 value: 80.2443 - type: nauc_map_at_10_max value: 30.246699999999997 - type: nauc_map_at_10_std value: -71.5712 - type: nauc_map_at_10_diff1 value: 80.0294 - type: nauc_map_at_20_max value: 30.6119 - type: nauc_map_at_20_std value: -70.0168 - type: nauc_map_at_20_diff1 value: 79.86619999999999 - type: nauc_map_at_100_max value: 30.778899999999997 - type: nauc_map_at_100_std value: -68.85860000000001 - type: nauc_map_at_100_diff1 value: 79.8048 - type: nauc_map_at_1000_max value: 30.798199999999998 - type: nauc_map_at_1000_std value: -68.77210000000001 - type: nauc_map_at_1000_diff1 value: 79.8039 - type: nauc_recall_at_1_max value: 21.4243 - type: nauc_recall_at_1_std value: -57.04900000000001 - type: nauc_recall_at_1_diff1 value: 83.3378 - type: nauc_recall_at_3_max value: 22.6679 - type: nauc_recall_at_3_std value: -86.9046 - type: nauc_recall_at_3_diff1 value: 77.608 - type: nauc_recall_at_5_max value: 24.4242 - type: nauc_recall_at_5_std value: -100.1963 - type: nauc_recall_at_5_diff1 value: 77.5562 - type: nauc_recall_at_10_max value: 26.995599999999996 - type: nauc_recall_at_10_std value: -110.56330000000001 - type: nauc_recall_at_10_diff1 value: 78.6007 - type: nauc_recall_at_20_max value: 27.3385 - type: nauc_recall_at_20_std value: -117.10199999999999 - type: nauc_recall_at_20_diff1 value: 77.7938 - type: nauc_recall_at_100_max value: 33.0847 - type: nauc_recall_at_100_std value: -110.4169 - type: nauc_recall_at_100_diff1 value: 76.4873 - type: nauc_recall_at_1000_max value: -16.532 - type: nauc_recall_at_1000_std value: -24.5592 - type: nauc_recall_at_1000_diff1 value: 72.233 - type: nauc_precision_at_1_max value: 33.0127 - type: nauc_precision_at_1_std value: -59.3688 - type: nauc_precision_at_1_diff1 value: 81.11880000000001 - type: nauc_precision_at_3_max value: 4.6178 - type: nauc_precision_at_3_std value: 8.1134 - type: nauc_precision_at_3_diff1 value: -27.1918 - type: nauc_precision_at_5_max value: 1.3161 - type: nauc_precision_at_5_std value: 21.8406 - type: nauc_precision_at_5_diff1 value: -37.5509 - type: nauc_precision_at_10_max value: -1.4878 - type: nauc_precision_at_10_std value: 35.5171 - type: nauc_precision_at_10_diff1 value: -43.601099999999995 - type: nauc_precision_at_20_max value: -3.0787999999999998 - type: nauc_precision_at_20_std value: 43.1194 - type: nauc_precision_at_20_diff1 value: -45.7438 - type: nauc_precision_at_100_max value: -4.3248 - type: nauc_precision_at_100_std value: 51.5534 - type: nauc_precision_at_100_diff1 value: -46.8655 - type: nauc_precision_at_1000_max value: -4.4053 - type: nauc_precision_at_1000_std value: 53.2738 - type: nauc_precision_at_1000_diff1 value: -46.8777 - type: 
nauc_mrr_at_1_max value: 32.994099999999996 - type: nauc_mrr_at_1_std value: -59.4653 - type: nauc_mrr_at_1_diff1 value: 81.0983 - type: nauc_mrr_at_3_max value: 33.381699999999995 - type: nauc_mrr_at_3_std value: -65.6011 - type: nauc_mrr_at_3_diff1 value: 80.3293 - type: nauc_mrr_at_5_max value: 33.5696 - type: nauc_mrr_at_5_std value: -65.3317 - type: nauc_mrr_at_5_diff1 value: 80.5711 - type: nauc_mrr_at_10_max value: 33.453500000000005 - type: nauc_mrr_at_10_std value: -64.90209999999999 - type: nauc_mrr_at_10_diff1 value: 80.5965 - type: nauc_mrr_at_20_max value: 33.414500000000004 - type: nauc_mrr_at_20_std value: -64.7197 - type: nauc_mrr_at_20_diff1 value: 80.5804 - type: nauc_mrr_at_100_max value: 33.4032 - type: nauc_mrr_at_100_std value: -64.6315 - type: nauc_mrr_at_100_diff1 value: 80.5771 - type: nauc_mrr_at_1000_max value: 33.4024 - type: nauc_mrr_at_1000_std value: -64.6301 - type: nauc_mrr_at_1000_diff1 value: 80.5769 - type: main_score value: 91.019 - task: type: Clustering dataset: name: MTEB RedditClustering (default) type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 72.7484 - type: v_measure_std value: 2.9369 - type: main_score value: 72.7484 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P (default) type: mteb/reddit-clustering-p2p config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: v_measure value: 73.0078 - type: v_measure_std value: 12.3013 - type: main_score value: 73.0078 - task: type: Retrieval dataset: name: MTEB SCIDOCS (default) type: mteb/scidocs config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: ndcg_at_1 value: 31.3 - type: ndcg_at_3 value: 26.807 - type: ndcg_at_5 value: 24.29 - type: ndcg_at_10 value: 29.189999999999998 - type: ndcg_at_20 value: 33.212 - type: ndcg_at_100 value: 41.062 - type: ndcg_at_1000 value: 46.201 - type: map_at_1 value: 6.358 - type: map_at_3 value: 12.309000000000001 - type: map_at_5 value: 15.543000000000001 - type: map_at_10 value: 18.404999999999998 - type: map_at_20 value: 20.102 - type: map_at_100 value: 22.012 - type: map_at_1000 value: 22.391 - type: recall_at_1 value: 6.358 - type: recall_at_3 value: 15.482999999999999 - type: recall_at_5 value: 22.343 - type: recall_at_10 value: 31.378 - type: recall_at_20 value: 40.797 - type: recall_at_100 value: 66.122 - type: recall_at_1000 value: 90.878 - type: precision_at_1 value: 31.3 - type: precision_at_3 value: 25.467000000000002 - type: precision_at_5 value: 22.06 - type: precision_at_10 value: 15.479999999999999 - type: precision_at_20 value: 10.059999999999999 - type: precision_at_100 value: 3.2620000000000005 - type: precision_at_1000 value: 0.44799999999999995 - type: mrr_at_1 value: 31.3 - type: mrr_at_3 value: 41.5167 - type: mrr_at_5 value: 43.8317 - type: mrr_at_10 value: 45.236900000000006 - type: mrr_at_20 value: 45.894200000000005 - type: mrr_at_100 value: 46.2296 - type: mrr_at_1000 value: 46.247 - type: nauc_ndcg_at_1_max value: 22.6455 - type: nauc_ndcg_at_1_std value: -3.4214 - type: nauc_ndcg_at_1_diff1 value: 22.2194 - type: nauc_ndcg_at_3_max value: 26.16 - type: nauc_ndcg_at_3_std value: -4.1874 - type: nauc_ndcg_at_3_diff1 value: 15.6529 - type: nauc_ndcg_at_5_max value: 29.849500000000003 - type: nauc_ndcg_at_5_std value: -3.5488 - type: nauc_ndcg_at_5_diff1 value: 15.6251 - type: nauc_ndcg_at_10_max value: 30.462600000000002 - type: nauc_ndcg_at_10_std value: 
-2.4431000000000003 - type: nauc_ndcg_at_10_diff1 value: 13.424700000000001 - type: nauc_ndcg_at_20_max value: 32.0054 - type: nauc_ndcg_at_20_std value: -0.9074000000000001 - type: nauc_ndcg_at_20_diff1 value: 13.2326 - type: nauc_ndcg_at_100_max value: 33.604099999999995 - type: nauc_ndcg_at_100_std value: 3.8350000000000004 - type: nauc_ndcg_at_100_diff1 value: 12.7082 - type: nauc_ndcg_at_1000_max value: 32.5997 - type: nauc_ndcg_at_1000_std value: 3.2862 - type: nauc_ndcg_at_1000_diff1 value: 13.7365 - type: nauc_map_at_1_max value: 22.3207 - type: nauc_map_at_1_std value: -3.543 - type: nauc_map_at_1_diff1 value: 21.9335 - type: nauc_map_at_3_max value: 24.9035 - type: nauc_map_at_3_std value: -5.9363 - type: nauc_map_at_3_diff1 value: 15.101 - type: nauc_map_at_5_max value: 28.5337 - type: nauc_map_at_5_std value: -6.2807 - type: nauc_map_at_5_diff1 value: 14.9171 - type: nauc_map_at_10_max value: 29.496899999999997 - type: nauc_map_at_10_std value: -5.608 - type: nauc_map_at_10_diff1 value: 12.7308 - type: nauc_map_at_20_max value: 30.4348 - type: nauc_map_at_20_std value: -4.4265 - type: nauc_map_at_20_diff1 value: 12.4533 - type: nauc_map_at_100_max value: 31.244100000000003 - type: nauc_map_at_100_std value: -2.6229999999999998 - type: nauc_map_at_100_diff1 value: 12.2408 - type: nauc_map_at_1000_max value: 31.200699999999998 - type: nauc_map_at_1000_std value: -2.5584 - type: nauc_map_at_1000_diff1 value: 12.295499999999999 - type: nauc_recall_at_1_max value: 22.3207 - type: nauc_recall_at_1_std value: -3.543 - type: nauc_recall_at_1_diff1 value: 21.9335 - type: nauc_recall_at_3_max value: 26.617800000000003 - type: nauc_recall_at_3_std value: -4.601 - type: nauc_recall_at_3_diff1 value: 12.969800000000001 - type: nauc_recall_at_5_max value: 31.523 - type: nauc_recall_at_5_std value: -2.8593 - type: nauc_recall_at_5_diff1 value: 13.077 - type: nauc_recall_at_10_max value: 30.361 - type: nauc_recall_at_10_std value: -0.7305 - type: nauc_recall_at_10_diff1 value: 8.5364 - type: nauc_recall_at_20_max value: 31.821700000000003 - type: nauc_recall_at_20_std value: 2.5871999999999997 - type: nauc_recall_at_20_diff1 value: 7.7219 - type: nauc_recall_at_100_max value: 32.658500000000004 - type: nauc_recall_at_100_std value: 17.088 - type: nauc_recall_at_100_diff1 value: 4.2962 - type: nauc_recall_at_1000_max value: 28.8568 - type: nauc_recall_at_1000_std value: 30.724400000000003 - type: nauc_recall_at_1000_diff1 value: 5.7278 - type: nauc_precision_at_1_max value: 22.6455 - type: nauc_precision_at_1_std value: -3.4214 - type: nauc_precision_at_1_diff1 value: 22.2194 - type: nauc_precision_at_3_max value: 27.0287 - type: nauc_precision_at_3_std value: -4.2745999999999995 - type: nauc_precision_at_3_diff1 value: 13.2524 - type: nauc_precision_at_5_max value: 31.798199999999998 - type: nauc_precision_at_5_std value: -2.6458 - type: nauc_precision_at_5_diff1 value: 13.1913 - type: nauc_precision_at_10_max value: 30.442700000000002 - type: nauc_precision_at_10_std value: -0.7052 - type: nauc_precision_at_10_diff1 value: 8.698500000000001 - type: nauc_precision_at_20_max value: 31.8098 - type: nauc_precision_at_20_std value: 2.6527 - type: nauc_precision_at_20_diff1 value: 7.988199999999999 - type: nauc_precision_at_100_max value: 31.9799 - type: nauc_precision_at_100_std value: 16.4552 - type: nauc_precision_at_100_diff1 value: 4.6661 - type: nauc_precision_at_1000_max value: 26.039099999999998 - type: nauc_precision_at_1000_std value: 26.8761 - type: nauc_precision_at_1000_diff1 value: 
5.564299999999999 - type: nauc_mrr_at_1_max value: 22.6455 - type: nauc_mrr_at_1_std value: -3.4214 - type: nauc_mrr_at_1_diff1 value: 22.2194 - type: nauc_mrr_at_3_max value: 25.827699999999997 - type: nauc_mrr_at_3_std value: -2.0878 - type: nauc_mrr_at_3_diff1 value: 19.0105 - type: nauc_mrr_at_5_max value: 26.479799999999997 - type: nauc_mrr_at_5_std value: -1.0343 - type: nauc_mrr_at_5_diff1 value: 19.4599 - type: nauc_mrr_at_10_max value: 26.3345 - type: nauc_mrr_at_10_std value: -1.0147 - type: nauc_mrr_at_10_diff1 value: 19.572 - type: nauc_mrr_at_20_max value: 26.4581 - type: nauc_mrr_at_20_std value: -1.0342 - type: nauc_mrr_at_20_diff1 value: 19.5204 - type: nauc_mrr_at_100_max value: 26.334200000000003 - type: nauc_mrr_at_100_std value: -1.0591 - type: nauc_mrr_at_100_diff1 value: 19.5134 - type: nauc_mrr_at_1000_max value: 26.3192 - type: nauc_mrr_at_1000_std value: -1.0868 - type: nauc_mrr_at_1000_diff1 value: 19.5308 - type: main_score value: 29.189999999999998 - task: type: STS dataset: name: MTEB SICK-R (default) type: mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: pearson value: 82.9977 - type: spearman value: 82.7264 - type: cosine_pearson value: 82.9977 - type: cosine_spearman value: 82.7264 - type: manhattan_pearson value: 79.2844 - type: manhattan_spearman value: 82.706 - type: euclidean_pearson value: 79.30319999999999 - type: euclidean_spearman value: 82.7264 - type: main_score value: 82.7264 - task: type: STS dataset: name: MTEB STS12 (default) type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: pearson value: 86.691 - type: spearman value: 79.9669 - type: cosine_pearson value: 86.691 - type: cosine_spearman value: 79.9669 - type: manhattan_pearson value: 81.131 - type: manhattan_spearman value: 79.9913 - type: euclidean_pearson value: 81.13550000000001 - type: euclidean_spearman value: 79.9667 - type: main_score value: 79.9669 - task: type: STS dataset: name: MTEB STS13 (default) type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: pearson value: 85.1053 - type: spearman value: 83.94890000000001 - type: cosine_pearson value: 85.1053 - type: cosine_spearman value: 83.94890000000001 - type: manhattan_pearson value: 83.7957 - type: manhattan_spearman value: 83.8831 - type: euclidean_pearson value: 83.8318 - type: euclidean_spearman value: 83.94890000000001 - type: main_score value: 83.94890000000001 - task: type: STS dataset: name: MTEB STS14 (default) type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: pearson value: 84.23610000000001 - type: spearman value: 84.2503 - type: cosine_pearson value: 84.23610000000001 - type: cosine_spearman value: 84.2503 - type: manhattan_pearson value: 82.3061 - type: manhattan_spearman value: 84.2598 - type: euclidean_pearson value: 82.30330000000001 - type: euclidean_spearman value: 84.2503 - type: main_score value: 84.2503 - task: type: STS dataset: name: MTEB STS15 (default) type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: pearson value: 90.5569 - type: spearman value: 90.4496 - type: cosine_pearson value: 90.5569 - type: cosine_spearman value: 90.4496 - type: manhattan_pearson value: 88.5942 - type: manhattan_spearman value: 90.4286 - type: euclidean_pearson value: 88.6003 - type: euclidean_spearman value: 
90.4496 - type: main_score value: 90.4496 - task: type: STS dataset: name: MTEB STS16 (default) type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: pearson value: 86.447 - type: spearman value: 86.4776 - type: cosine_pearson value: 86.447 - type: cosine_spearman value: 86.4776 - type: manhattan_pearson value: 85.3768 - type: manhattan_spearman value: 86.48599999999999 - type: euclidean_pearson value: 85.3792 - type: euclidean_spearman value: 86.4776 - type: main_score value: 86.4776 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 90.40820000000001 - type: spearman value: 89.7495 - type: cosine_pearson value: 90.40820000000001 - type: cosine_spearman value: 89.7495 - type: manhattan_pearson value: 88.20519999999999 - type: manhattan_spearman value: 89.62689999999999 - type: euclidean_pearson value: 88.268 - type: euclidean_spearman value: 89.7495 - type: main_score value: 89.7495 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 69.5732 - type: spearman value: 67.7261 - type: cosine_pearson value: 69.5732 - type: cosine_spearman value: 67.7261 - type: manhattan_pearson value: 69.7793 - type: manhattan_spearman value: 67.9213 - type: euclidean_pearson value: 69.6908 - type: euclidean_spearman value: 67.7261 - type: main_score value: 67.7261 - task: type: STS dataset: name: MTEB STSBenchmark (default) type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: pearson value: 86.6914 - type: spearman value: 87.2151 - type: cosine_pearson value: 86.6914 - type: cosine_spearman value: 87.2151 - type: manhattan_pearson value: 85.8277 - type: manhattan_spearman value: 87.2492 - type: euclidean_pearson value: 85.79719999999999 - type: euclidean_spearman value: 87.2151 - type: main_score value: 87.2151 - task: type: Reranking dataset: name: MTEB SciDocsRR (default) type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 89.137 - type: mrr value: 96.7541 - type: nAUC_map_max value: 52.1481 - type: nAUC_map_std value: 72.15859999999999 - type: nAUC_map_diff1 value: -10.389 - type: nAUC_mrr_max value: 85.25160000000001 - type: nAUC_mrr_std value: 87.73570000000001 - type: nAUC_mrr_diff1 value: 30.605300000000003 - type: main_score value: 89.137 - task: type: Retrieval dataset: name: MTEB SciFact (default) type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: ndcg_at_1 value: 71.667 - type: ndcg_at_3 value: 79.611 - type: ndcg_at_5 value: 81.394 - type: ndcg_at_10 value: 83.279 - type: ndcg_at_20 value: 83.8 - type: ndcg_at_100 value: 84.233 - type: ndcg_at_1000 value: 84.316 - type: map_at_1 value: 68.57799999999999 - type: map_at_3 value: 76.639 - type: map_at_5 value: 78.168 - type: map_at_10 value: 79.148 - type: map_at_20 value: 79.31 - type: map_at_100 value: 79.36800000000001 - type: map_at_1000 value: 79.37100000000001 - type: recall_at_1 value: 68.57799999999999 - type: recall_at_3 value: 85.47200000000001 - type: recall_at_5 value: 89.839 - type: recall_at_10 value: 95 - type: recall_at_20 value: 97 - type: recall_at_100 value: 99.333 - type: 
recall_at_1000 value: 100 - type: precision_at_1 value: 71.667 - type: precision_at_3 value: 31 - type: precision_at_5 value: 20.067 - type: precision_at_10 value: 10.767 - type: precision_at_20 value: 5.5 - type: precision_at_100 value: 1.123 - type: precision_at_1000 value: 0.11299999999999999 - type: mrr_at_1 value: 71.6667 - type: mrr_at_3 value: 78.2222 - type: mrr_at_5 value: 79.0222 - type: mrr_at_10 value: 79.7295 - type: mrr_at_20 value: 79.83879999999999 - type: mrr_at_100 value: 79.89739999999999 - type: mrr_at_1000 value: 79.9004 - type: nauc_ndcg_at_1_max value: 42.2431 - type: nauc_ndcg_at_1_std value: -2.0832 - type: nauc_ndcg_at_1_diff1 value: 76.9413 - type: nauc_ndcg_at_3_max value: 34.7709 - type: nauc_ndcg_at_3_std value: -6.3732999999999995 - type: nauc_ndcg_at_3_diff1 value: 74.6789 - type: nauc_ndcg_at_5_max value: 37.940400000000004 - type: nauc_ndcg_at_5_std value: -3.9581999999999997 - type: nauc_ndcg_at_5_diff1 value: 75.22330000000001 - type: nauc_ndcg_at_10_max value: 41.6103 - type: nauc_ndcg_at_10_std value: -0.0314 - type: nauc_ndcg_at_10_diff1 value: 75.2945 - type: nauc_ndcg_at_20_max value: 42.524 - type: nauc_ndcg_at_20_std value: 0.2979 - type: nauc_ndcg_at_20_diff1 value: 75.4989 - type: nauc_ndcg_at_100_max value: 41.727399999999996 - type: nauc_ndcg_at_100_std value: -0.4197 - type: nauc_ndcg_at_100_diff1 value: 75.7163 - type: nauc_ndcg_at_1000_max value: 41.3855 - type: nauc_ndcg_at_1000_std value: -0.6131 - type: nauc_ndcg_at_1000_diff1 value: 75.618 - type: nauc_map_at_1_max value: 32.7432 - type: nauc_map_at_1_std value: -10.6948 - type: nauc_map_at_1_diff1 value: 77.2203 - type: nauc_map_at_3_max value: 32.7526 - type: nauc_map_at_3_std value: -7.8953 - type: nauc_map_at_3_diff1 value: 75.88380000000001 - type: nauc_map_at_5_max value: 36.868 - type: nauc_map_at_5_std value: -4.5381 - type: nauc_map_at_5_diff1 value: 75.5504 - type: nauc_map_at_10_max value: 39.0762 - type: nauc_map_at_10_std value: -2.1559 - type: nauc_map_at_10_diff1 value: 75.5037 - type: nauc_map_at_20_max value: 39.3914 - type: nauc_map_at_20_std value: -2.075 - type: nauc_map_at_20_diff1 value: 75.5527 - type: nauc_map_at_100_max value: 39.2883 - type: nauc_map_at_100_std value: -2.1987 - type: nauc_map_at_100_diff1 value: 75.57979999999999 - type: nauc_map_at_1000_max value: 39.278200000000005 - type: nauc_map_at_1000_std value: -2.1991 - type: nauc_map_at_1000_diff1 value: 75.5776 - type: nauc_recall_at_1_max value: 32.7432 - type: nauc_recall_at_1_std value: -10.6948 - type: nauc_recall_at_1_diff1 value: 77.2203 - type: nauc_recall_at_3_max value: 23.718500000000002 - type: nauc_recall_at_3_std value: -14.9527 - type: nauc_recall_at_3_diff1 value: 70.99849999999999 - type: nauc_recall_at_5_max value: 34.1278 - type: nauc_recall_at_5_std value: -8.9991 - type: nauc_recall_at_5_diff1 value: 72.9131 - type: nauc_recall_at_10_max value: 53.4174 - type: nauc_recall_at_10_std value: 10.591299999999999 - type: nauc_recall_at_10_diff1 value: 72.1148 - type: nauc_recall_at_20_max value: 74.4061 - type: nauc_recall_at_20_std value: 23.5605 - type: nauc_recall_at_20_diff1 value: 74.515 - type: nauc_recall_at_100_max value: 100 - type: nauc_recall_at_100_std value: 41.4332 - type: nauc_recall_at_100_diff1 value: 93.4641 - type: nauc_recall_at_1000_max - type: nauc_recall_at_1000_std - type: nauc_recall_at_1000_diff1 - type: nauc_precision_at_1_max value: 42.2431 - type: nauc_precision_at_1_std value: -2.0832 - type: nauc_precision_at_1_diff1 value: 76.9413 - type: 
nauc_precision_at_3_max value: 31.2606 - type: nauc_precision_at_3_std value: 19.564300000000003 - type: nauc_precision_at_3_diff1 value: 27.538899999999998 - type: nauc_precision_at_5_max value: 36.896 - type: nauc_precision_at_5_std value: 32.9313 - type: nauc_precision_at_5_diff1 value: 5.233899999999999 - type: nauc_precision_at_10_max value: 40.0781 - type: nauc_precision_at_10_std value: 48.0555 - type: nauc_precision_at_10_diff1 value: -14.6074 - type: nauc_precision_at_20_max value: 39.3814 - type: nauc_precision_at_20_std value: 49.9845 - type: nauc_precision_at_20_diff1 value: -21.171 - type: nauc_precision_at_100_max value: 36.6046 - type: nauc_precision_at_100_std value: 53.1439 - type: nauc_precision_at_100_diff1 value: -30.216500000000003 - type: nauc_precision_at_1000_max value: 34.7361 - type: nauc_precision_at_1000_std value: 53.4891 - type: nauc_precision_at_1000_diff1 value: -33.8617 - type: nauc_mrr_at_1_max value: 42.2431 - type: nauc_mrr_at_1_std value: -2.0832 - type: nauc_mrr_at_1_diff1 value: 76.9413 - type: nauc_mrr_at_3_max value: 40.1861 - type: nauc_mrr_at_3_std value: -2.1431 - type: nauc_mrr_at_3_diff1 value: 75.3883 - type: nauc_mrr_at_5_max value: 40.9913 - type: nauc_mrr_at_5_std value: -1.6580000000000001 - type: nauc_mrr_at_5_diff1 value: 75.8294 - type: nauc_mrr_at_10_max value: 41.8035 - type: nauc_mrr_at_10_std value: -1.1311 - type: nauc_mrr_at_10_diff1 value: 75.9254 - type: nauc_mrr_at_20_max value: 41.9873 - type: nauc_mrr_at_20_std value: -1.1159000000000001 - type: nauc_mrr_at_20_diff1 value: 75.9764 - type: nauc_mrr_at_100_max value: 41.890699999999995 - type: nauc_mrr_at_100_std value: -1.239 - type: nauc_mrr_at_100_diff1 value: 76.00529999999999 - type: nauc_mrr_at_1000_max value: 41.8809 - type: nauc_mrr_at_1000_std value: -1.2392 - type: nauc_mrr_at_1000_diff1 value: 76.0031 - type: main_score value: 83.279 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions (default) type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: similarity_accuracy value: 99.8644 - type: similarity_accuracy_threshold value: 85.02 - type: similarity_f1 value: 93.1875 - type: similarity_f1_threshold value: 85.02 - type: similarity_precision value: 92.6805 - type: similarity_recall value: 93.7 - type: similarity_ap value: 96.7847 - type: cosine_accuracy value: 99.8644 - type: cosine_accuracy_threshold value: 85.02 - type: cosine_f1 value: 93.1875 - type: cosine_f1_threshold value: 85.02 - type: cosine_precision value: 92.6805 - type: cosine_recall value: 93.7 - type: cosine_ap value: 96.7847 - type: manhattan_accuracy value: 99.8634 - type: manhattan_accuracy_threshold value: 2593.8221 - type: manhattan_f1 value: 93.1275 - type: manhattan_f1_threshold value: 2593.8221 - type: manhattan_precision value: 92.7579 - type: manhattan_recall value: 93.5 - type: manhattan_ap value: 96.806 - type: euclidean_accuracy value: 99.8644 - type: euclidean_accuracy_threshold value: 54.7358 - type: euclidean_f1 value: 93.1875 - type: euclidean_f1_threshold value: 54.7358 - type: euclidean_precision value: 92.6805 - type: euclidean_recall value: 93.7 - type: euclidean_ap value: 96.7847 - type: dot_accuracy value: 99.8644 - type: dot_accuracy_threshold value: 85.02 - type: dot_f1 value: 93.1875 - type: dot_f1_threshold value: 85.02 - type: dot_precision value: 92.6805 - type: dot_recall value: 93.7 - type: dot_ap value: 96.7847 - type: max_accuracy value: 99.8644 - 
type: max_f1 value: 93.1875 - type: max_precision value: 92.7579 - type: max_recall value: 93.7 - type: max_ap value: 96.806 - type: main_score value: 96.806 - task: type: Clustering dataset: name: MTEB StackExchangeClustering (default) type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 81.7075 - type: v_measure_std value: 2.4228 - type: main_score value: 81.7075 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P (default) type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 39.836 - type: v_measure_std value: 1.5339 - type: main_score value: 39.836 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions (default) type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 62.9227 - type: mrr value: 64.1239 - type: nAUC_map_max value: 9.3055 - type: nAUC_map_std value: 3.2321000000000004 - type: nAUC_map_diff1 value: 45.2884 - type: nAUC_mrr_max value: 10.8913 - type: nAUC_mrr_std value: 4.1469 - type: nAUC_mrr_diff1 value: 45.280300000000004 - type: main_score value: 62.9227 - task: type: Summarization dataset: name: MTEB SummEval (default) type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: pearson value: 45.5445 - type: spearman value: 40.5224 - type: cosine_spearman value: 40.5224 - type: cosine_pearson value: 45.5445 - type: dot_spearman value: 40.5224 - type: dot_pearson value: 45.5446 - type: main_score value: 40.5224 - task: type: Retrieval dataset: name: MTEB TRECCOVID (default) type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: ndcg_at_1 value: 86 - type: ndcg_at_3 value: 86.70400000000001 - type: ndcg_at_5 value: 84.654 - type: ndcg_at_10 value: 80.972 - type: ndcg_at_20 value: 76.783 - type: ndcg_at_100 value: 63.146 - type: ndcg_at_1000 value: 56.424 - type: map_at_1 value: 0.251 - type: map_at_3 value: 0.719 - type: map_at_5 value: 1.131 - type: map_at_10 value: 2.0820000000000003 - type: map_at_20 value: 3.746 - type: map_at_100 value: 13.020999999999999 - type: map_at_1000 value: 31.527 - type: recall_at_1 value: 0.251 - type: recall_at_3 value: 0.742 - type: recall_at_5 value: 1.179 - type: recall_at_10 value: 2.237 - type: recall_at_20 value: 4.144 - type: recall_at_100 value: 16.054 - type: recall_at_1000 value: 52.76 - type: precision_at_1 value: 92 - type: precision_at_3 value: 92 - type: precision_at_5 value: 88.8 - type: precision_at_10 value: 85 - type: precision_at_20 value: 79.7 - type: precision_at_100 value: 64.53999999999999 - type: precision_at_1000 value: 24.471999999999998 - type: mrr_at_1 value: 92 - type: mrr_at_3 value: 95.6667 - type: mrr_at_5 value: 95.6667 - type: mrr_at_10 value: 95.6667 - type: mrr_at_20 value: 95.6667 - type: mrr_at_100 value: 95.6667 - type: mrr_at_1000 value: 95.6667 - type: nauc_ndcg_at_1_max value: 7.0274 - type: nauc_ndcg_at_1_std value: 41.318 - type: nauc_ndcg_at_1_diff1 value: -46.5125 - type: nauc_ndcg_at_3_max value: 2.0167 - type: nauc_ndcg_at_3_std value: 36.144999999999996 - type: nauc_ndcg_at_3_diff1 value: -17.705199999999998 - type: nauc_ndcg_at_5_max value: -6.812 - type: nauc_ndcg_at_5_std value: 41.8996 - type: nauc_ndcg_at_5_diff1 value: -14.7154 - type: 
nauc_ndcg_at_10_max value: 15.1784 - type: nauc_ndcg_at_10_std value: 51.709799999999994 - type: nauc_ndcg_at_10_diff1 value: -5.0968 - type: nauc_ndcg_at_20_max value: 28.403200000000002 - type: nauc_ndcg_at_20_std value: 59.824299999999994 - type: nauc_ndcg_at_20_diff1 value: -14.036000000000001 - type: nauc_ndcg_at_100_max value: 35.4195 - type: nauc_ndcg_at_100_std value: 75.7747 - type: nauc_ndcg_at_100_diff1 value: -10.4627 - type: nauc_ndcg_at_1000_max value: 31.450400000000002 - type: nauc_ndcg_at_1000_std value: 78.85940000000001 - type: nauc_ndcg_at_1000_diff1 value: -1.3263 - type: nauc_map_at_1_max value: -3.8297 - type: nauc_map_at_1_std value: -10.6113 - type: nauc_map_at_1_diff1 value: 9.2146 - type: nauc_map_at_3_max value: -3.1165000000000003 - type: nauc_map_at_3_std value: -8.4396 - type: nauc_map_at_3_diff1 value: 14.183000000000002 - type: nauc_map_at_5_max value: -4.4023 - type: nauc_map_at_5_std value: -6.641500000000001 - type: nauc_map_at_5_diff1 value: 16.1186 - type: nauc_map_at_10_max value: 1.802 - type: nauc_map_at_10_std value: 0.9958 - type: nauc_map_at_10_diff1 value: 20.3485 - type: nauc_map_at_20_max value: 10.9146 - type: nauc_map_at_20_std value: 10.3413 - type: nauc_map_at_20_diff1 value: 14.7839 - type: nauc_map_at_100_max value: 25.633 - type: nauc_map_at_100_std value: 44.9724 - type: nauc_map_at_100_diff1 value: 6.572699999999999 - type: nauc_map_at_1000_max value: 33.8688 - type: nauc_map_at_1000_std value: 76.9255 - type: nauc_map_at_1000_diff1 value: -5.8205 - type: nauc_recall_at_1_max value: -3.8297 - type: nauc_recall_at_1_std value: -10.6113 - type: nauc_recall_at_1_diff1 value: 9.2146 - type: nauc_recall_at_3_max value: -6.209 - type: nauc_recall_at_3_std value: -11.3272 - type: nauc_recall_at_3_diff1 value: 16.497500000000002 - type: nauc_recall_at_5_max value: -7.6928 - type: nauc_recall_at_5_std value: -8.9985 - type: nauc_recall_at_5_diff1 value: 19.028100000000002 - type: nauc_recall_at_10_max value: -1.3407 - type: nauc_recall_at_10_std value: -2.5698 - type: nauc_recall_at_10_diff1 value: 21.570700000000002 - type: nauc_recall_at_20_max value: 6.866700000000001 - type: nauc_recall_at_20_std value: 5.7298 - type: nauc_recall_at_20_diff1 value: 16.050800000000002 - type: nauc_recall_at_100_max value: 16.4856 - type: nauc_recall_at_100_std value: 33.1774 - type: nauc_recall_at_100_diff1 value: 12.0273 - type: nauc_recall_at_1000_max value: 25.3677 - type: nauc_recall_at_1000_std value: 71.1541 - type: nauc_recall_at_1000_diff1 value: 0.796 - type: nauc_precision_at_1_max value: 57.236200000000004 - type: nauc_precision_at_1_std value: 47.7241 - type: nauc_precision_at_1_diff1 value: -57.8198 - type: nauc_precision_at_3_max value: 35.6953 - type: nauc_precision_at_3_std value: 31.414199999999997 - type: nauc_precision_at_3_diff1 value: -6.7696000000000005 - type: nauc_precision_at_5_max value: 1.699 - type: nauc_precision_at_5_std value: 37.6284 - type: nauc_precision_at_5_diff1 value: -4.9533000000000005 - type: nauc_precision_at_10_max value: 31.645400000000002 - type: nauc_precision_at_10_std value: 48.4684 - type: nauc_precision_at_10_diff1 value: 8.3324 - type: nauc_precision_at_20_max value: 45.7958 - type: nauc_precision_at_20_std value: 56.3558 - type: nauc_precision_at_20_diff1 value: -7.8348 - type: nauc_precision_at_100_max value: 40.1005 - type: nauc_precision_at_100_std value: 73.342 - type: nauc_precision_at_100_diff1 value: -7.284400000000001 - type: nauc_precision_at_1000_max value: 27.9268 - type: 
nauc_precision_at_1000_std value: 50.145799999999994 - type: nauc_precision_at_1000_diff1 value: -15.678700000000001 - type: nauc_mrr_at_1_max value: 57.236200000000004 - type: nauc_mrr_at_1_std value: 47.7241 - type: nauc_mrr_at_1_diff1 value: -57.8198 - type: nauc_mrr_at_3_max value: 53.7779 - type: nauc_mrr_at_3_std value: 51.74530000000001 - type: nauc_mrr_at_3_diff1 value: -49.1094 - type: nauc_mrr_at_5_max value: 53.7779 - type: nauc_mrr_at_5_std value: 51.74530000000001 - type: nauc_mrr_at_5_diff1 value: -49.1094 - type: nauc_mrr_at_10_max value: 53.7779 - type: nauc_mrr_at_10_std value: 51.74530000000001 - type: nauc_mrr_at_10_diff1 value: -49.1094 - type: nauc_mrr_at_20_max value: 53.7779 - type: nauc_mrr_at_20_std value: 51.74530000000001 - type: nauc_mrr_at_20_diff1 value: -49.1094 - type: nauc_mrr_at_100_max value: 53.7779 - type: nauc_mrr_at_100_std value: 51.74530000000001 - type: nauc_mrr_at_100_diff1 value: -49.1094 - type: nauc_mrr_at_1000_max value: 53.7779 - type: nauc_mrr_at_1000_std value: 51.74530000000001 - type: nauc_mrr_at_1000_diff1 value: -49.1094 - type: main_score value: 80.972 - task: type: Retrieval dataset: name: MTEB Touche2020 (default) type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: ndcg_at_1 value: 45.918 - type: ndcg_at_3 value: 42.414 - type: ndcg_at_5 value: 36.911 - type: ndcg_at_10 value: 34.059 - type: ndcg_at_20 value: 33.789 - type: ndcg_at_100 value: 43.24 - type: ndcg_at_1000 value: 53.028 - type: map_at_1 value: 3.711 - type: map_at_3 value: 8.031 - type: map_at_5 value: 10.174999999999999 - type: map_at_10 value: 13.745 - type: map_at_20 value: 16.833000000000002 - type: map_at_100 value: 20.534 - type: map_at_1000 value: 21.929000000000002 - type: recall_at_1 value: 3.711 - type: recall_at_3 value: 9.289 - type: recall_at_5 value: 12.469 - type: recall_at_10 value: 20.31 - type: recall_at_20 value: 28.549999999999997 - type: recall_at_100 value: 50.132 - type: recall_at_1000 value: 78.636 - type: precision_at_1 value: 48.980000000000004 - type: precision_at_3 value: 43.537 - type: precision_at_5 value: 35.510000000000005 - type: precision_at_10 value: 29.592000000000002 - type: precision_at_20 value: 21.633 - type: precision_at_100 value: 8.265 - type: precision_at_1000 value: 1.478 - type: mrr_at_1 value: 48.9796 - type: mrr_at_3 value: 61.9048 - type: mrr_at_5 value: 62.8231 - type: mrr_at_10 value: 64.78620000000001 - type: mrr_at_20 value: 64.8882 - type: mrr_at_100 value: 64.9667 - type: mrr_at_1000 value: 64.9667 - type: nauc_ndcg_at_1_max value: -14.377 - type: nauc_ndcg_at_1_std value: -24.7998 - type: nauc_ndcg_at_1_diff1 value: -2.7112000000000003 - type: nauc_ndcg_at_3_max value: -25.411 - type: nauc_ndcg_at_3_std value: -21.4105 - type: nauc_ndcg_at_3_diff1 value: 11.6233 - type: nauc_ndcg_at_5_max value: -18.7583 - type: nauc_ndcg_at_5_std value: -12.3778 - type: nauc_ndcg_at_5_diff1 value: 2.0221 - type: nauc_ndcg_at_10_max value: -20.5164 - type: nauc_ndcg_at_10_std value: -15.9037 - type: nauc_ndcg_at_10_diff1 value: 4.8377 - type: nauc_ndcg_at_20_max value: -24.3335 - type: nauc_ndcg_at_20_std value: -15.4334 - type: nauc_ndcg_at_20_diff1 value: 5.2053 - type: nauc_ndcg_at_100_max value: -27.9931 - type: nauc_ndcg_at_100_std value: -0.267 - type: nauc_ndcg_at_100_diff1 value: 8.0295 - type: nauc_ndcg_at_1000_max value: -22.2584 - type: nauc_ndcg_at_1000_std value: 16.6679 - type: nauc_ndcg_at_1000_diff1 value: -0.8999999999999999 - type: nauc_map_at_1_max 
value: -19.5845 - type: nauc_map_at_1_std value: -33.0644 - type: nauc_map_at_1_diff1 value: -5.815300000000001 - type: nauc_map_at_3_max value: -28.4895 - type: nauc_map_at_3_std value: -32.191199999999995 - type: nauc_map_at_3_diff1 value: 9.8452 - type: nauc_map_at_5_max value: -17.3979 - type: nauc_map_at_5_std value: -21.3281 - type: nauc_map_at_5_diff1 value: -2.7651 - type: nauc_map_at_10_max value: -16.5472 - type: nauc_map_at_10_std value: -21.7069 - type: nauc_map_at_10_diff1 value: -1.7826000000000002 - type: nauc_map_at_20_max value: -18.6049 - type: nauc_map_at_20_std value: -17.8565 - type: nauc_map_at_20_diff1 value: -0.0181 - type: nauc_map_at_100_max value: -20.030800000000003 - type: nauc_map_at_100_std value: -8.6978 - type: nauc_map_at_100_diff1 value: 1.1159000000000001 - type: nauc_map_at_1000_max value: -18.5756 - type: nauc_map_at_1000_std value: -4.4186000000000005 - type: nauc_map_at_1000_diff1 value: -0.7358 - type: nauc_recall_at_1_max value: -19.5845 - type: nauc_recall_at_1_std value: -33.0644 - type: nauc_recall_at_1_diff1 value: -5.815300000000001 - type: nauc_recall_at_3_max value: -33.051199999999994 - type: nauc_recall_at_3_std value: -30.767099999999996 - type: nauc_recall_at_3_diff1 value: 11.7941 - type: nauc_recall_at_5_max value: -18.8571 - type: nauc_recall_at_5_std value: -17.8328 - type: nauc_recall_at_5_diff1 value: -5.9348 - type: nauc_recall_at_10_max value: -20.657700000000002 - type: nauc_recall_at_10_std value: -20.5083 - type: nauc_recall_at_10_diff1 value: 0.7172999999999999 - type: nauc_recall_at_20_max value: -21.78 - type: nauc_recall_at_20_std value: -12.2194 - type: nauc_recall_at_20_diff1 value: 2.4215 - type: nauc_recall_at_100_max value: -28.1499 - type: nauc_recall_at_100_std value: 12.5616 - type: nauc_recall_at_100_diff1 value: 6.282400000000001 - type: nauc_recall_at_1000_max value: -3.4448 - type: nauc_recall_at_1000_std value: 70.2153 - type: nauc_recall_at_1000_diff1 value: -20.1278 - type: nauc_precision_at_1_max value: -16.253600000000002 - type: nauc_precision_at_1_std value: -28.961100000000002 - type: nauc_precision_at_1_diff1 value: -4.5123999999999995 - type: nauc_precision_at_3_max value: -31.231399999999997 - type: nauc_precision_at_3_std value: -21.6787 - type: nauc_precision_at_3_diff1 value: 14.080799999999998 - type: nauc_precision_at_5_max value: -18.4843 - type: nauc_precision_at_5_std value: -4.0988 - type: nauc_precision_at_5_diff1 value: -2.3491 - type: nauc_precision_at_10_max value: -21.7679 - type: nauc_precision_at_10_std value: -2.7599 - type: nauc_precision_at_10_diff1 value: 10.6409 - type: nauc_precision_at_20_max value: -17.049300000000002 - type: nauc_precision_at_20_std value: 12.609200000000001 - type: nauc_precision_at_20_diff1 value: 11.3369 - type: nauc_precision_at_100_max value: -9.675699999999999 - type: nauc_precision_at_100_std value: 44.9955 - type: nauc_precision_at_100_diff1 value: 5.7501999999999995 - type: nauc_precision_at_1000_max value: 29.789500000000004 - type: nauc_precision_at_1000_std value: 58.205200000000005 - type: nauc_precision_at_1000_diff1 value: -22.6755 - type: nauc_mrr_at_1_max value: -16.253600000000002 - type: nauc_mrr_at_1_std value: -28.961100000000002 - type: nauc_mrr_at_1_diff1 value: -4.5123999999999995 - type: nauc_mrr_at_3_max value: -30.4084 - type: nauc_mrr_at_3_std value: -29.1267 - type: nauc_mrr_at_3_diff1 value: -2.9535 - type: nauc_mrr_at_5_max value: -31.6427 - type: nauc_mrr_at_5_std value: -27.5858 - type: nauc_mrr_at_5_diff1 value: -2.032 - 
type: nauc_mrr_at_10_max value: -31.1008 - type: nauc_mrr_at_10_std value: -27.338099999999997 - type: nauc_mrr_at_10_diff1 value: -0.1675 - type: nauc_mrr_at_20_max value: -30.7834 - type: nauc_mrr_at_20_std value: -27.6591 - type: nauc_mrr_at_20_diff1 value: -0.3828 - type: nauc_mrr_at_100_max value: -30.3645 - type: nauc_mrr_at_100_std value: -28.003 - type: nauc_mrr_at_100_diff1 value: -0.48979999999999996 - type: nauc_mrr_at_1000_max value: -30.3645 - type: nauc_mrr_at_1000_std value: -28.003 - type: nauc_mrr_at_1000_diff1 value: -0.48979999999999996 - type: main_score value: 34.059 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification (default) type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 95.1318 - type: f1 value: 84.8018 - type: f1_weighted value: 95.3488 - type: ap value: 54.4247 - type: ap_weighted value: 54.4247 - type: main_score value: 95.1318 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification (default) type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 81.4488 - type: f1 value: 81.77990000000001 - type: f1_weighted value: 81.4677 - type: main_score value: 81.4488 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering (default) type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 71.18560000000001 - type: v_measure_std value: 1.1396 - type: main_score value: 71.18560000000001 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 (default) type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: similarity_accuracy value: 88.3293 - type: similarity_accuracy_threshold value: 89.5055 - type: similarity_f1 value: 72.9896 - type: similarity_f1_threshold value: 87.6934 - type: similarity_precision value: 69.5767 - type: similarity_recall value: 76.7546 - type: similarity_ap value: 80.33160000000001 - type: cosine_accuracy value: 88.3293 - type: cosine_accuracy_threshold value: 89.5055 - type: cosine_f1 value: 72.9896 - type: cosine_f1_threshold value: 87.6934 - type: cosine_precision value: 69.5767 - type: cosine_recall value: 76.7546 - type: cosine_ap value: 80.33160000000001 - type: manhattan_accuracy value: 88.29350000000001 - type: manhattan_accuracy_threshold value: 2182.2741 - type: manhattan_f1 value: 73.0484 - type: manhattan_f1_threshold value: 2329.781 - type: manhattan_precision value: 70.9245 - type: manhattan_recall value: 75.3034 - type: manhattan_ap value: 80.3871 - type: euclidean_accuracy value: 88.3293 - type: euclidean_accuracy_threshold value: 45.8136 - type: euclidean_f1 value: 72.9896 - type: euclidean_f1_threshold value: 49.6117 - type: euclidean_precision value: 69.5767 - type: euclidean_recall value: 76.7546 - type: euclidean_ap value: 80.33160000000001 - type: dot_accuracy value: 88.3293 - type: dot_accuracy_threshold value: 89.5055 - type: dot_f1 value: 72.9896 - type: dot_f1_threshold value: 87.6934 - type: dot_precision value: 69.5767 - type: dot_recall value: 76.7546 - type: dot_ap value: 80.33160000000001 - type: max_accuracy value: 88.3293 - type: max_f1 value: 73.0484 - type: max_precision value: 70.9245 - type: max_recall value: 76.7546 - type: max_ap value: 80.3871 - 
type: main_score value: 80.3871 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus (default) type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: similarity_accuracy value: 89.5098 - type: similarity_accuracy_threshold value: 86.3375 - type: similarity_f1 value: 79.5103 - type: similarity_f1_threshold value: 85.09649999999999 - type: similarity_precision value: 77.381 - type: similarity_recall value: 81.76010000000001 - type: similarity_ap value: 87.07770000000001 - type: cosine_accuracy value: 89.5098 - type: cosine_accuracy_threshold value: 86.3375 - type: cosine_f1 value: 79.5103 - type: cosine_f1_threshold value: 85.09649999999999 - type: cosine_precision value: 77.381 - type: cosine_recall value: 81.76010000000001 - type: cosine_ap value: 87.07770000000001 - type: manhattan_accuracy value: 89.5195 - type: manhattan_accuracy_threshold value: 2522.3334999999997 - type: manhattan_f1 value: 79.4922 - type: manhattan_f1_threshold value: 2646.0447 - type: manhattan_precision value: 75.5303 - type: manhattan_recall value: 83.8928 - type: manhattan_ap value: 87.0889 - type: euclidean_accuracy value: 89.5098 - type: euclidean_accuracy_threshold value: 52.2734 - type: euclidean_f1 value: 79.5103 - type: euclidean_f1_threshold value: 54.595800000000004 - type: euclidean_precision value: 77.381 - type: euclidean_recall value: 81.76010000000001 - type: euclidean_ap value: 87.07770000000001 - type: dot_accuracy value: 89.5098 - type: dot_accuracy_threshold value: 86.3375 - type: dot_f1 value: 79.5103 - type: dot_f1_threshold value: 85.09649999999999 - type: dot_precision value: 77.381 - type: dot_recall value: 81.76010000000001 - type: dot_ap value: 87.07770000000001 - type: max_accuracy value: 89.5195 - type: max_f1 value: 79.5103 - type: max_precision value: 77.381 - type: max_recall value: 83.8928 - type: max_ap value: 87.0889 - type: main_score value: 87.0889 --- # Gemma Embeddings v1.0 GemmaEmbed is a dense-vector embedding model, trained especially for retrieval. As of December 12, 2024, GemmaEmbed achieves the #1 position overall on the MTEB leaderboard, with a score of 72.72. # Important Notes * This is not an official Google product. * This is a research project. # Results summary Results comparing with BGE-EN-ICL and NV-Embed-v2 on each task in [MTEB](https://huggingface.co/spaces/mteb/leaderboard): Model | Total (56) |Classification (12) | Classification Pair (3) | STS (10) |Clustering (11) | Reranking (4) | Retrieval (15) | Summary (1) -- | -- | -- | -- | -- | -- | -- | -- | -- bge-en-icl | 0.7167 | 0.8895 | 0.8814 | 0.8425 | 0.5789 | 0.5986 | 0.6216 | 0.3077 NV-Embed-v2 | 0.7231 | 0.9037 | 0.8867 | 0.8431 | 0.5846 | 0.6065 | 0.6265 | 0.3070 Gemma-Embeddings-v1.0 | 0.7272 | 0.9000 | 0.8809 | 0.8423 | 0.5826 | 0.6214 | 0.6371 | 0.4052 # Model & Data Our base encoder model is [Gemma2 9B](https://huggingface.co/google/gemma-2-9b). We use the [BGE-EN-ICL training data](https://huggingface.co/datasets/cfli/bge-full-data). # Research Team * Nicholas Monath * Michael Boratko * Seungyeon Kim * Andrew McCallum * Rob Fergus * Manzil Zaheer
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
robbiemu/salamandra-2b
robbiemu
text-generation
[ "transformers", "gguf", "llama", "text-generation", "bg", "ca", "code", "cs", "cy", "da", "de", "el", "en", "es", "et", "eu", "fi", "fr", "ga", "gl", "hr", "hu", "it", "lt", "lv", "mt", "nl", "nn", "oc", "pl", "pt", "ro", "ru", "sh", "sk", "sl", "sr", "sv", "uk", "dataset:oscar", "arxiv:2403.14009", "arxiv:2403.20266", "arxiv:2101.00027", "arxiv:2207.00220", "arxiv:1810.06694", "arxiv:1911.05507", "arxiv:1906.03741", "arxiv:2406.17557", "arxiv:2402.06619", "arxiv:1803.09010", "base_model:BSC-LT/salamandra-2b", "base_model:quantized:BSC-LT/salamandra-2b", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2024-10-12T23:36:58
2024-10-18T19:20:15
131
0
---
base_model: BSC-LT/salamandra-2b
datasets:
- oscar
language:
- bg
- ca
- code
- cs
- cy
- da
- de
- el
- en
- es
- et
- eu
- fi
- fr
- ga
- gl
- hr
- hu
- it
- lt
- lv
- mt
- nl
- nn
- \no
- oc
- pl
- pt
- ro
- ru
- sh
- sk
- sl
- sr
- sv
- uk
library_name: transformers
license: apache-2.0
pipeline_tag: text-generation
---

source repo: [BSC-LT/salamandra](/BSC-LT/salamandra-2b)

# **Quantization Summary**

The base model was quantized in [llama.cpp](https://github.com/ggerganov/llama.cpp) with a substantial importance matrix covering all target languages (some 34x1000 samples, 96MB of text), drawn from the [Open Super-large Crawled ALMAnaCH coRpus](/datasets/oscar-corpus/oscar) dataset. Logs of the process are included.

- **IQ3_M**: At <1.8GB, the smallest model worth highlighting.
- **Q4_K_S**: Good size reduction with minimal PPL impact.
- **Q5_K_M**: Excellent balance above **Q4**, recommended for most applications.
- **Q6_K**: Provides near-**bf16** performance with size savings.

---

# Quantization

### **Perplexity Comparison Table:**

| **Quantization Type** | **PPL** | **ln(PPL(Q)/PPL(bf16))** | **File Size** | **Notes** |
|-----------------------|------------|--------------------------|---------------|----------------------------------------------------------------|
| [**IQ3_M**](salamandra-2b_IQ3_M.gguf) | 15.1995 | 0.079131 | 1.7G | Good size efficiency with acceptable PPL increase |
| [**Q3_K_L**](salamandra-2b_Q3_K_L.gguf) | 15.0444 | 0.068875 | 1.8G | Further size reduction with modest PPL increase |
| [**Q4_K_S**](salamandra-2b_Q4_K_S.gguf) | 14.4338 | 0.027442 | 1.9G | Good size reduction with minimal PPL impact (**recommended**) |
| [**Q5_K_M**](salamandra-2b_Q5_K_M.gguf) | 14.1299 | 0.006162 | 2.2G | Excellent balance of PPL and size (**recommended**) |
| [**Q6_K**](salamandra-2b_Q6_K.gguf) | 14.0675 | 0.001736 | 2.4G | Nearly lossless performance with reduced size |
| [**bf16**](salamandra-2b_bf16.gguf) | 14.0431 | 0.0 | 4.2G | Baseline |

---

### **Notes:**

- **Recommended Quantizations:**
  - **Q4_K_S**: Represents the best of the quantization types at/below **Q4** and less than 2GB, achieving good size efficiency while maintaining low perplexity.
  - **Q5_K_M**: Offers the best balance between low perplexity and reduced file size above **Q4**, making it ideal for most applications.
- **Non-recommended Quantizations:**
  - **IQ3_M**: Offers a smaller file size (1.7G) with an acceptable PPL increase, best among models below 1.8GB. A solid choice among the highly compressed models.
  - **Q3_K_L**: Provides a slightly larger file size (1.8G) than IQ3_M, with an even better PPL.
  - **Q6_K**: Similar to Q8_0, offers very close perplexity to bf16. Given its smaller file size than Q8_0 (2.4G vs. 2.7G), Q6_K provides a better size-to-performance trade-off. It was selected because it is nearly lossless and less than 2.5GB.
- An attempt was made to get a model below 1.5GB, using **IQ2_XS**, but it came out slightly above that size and its perplexity was clearly unacceptable (more than double the 0.3 selection criterion, see next section). If you need a model below 1.7GB, you may be better served by Richard Erkhov's [quantizations](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-gguf), which appear to be static quantizations (no importance matrix), so they are smaller.
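To try any of these GGUF files locally, any llama.cpp-compatible runtime will do. The snippet below is a minimal sketch using the `llama-cpp-python` bindings (an assumption; those bindings are not part of this repository), and the chosen file, context size, and sampling settings are illustrative placeholders that can be swapped for any quantization in the table above.

```python
# Minimal sketch (not an official example): run a quantized GGUF with llama-cpp-python.
# Assumes `pip install llama-cpp-python` and that the chosen file has been downloaded locally.
from llama_cpp import Llama

llm = Llama(
    model_path="salamandra-2b_Q4_K_S.gguf",  # any entry from the table above
    n_ctx=8192,   # the base model supports an 8,192-token context
    n_threads=8,  # adjust to your CPU
)

output = llm(
    "El mercat del barri és",
    max_tokens=25,
    temperature=0.1,
    top_p=0.95,
    repeat_penalty=1.2,
)
print(output["choices"][0]["text"])
```

For reference, the `ln(PPL(Q)/PPL(bf16))` column is simply the natural log of each quantization's perplexity divided by the bf16 baseline. A few lines of Python reproduce it from the table and make it easy to apply the 0.3 log-PPL threshold discussed in the next section:

```python
import math

# Perplexities copied from the comparison table above (bf16 is the baseline).
ppl = {
    "IQ3_M": 15.1995,
    "Q3_K_L": 15.0444,
    "Q4_K_S": 14.4338,
    "Q5_K_M": 14.1299,
    "Q6_K": 14.0675,
    "bf16": 14.0431,
}

for name, value in ppl.items():
    log_ratio = math.log(value / ppl["bf16"])
    keep = log_ratio < 0.3  # the selection criterion used below
    print(f"{name}: ln(PPL/PPL_bf16) = {log_ratio:.6f} (within threshold: {keep})")
```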
---

### **Defending the Selection:**

The selection of recommended models is designed to provide a spectrum of options that meet the following criteria:

- **Diversity in Quantization Types:**
  - **I Quantization Below Q4:** **IQ3_M** is included to offer an option that uses I quantization below the **Q4** level, balancing size and performance.
  - **K Quantization At and Above Q4:** **Q4_K_S**, **Q5_K_M**, and **Q6_K** provide K quantization options at and above the **Q4** level, giving users choices based on their specific needs.
  - **Highly Compressed Quantization (Q3 and below):** **IQ3_M** and **Q3_K_L** are included as they meet the selection criterion of log PPL diff <0.3 and are not redundant with other models.
- **Selection Criteria:**
  - **Log PPL diff <0.3:** All included models have a log PPL difference under 0.3, ensuring that they maintain acceptable performance even when highly quantized.
  - **No Multiple Models Within 100MB of the Same File Size:** Only one model is included per similar file size range to avoid redundancy. For example, **Q3_K_L** (1.8G) is included while other models like **IQ3_XS** (1.7G) are excluded due to overlapping file sizes and comparable PPL, ensuring a sparse yet comprehensive selection.

PPL is measured (with `llama-perplexity`) on a sample of 50 texts per language, drawn from the same dataset used to calculate the importance matrix.

![](./images/salamandra_header.png)

# Salamandra Model Card

Salamandra is a highly multilingual model pre-trained from scratch that comes in three different sizes — 2B, 7B and 40B parameters — with their respective base and instruction-tuned variants. This model card corresponds to the 2B base version.

To visit the model cards of other Salamandra versions, please refer to the [Model Index](#model-index).

The entire Salamandra family is released under a permissive [Apache 2.0 license](https://www.apache.org/licenses/LICENSE-2.0). Along with the open weights, all training scripts and configuration files are made publicly available in [this GitHub repository](https://github.com/langtech-bsc/salamandra).

---

## Model Details

### Description

Transformer-based decoder-only language model that has been pre-trained from scratch on 7.8 trillion tokens of highly curated data. The pre-training corpus contains text in 35 European languages and code.

### Hyperparameters

The full list of hyperparameters for each model can be found [here](https://github.com/langtech-bsc/salamandra/tree/main/configs).

### Architecture

|                         |               |
|-------------------------|:--------------|
| Total Parameters        | 2,253,490,176 |
| Embedding Parameters    | 524,288,000   |
| Layers                  | 24            |
| Hidden size             | 2,048         |
| Attention heads         | 16            |
| Context length          | 8,192         |
| Vocabulary size         | 256,000       |
| Precision               | bfloat16      |
| Embedding type          | RoPE          |
| Activation Function     | SwiGLU        |
| Layer normalization     | RMS Norm      |
| Flash attention         | ✅            |
| Grouped Query Attention | ❌            |
| Num. query groups       | N/A           |

---

## Intended Use

### Direct Use

The models are intended for both research and commercial use in any of the languages included in the training data. The base models are intended either for language generation or to be further fine-tuned for specific use-cases. The instruction-tuned variants can be used as general-purpose assistants, as long as the user is fully aware of the model’s limitations.

### Out-of-scope Use

The model is not intended for malicious activities, such as harming others or violating human rights.
Any downstream application must comply with current laws and regulations. Irresponsible usage in production environments without proper risk assessment and mitigation is also discouraged. --- ## Hardware and Software ### Training Framework Pre-training was conducted using NVIDIA’s [NeMo Framework](https://docs.nvidia.com/nemo-framework/index.html), which leverages PyTorch Lightning for efficient model training in highly distributed settings. The instruction-tuned versions were produced with [FastChat](https://github.com/lm-sys/FastChat). ### Compute Infrastructure All models were trained on [MareNostrum 5](https://www.bsc.es/ca/marenostrum/marenostrum-5), a pre-exascale EuroHPC supercomputer hosted and operated by Barcelona Supercomputing Center. The accelerated partition is composed of 1,120 nodes with the following specifications: - 4x Nvidia Hopper GPUs with 64 HBM2 memory - 2x Intel Sapphire Rapids 8460Y+ at 2.3Ghz and 32c each (64 cores) - 4x NDR200 (BW per node 800Gb/s) - 512 GB of Main memory (DDR5) - 460GB on NVMe storage |Model|Nodes|GPUs| |:---:|:---:|:---:| |2B|64|256| |7B|128|512| |40B|256 / 512|1,024 / 2,048| --- ## How to use This section offers examples of how to perform inference using various methods. ### Inference You'll find different techniques for running inference, including Huggingface's Text Generation Pipeline, multi-GPU configurations, and vLLM for scalable and efficient generation. #### Inference with Huggingface's Text Generation Pipeline The Huggingface Text Generation Pipeline provides a straightforward way to run inference using the Salamandra-2b model. ```bash pip install transformers torch accelerate sentencepiece protobuf ``` <details> <summary>Show code</summary> ```python from transformers import pipeline, set_seed model_id = "BSC-LT/salamandra-2b" # Sample prompts prompts = [ "Todo el mundo sabe que vivir en Barcelona es", "¿Pueblo o ciudad? Una ventaja de vivir en la ciudad es que hay muchas oportunidades de ocio y empleo, así como una gran diversidad de comercios para todos los gustos. Sin embargo, las ciudades suelen ser ", "Llegir ens proporciona", "What I find more fascinating about languages is that", "La vie peut être", "The future of AI is", ] # Create the pipeline generator = pipeline("text-generation", model_id, device_map="auto") generation_args = { "temperature": 0.1, "top_p": 0.95, "max_new_tokens": 25, "repetition_penalty": 1.2, "do_sample": True } # Fix the seed set_seed(1) # Generate texts outputs = generator(prompts, **generation_args) # Print outputs for output in outputs: print(output[0]["generated_text"]) ``` </details> #### Inference with single / multi GPU This section provides a simple example of how to run inference using Huggingface's AutoModel class. 
```bash pip install transformers torch accelerate sentencepiece protobuf ``` <details> <summary>Show code</summary> ```python from transformers import AutoTokenizer, AutoModelForCausalLM import torch model_id = "BSC-LT/salamandra-2b" # Input text text = "El mercat del barri és" # Load the tokenizer tokenizer = AutoTokenizer.from_pretrained(model_id) # Load the model model = AutoModelForCausalLM.from_pretrained( model_id, device_map="auto", torch_dtype=torch.bfloat16 ) generation_args = { "temperature": 0.1, "top_p": 0.95, "max_new_tokens": 25, "repetition_penalty": 1.2, "do_sample": True } inputs = tokenizer(text, return_tensors="pt") # Generate texts output = model.generate(input_ids=inputs["input_ids"].to(model.device), attention_mask=inputs["attention_mask"], **generation_args) # Print outputs print(tokenizer.decode(output[0], skip_special_tokens=True)) ``` </details> #### Inference with vLLM vLLM is an efficient library for inference that enables faster and more scalable text generation. ```bash pip install vllm ``` <details> <summary>Show code</summary> ```python from vllm import LLM, SamplingParams model_id = "BSC-LT/salamandra-2b" # Sample prompts prompts = [ "Todo el mundo sabe que vivir en Barcelona es", "¿Pueblo o ciudad? Una ventaja de vivir en la ciudad es que hay muchas oportunidades de ocio y empleo, así como una gran diversidad de comercios para todos los gustos. Sin embargo, las ciudades suelen ser ", "Llegir ens proporciona", "What I find more fascinating about languages is that", "La vie peut être", "The future of AI is", ] # Create a sampling params object sampling_params = SamplingParams( temperature=0.1, top_p=0.95, seed=1, max_tokens=25, repetition_penalty=1.2) # Create an LLM llm = LLM(model=model_id) # Generate texts outputs = llm.generate(prompts, sampling_params) # Print outputs for output in outputs: prompt = output.prompt generated_text = output.outputs[0].text print(f"Prompt: {prompt!r}, Generated text: {generated_text!r}") ``` </details> --- ## Data ### Pretraining Data The training corpus consists of 2.4 trillion tokens, including 35 European languages and 92 programming languages. It amounts to a total of 33TB of pre-processed text. Languages were sampled manually by giving x2 oversampling to Spain's co-official languages (Spanish, Catalan, Galician and Basque), code was undersampled by half, and the rest of the languages were kept as is, resulting in the following distribution: ![lang distrib](./images/corpus_languages.png) This highly multilingual corpus is predominantly composed of data from Colossal OSCAR, which contributes a significant 66.06% of the total tokens. Following this, Starcoder provides 11.91%, and Spanish Crawling adds 3.34%. The next largest sources are French FR at 3.12% and Proof Pile at 1.98%. Other notable contributions include Macocu, Pile of Law, and Eurlex, each contributing around 1.5% to 1.3%. These major sources collectively form the bulk of the corpus, ensuring a rich and diverse dataset for training the language model. The remaining 10% comes from smaller sources in various languages. Feel free to click the expand button below to see the full list of sources. 
<details> <summary>Data Sources</summary> | Dataset | Language | Source | |-----------------------------------------------|---------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------| | Parlamint corpus | at, bg, cz, dk, ee, es, es-ga, fi, fr, gb, gr, hr, hu, it, lv, nl, no, pl, pt, rs, se, si | Erjavec et al., 2021 | | Bulgarian National Corpus | bg | [Link](http://old.dcl.bas.bg/dataset/BulNC.7z) | | Crawl of Bulgarian news websites | bg | [Link](http://old.dcl.bas.bg/dataset/Bulgarian_news.7z) | | Colossal OSCAR 1.0 | bg, ca, cs, cy, da, de, el, en, es, et, eu, fi, fr, ga, gl, hr, hu, it, lt, lv, mt, nl, nn, no, oc, pl, pt, ro, ru, sh, sk, sl, sr, sv, uk | Brack et al., 2024 | | Wikimedia dumps | bg, ca, cs, da, de, el, en, es, et, eu, fi, fr, ga, gl, hr, hu, it, lt, lv, mt, nl, nn, no, pl, pt, ro, sh, sk, sl, sr, uk | [Link](https://dumps.wikimedia.org/) | | OpenSubtitlesv2016 | bg, ca, cs, da, de, el, en, es, et, eu, fi, fr, gl, hr, it, lt, lv, nl, no, pl, pt, ro, sk, sl, sr, sv, uk | Lison & Tiedemann, 2016 | | MaCoCu web corpus | bg, ca, el, hr, mt, sl, sr, uk | Bañón et al., 2022 | | EurLEX-Resources | bg, cs, da, de, el, en, es, et, fi, fr, ga, hr, hu, it, lt, lv, mt, nl, pl, pt, ro, sk, sl, sv | [Link](https://huggingface.co/datasets/joelniklaus/eurlex_resources) | | MC4-Legal | bg, cs, da, de, el, en, es, et, fi, fr, ga, hu, it, lt, lv, mt, nl, pl, pt, ro, sk, sl, sv | [Link](https://huggingface.co/datasets/joelito/legal-mc4) | | CURLICAT Corpus | bg, hr, hu, pl, ro, sk, sl | Váradi et al., 2022 | | CATalog | ca | Palomar-Giner et al., 2024 | | Spanish Crawling | ca, es, eu, gl | Relevant Spanish websites crawling | | Starcoder | code | Li et al., 2023 | | SYN v9: large corpus of written Czech | cs | Křen et al., 2021 | | Welsh-GOV | cy | Crawling from [Link](https://www.llyw.cymru) | | DaNewsroom | da | Varab & Schluter, 2020 | | Danish GigaWord | da | Strømberg-Derczynski et al., 2021 | | DK-CLARIN Reference Corpus of General Danish | da | [Link](https://korpus.dsl.dk/clarin/) | | The Danish Parliament Corpus 2009 - 2017, v1 | da | Hansen, 2018 | | DeWaC | de | [Link](https://docs.sslmit.unibo.it/doku.php?id=corpora:dewac) | | Open Legal Data - German court decisions and laws | de | Ostendorff et al., 2020 | | Greek Legal Code | el | Papaloukas et al., 2021 | | Greek Web Corpus | el | Outsios et al., 2018 | | Auxiliary Mathematics Problems and Solutions (AMPS) dataset | en | Hendrycks et al., 2021 | | BIGPATENT | en | Sharma et al., 2019 | | FineWeb-Edu (350BT subset) | en | Penedo et al., 2024 | | peS2o | en | Soldaini & Lo, 2023 | | PG-19 | en | Rae et al., 2019 | | Pile of Law (selected subsets) | en | Henderson* et al., 2022 | | proof-pile | en | [Link](https://huggingface.co/datasets/hoskinson-center/proof-pile) | | RedPajama-Data T1 (StackExchange subset) | en | Computer, 2023 | | The Pile (PhilPapers subset) | en | Gao et al., 2021 | | Biomedical | es | Internally generated scientific dataset: Dialnet, Scielo, CSIC, TDX, BSC, UCM | | HPLTDatasets v1 - Spanish | es | de Gibert et al., 2024 | | Legal | es | Internally generated legal dataset: BOE, BORME, Senado, Congreso, Spanish court orders, DOGC | | Scientific | es | Internally generated scientific dataset: Wikipedia LS, Pubmed, MeSpEn, patents, clinical cases, medical crawler | | Spanish Legal Domain Corpora | es | Gutiérrez-Fandiño et al., 2021 | | Estonian National 
Corpus 2021 | et | Koppel & Kallas, 2022 | | Estonian Reference Corpus | et | [Link](https://www.cl.ut.ee/korpused/segakorpus/) | | EusCrawl (w/o Wikipedia or NC-licenses) | eu | Artetxe et al., 2022 | | Latxa Corpus v1.1 | eu | Etxaniz et al., 2024 [Link](https://huggingface.co/datasets/HiTZ/latxa-corpus-v1.1) | | Aya Dataset (w/o Evaluation Suite) | eu, hr, nl, fi, ka, hu, lt, nn, ro, sk, lv, cy, bg, cs, en, fr, de, ga, mt, pl, ru, sl, sv, ca, da, et, gl, el, it, no, pt, sr, es, uk | Singh et al., 2024 | | Yle Finnish News Archive | fi | [Link](http://urn.fi/urn:nbn:fi:lb-2021050401) | | CaBeRnet: a New French Balanced Reference Corpus | fr | Popa-Fabre et al., 2020 | | French Public Domain Books | fr | [Link](https://huggingface.co/datasets/PleIAs/French-PD-Books) | | French Public Domain Newspapers | fr | [Link](https://huggingface.co/datasets/PleIAs/French-PD-Newspapers) | | Irish Universal Dependencies | ga | [Link](https://universaldependencies.org/ga/index.html) | | The Gaois bilingual corpus of English-Irish legislation (Irish legislation) | ga | [Link](https://portulanclarin.net/repository/browse/the-gaois-bilingual-corpus-of-english-irish-legislation-processed/daeac17c9e3511ea9b7f02420a000407b83de243dc0b469aab41084386c5b80f/) | | CorpusNÓS | gl | de-Dios-Flores et al., 2024 | | Croatian web corpus hrWaC 2.1 | hr | Ljubešić & Klubička, 2014 | | ITWaC | it | [Link](https://docs.sslmit.unibo.it/doku.php?id=corpora:itwac) | | Corpus of State-related content from the Latvian Web (Processed) | lv | [Link](https://catalog.elra.info/en-us/repository/browse/ELRA-W0169/) | | Korpus Malti | mt | Micallef et al., 2022 | | SoNaR Corpus NC 1.2 | nl | [Link](https://taalmaterialen.ivdnt.org/download/tstc-sonar-corpus/) | | Norwegian Colossal Corpus | nn, no | Kummervold et al., 2021 | | Occitan Corpus | oc | Provided by [IEA](https://www.institutestudisaranesi.cat/) | | NKJP-PodkorpusMilionowy-1.2 (National Corpus of Polish) | pl | Lewandowska-Tomaszczyk et al., 2013 | | Polish Parliamentary Corpus / Korpus Dyskursu Parlamentarnego | pl | Ogrodniczuk, 2018 | | Brazilian Portuguese Web as Corpus | pt | Wagner Filho et al., 2018 | | ParlamentoPT | pt | Rodrigues et al., 2023 | | MARCELL Romanian legislative subcorpus v2 | ro | [Link](https://elrc-share.eu/reposMARCELL%20Romanian%20legislative%20subcorpus%20v2itory/browse/marcell-romanian-legislative-subcorpus-v2/2da548428b9d11eb9c1a00155d026706ce94a6b59ffc4b0e9fb5cd9cebe6889e/) | | Korpus slovenských právnych predpisov v1.9 | sk | [Link](https://www.juls.savba.sk/data/marcell/legal-sk-20220322-1.9.ver.xz) | | od-justice 2.0 | sk | [Link](https://www.juls.savba.sk/data/od-justice/od-justice-2.0.ver.xz) | | Corpus of academic Slovene KAS 2.0 | sl | Žagar et al., 2022 | | slWaC web corpus | sl | Erjavec et al., 2015 | | SrpKorSubset (news, legal, academic, conversation, literary) | sr | [Link](http://www.korpus.matf.bg.ac.rs/) | | The Swedish Culturomics Gigaword Corpus | sv | Rødven-Eide, 2016 | | Corpus of laws and legal acts of Ukraine | uk | [Link](https://lang.org.ua/en/corpora/#anchor7) | <details> <summary>References</summary> - Abadji, J., Suárez, P. J. O., Romary, L., & Sagot, B. (2021). Ungoliant: An optimized pipeline for the generation of a very large-scale multilingual web corpus (H. Lüngen, M. Kupietz, P. Bański, A. Barbaresi, S. Clematide, & I. Pisetta, Eds.; pp. 1–9). Leibniz-Institut für Deutsche Sprache. [Link](https://doi.org/10.14618/ids-pub-10468) - Artetxe, M., Aldabe, I., Agerri, R., Perez-de-Viñaspre, O., & Soroa, A. (2022). 
Does Corpus Quality Really Matter for Low-Resource Languages? - Bañón, M., Esplà-Gomis, M., Forcada, M. L., García-Romero, C., Kuzman, T., Ljubešić, N., van Noord, R., Sempere, L. P., Ramírez-Sánchez, G., Rupnik, P., Suchomel, V., Toral, A., van der Werff, T., & Zaragoza, J. (2022). MaCoCu: Massive collection and curation of monolingual and bilingual data: Focus on under-resourced languages. Proceedings of the 23rd Annual Conference of the European Association for Machine Translation, 303–304. [Link](https://aclanthology.org/2022.eamt-1.41) - Brack, M., Ostendorff, M., Suarez, P. O., Saiz, J. J., Castilla, I. L., Palomar-Giner, J., Shvets, A., Schramowski, P., Rehm, G., Villegas, M., & Kersting, K. (2024). Community OSCAR: A Community Effort for Multilingual Web Data. [Link](https://occiglot.eu/papers/Community_Oscar.pdf) - Computer, T. (2023). RedPajama: An Open Source Recipe to Reproduce LLaMA training dataset [Computer software]. [Link](https://github.com/togethercomputer/RedPajama-Data) - de Gibert, O., Nail, G., Arefyev, N., Bañón, M., van der Linde, J., Ji, S., Zaragoza-Bernabeu, J., Aulamo, M., Ramírez-Sánchez, G., Kutuzov, A., Pyysalo, S., Oepen, S., & Tiedemann, J. (2024). A New Massive Multilingual Dataset for High-Performance Language Technologies (arXiv:2403.14009). arXiv. [Link](http://arxiv.org/abs/2403.14009) - Dodge, J., Sap, M., Marasović, A., Agnew, W., Ilharco, G., Groeneveld, D., Mitchell, M., & Gardner, M. (2021). Documenting Large Webtext Corpora: A Case Study on the Colossal Clean Crawled Corpus. In M.-F. Moens, X. Huang, L. Specia, & S. W. Yih (Eds.), Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing (pp. 1286–1305). Association for Computational Linguistics. [Link](https://doi.org/10.18653/v1/2021.emnlp-main.98) - Erjavec, T., Ljubešić, N., & Logar, N. (2015). The slWaC corpus of the Slovene web. Informatica (Slovenia), 39, 35–42. - Erjavec, T., Ogrodniczuk, M., Osenova, P., Ljubešić, N., Simov, K., Grigorova, V., Rudolf, M., Pančur, A., Kopp, M., Barkarson, S., Steingrímsson, S. hór, van der Pol, H., Depoorter, G., de Does, J., Jongejan, B., Haltrup Hansen, D., Navarretta, C., Calzada Pérez, M., de Macedo, L. D., … Rayson, P. (2021). Linguistically annotated multilingual comparable corpora of parliamentary debates ParlaMint.ana 2.1. [Link](http://hdl.handle.net/11356/1431) - Etxaniz, J., Sainz, O., Perez, N., Aldabe, I., Rigau, G., Agirre, E., Ormazabal, A., Artetxe, M., & Soroa, A. (2024). Latxa: An Open Language Model and Evaluation Suite for Basque. [Link] (https://arxiv.org/abs/2403.20266) - Gao, L., Biderman, S., Black, S., Golding, L., Hoppe, T., Foster, C., Phang, J., He, H., Thite, A., Nabeshima, N., Presser, S., & Leahy, C. (2021). The Pile: An 800GB Dataset of Diverse Text for Language Modeling. CoRR, abs/2101.00027. [Link](https://arxiv.org/abs/2101.00027) - Gutiérrez-Fandiño, A., Armengol-Estapé, J., Gonzalez-Agirre, A., & Villegas, M. (2021). Spanish Legalese Language Model and Corpora. - Hansen, D. H. (2018). The Danish Parliament Corpus 2009—2017, v1. [Link](http://hdl.handle.net/20.500.12115/8) - Henderson*, P., Krass*, M. S., Zheng, L., Guha, N., Manning, C. D., Jurafsky, D., & Ho, D. E. (2022). Pile of Law: Learning Responsible Data Filtering from the Law and a 256GB Open-Source Legal Dataset. arXiv. [Link](https://arxiv.org/abs/2207.00220) - Hendrycks, D., Burns, C., Kadavath, S., Arora, A., Basart, S., Tang, E., Song, D., & Steinhardt, J. (2021). Measuring Mathematical Problem Solving With the MATH Dataset. 
NeurIPS. - Jansen, T., Tong, Y., Zevallos, V., & Suarez, P. O. (2022). Perplexed by Quality: A Perplexity-based Method for Adult and Harmful Content Detection in Multilingual Heterogeneous Web Data. - Koppel, K., & Kallas, J. (2022). Eesti keele ühendkorpuste sari 2013–2021: Mahukaim eestikeelsete digitekstide kogu. Eesti Rakenduslingvistika Ühingu Aastaraamat Estonian Papers in Applied Linguistics, 18, 207–228. [Link](https://doi.org/10.5128/erya18.12) - Křen, M., Cvrček, V., Henyš, J., Hnátková, M., Jelínek, T., Kocek, J., Kováříková, D., Křivan, J., Milička, J., Petkevič, V., Procházka, P., Skoumalová, H., Šindlerová, J., & Škrabal, M. (2021). SYN v9: Large corpus of written Czech. [Link](http://hdl.handle.net/11234/1-4635) - Kreutzer, J., Caswell, I., Wang, L., Wahab, A., van Esch, D., Ulzii-Orshikh, N., Tapo, A., Subramani, N., Sokolov, A., Sikasote, C., Setyawan, M., Sarin, S., Samb, S., Sagot, B., Rivera, C., Rios, A., Papadimitriou, I., Osei, S., Suarez, P. O., … Adeyemi, M. (2022). Quality at a Glance: An Audit of Web-Crawled Multilingual Datasets. Transactions of the Association for Computational Linguistics, 10, 50–72. [Link](https://doi.org/10.1162/tacl_a_00447) - Kummervold, P. E., De la Rosa, J., Wetjen, F., & Brygfjeld, S. A. (2021). Operationalizing a National Digital Library: The Case for a Norwegian Transformer Model. In S. Dobnik & L. Øvrelid (Eds.), Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa) (pp. 20–29). Linköping University Electronic Press, Sweden. [Link](https://aclanthology.org/2021.nodalida-main.3) - Lewandowska-Tomaszczyk, B., Górski, R., Łaziński, M., & Przepiórkowski, A. (2013). The National Corpus of Polish (NKJP). Language use and data analysis. 309–319. - Li, R., Allal, L. B., Zi, Y., Muennighoff, N., Kocetkov, D., Mou, C., Marone, M., Akiki, C., Li, J., Chim, J., Liu, Q., Zheltonozhskii, E., Zhuo, T. Y., Wang, T., Dehaene, O., Davaadorj, M., Lamy-Poirier, J., Monteiro, J., Shliazhko, O., … Vries, H. de. (2023). StarCoder: May the source be with you! - Lison, P., & Tiedemann, J. (2016). OpenSubtitles2016: Extracting Large Parallel Corpora from Movie and TV Subtitles. In N. Calzolari, K. Choukri, T. Declerck, S. Goggi, M. Grobelnik, B. Maegaard, J. Mariani, H. Mazo, A. Moreno, J. Odijk, & S. Piperidis (Eds.), Proceedings of the Tenth International Conference on Language Resources and Evaluation (LREC’16) (pp. 923–929). European Language Resources Association (ELRA). [Link](https://aclanthology.org/L16-1147) - Ljubešić, N., & Klubička, F. (2014). Bs,hr,srWaC - Web Corpora of Bosnian, Croatian and Serbian. In F. Bildhauer & R. Schäfer (Eds.), Proceedings of the 9th Web as Corpus Workshop (WaC-9) (pp. 29–35). Association for Computational Linguistics. [Link](https://doi.org/10.3115/v1/W14-0405) - Micallef, K., Gatt, A., Tanti, M., van der Plas, L., & Borg, C. (2022). Pre-training Data Quality and Quantity for a Low-Resource Language: New Corpus and BERT Models for Maltese. Proceedings of the Third Workshop on Deep Learning for Low-Resource Natural Language Processing, 90–101. [Link](https://doi.org/10.18653/v1/2022.deeplo-1.10) - Ogrodniczuk, M. (2018). Polish Parliamentary Corpus. [Link](https://api.semanticscholar.org/CorpusID:235134113) - Ostendorff, M., Blume, T., & Ostendorff, S. (2020). Towards an Open Platform for Legal Information. Proceedings of the ACM/IEEE Joint Conference on Digital Libraries in 2020, 385–388. [Link](https://doi.org/10.1145/3383583.3398616) - Ostendorff, M., Suarez, P. O., Lage, L. F., & Rehm, G. 
(2024). LLM-Datasets: An Open Framework for Pretraining Datasets of Large Language Models. First Conference on Language Modeling. [Link](https://openreview.net/forum?id=5RdIMlGLXL) - Outsios, S., Skianis, K., Meladianos, P., Xypolopoulos, C., & Vazirgiannis, M. (2018). Word Embeddings from Large-Scale Greek Web content. arXiv Preprint arXiv:1810.06694. - Palomar-Giner, J., Saiz, J. J., Espuña, F., Mina, M., Da Dalt, S., Llop, J., Ostendorff, M., Ortiz Suarez, P., Rehm, G., Gonzalez-Agirre, A., & Villegas, M. (2024). A CURATEd CATalog: Rethinking the Extraction of Pretraining Corpora for Mid-Resourced Languages. In N. Calzolari, M.-Y. Kan, V. Hoste, A. Lenci, S. Sakti, & N. Xue (Eds.), Proceedings of the 2024 Joint International Conference on Computational Linguistics, Language Resources and Evaluation (LREC-COLING 2024) (pp. 335–349). ELRA and ICCL. [Link](https://aclanthology.org/2024.lrec-main.31) - Papaloukas, C., Chalkidis, I., Athinaios, K., Pantazi, D.-A., & Koubarakis, M. (2021). Multi-granular Legal Topic Classification on Greek Legislation. Proceedings of the Natural Legal Language Processing Workshop 2021, 63–75. [Link](https://doi.org/10.48550/arXiv.2109.15298) - Popa-Fabre, M., Ortiz Suárez, P. J., Sagot, B., & de la Clergerie, É. (2020). French Contextualized Word-Embeddings with a sip of CaBeRnet: A New French Balanced Reference Corpus. Proceedings of the 8th Workshop on Challenges in the Management of Large Corpora, 15–23. [Link](https://aclanthology.org/2020.cmlc-1.3) - Rae, J. W., Potapenko, A., Jayakumar, S. M., Hillier, C., & Lillicrap, T. P. (2019). Compressive Transformers for Long-Range Sequence Modelling. arXiv Preprint. [Link](https://arxiv.org/abs/1911.05507) - Rodrigues, J., Gomes, L., Silva, J., Branco, A., Santos, R., Cardoso, H. L., & Osório, T. (2023). Advancing Neural Encoding of Portuguese with Transformer Albertina PT-\*. - Rødven-Eide, S. (2016). The Swedish Culturomics Gigaword CorpusThe Swedish Culturomics Gigaword Corpus [Dataset]. Språkbanken Text. [Link](https://doi.org/10.23695/3WMV-1Z09) - Sharma, E., Li, C., & Wang, L. (2019). BIGPATENT: A Large-Scale Dataset for Abstractive and Coherent Summarization. CoRR, abs/1906.03741. [Link](http://arxiv.org/abs/1906.03741) - Soldaini, L., & Lo, K. (2023). peS2o (Pretraining Efficiently on S2ORC) Dataset. Allen Institute for AI. - Strømberg-Derczynski, L., Ciosici, M., Baglini, R., Christiansen, M. H., Dalsgaard, J. A., Fusaroli, R., Henrichsen, P. J., Hvingelby, R., Kirkedal, A., Kjeldsen, A. S., Ladefoged, C., Nielsen, F. Å., Madsen, J., Petersen, M. L., Rystrøm, J. H., & Varab, D. (2021). The Danish Gigaword Corpus. Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa), 413–421. [Link](https://aclanthology.org/2021.nodalida-main.46) - Subramani, N., Luccioni, S., Dodge, J., & Mitchell, M. (2023). Detecting Personal Information in Training Corpora: An Analysis. 208–220. [Link](https://doi.org/10.18653/v1/2023.trustnlp-1.18) - Varab, D., & Schluter, N. (2020). DaNewsroom: A Large-scale Danish Summarisation Dataset. Proceedings of The 12th Language Resources and Evaluation Conference, 6731–6739. [Link](https://www.aclweb.org/anthology/2020.lrec-1.831) - Váradi, T., Nyéki, B., Koeva, S., Tadić, M., Štefanec, V., Ogrodniczuk, M., Nitoń, B., Pezik, P., Barbu Mititelu, V., Irimia, E., Mitrofan, M., Tufi\textcommabelows, D., Garabík, R., Krek, S., & Repar, A. (2022). Introducing the CURLICAT Corpora: Seven-language Domain Specific Annotated Corpora from Curated Sources. In N. 
Calzolari, F. Béchet, P. Blache, K. Choukri, C. Cieri, T. Declerck, S. Goggi, H. Isahara, B. Maegaard, J. Mariani, H. Mazo, J. Odijk, & S. Piperidis (Eds.), Proceedings of the Thirteenth Language Resources and Evaluation Conference (pp. 100–108). European Language Resources Association. [Link](https://aclanthology.org/2022.lrec-1.11) - Wagner Filho, J. A., Wilkens, R., Idiart, M., & Villavicencio, A. (2018). The brwac corpus: A new open resource for brazilian portuguese. Proceedings of the Eleventh International Conference on Language Resources and Evaluation (LREC 2018). - Žagar, A., Kavaš, M., Robnik-Šikonja, M., Erjavec, T., Fišer, D., Ljubešić, N., Ferme, M., Borovič, M., Boškovič, B., Ojsteršek, M., & Hrovat, G. (2022). Corpus of academic Slovene KAS 2.0. [Link](http://hdl.handle.net/11356/1448) - Alicia Parrish, Angelica Chen, Nikita Nangia, Vishakh Padmakumar, Jason Phang, Jana Thompson, Phu Mon Htut, and Samuel Bowman. 2022. BBQ: A hand-built bias benchmark for question answering. In Findings of the Association for Computational Linguistics: ACL 2022, pages 2086–2105, Dublin, Ireland. Association for Computational Linguistics. - Emily Sheng, Kai-Wei Chang, Premkumar Natarajan, and Nanyun Peng. 2019. The Woman Worked as a Babysitter: On Biases in Language Generation. In Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP), pages 3407–3412, Hong Kong, China. Association for Computational Linguistics. - Clark, P., Cowhey, I., Etzioni, O., Khot, T., Sabharwal, A., Schoenick, C., & Tafjord, O. (2018). Think you have Solved Question Answering? Try ARC, the AI2 Reasoning Challenge. arXiv:1803. 05457v1. - Richard Socher, Alex Perelygin, Jean Wu, Jason Chuang, Christopher D. Manning, Andrew Ng, and Christopher Potts. 2013. Recursive Deep Models for Semantic Compositionality Over a Sentiment Treebank. In Proceedings of the 2013 Conference on Empirical Methods in Natural Language Processing, pages 1631–1642, Seattle, Washington, USA. Association for Computational Linguistics. - Penedo, G., Kydlíček, H., allal, L. B., Lozhkov, A., Mitchell, M., Raffel, C., Von Werra, L., & Wolf, T. (2024). The FineWeb Datasets: Decanting the Web for the Finest Text Data at Scale (arXiv:2406.17557). arXiv. http://arxiv.org/abs/2406.17557 - Singh, S., Vargus, F., Dsouza, D., Karlsson, B. F., Mahendiran, A., Ko, W.-Y., Shandilya, H., Patel, J., Mataciunas, D., OMahony, L., Zhang, M., Hettiarachchi, R., Wilson, J., Machado, M., Moura, L. S., Krzemiński, D., Fadaei, H., Ergün, I., Okoh, I., … Hooker, S. (2024). Aya Dataset: An Open-Access Collection for Multilingual Instruction Tuning (arXiv:2402.06619). arXiv. http://arxiv.org/abs/2402.06619 </details> </details> The model was trained for 3 epochs, with two final rounds of 0.3B higher-quality tokens each, meaning that the total number of tokens seen during pre-training amounts to roughly 7.8 trillion tokens. We provide an extense Datasheet section following the best practices defined by [(Gebru et al., 2021)](https://arxiv.org/pdf/1803.09010). <details> <summary>Datasheet</summary> #### Motivation **For what purpose was the dataset created? Was there a specific task in mind? Was there a specific gap that needed to be filled? 
Please provide a description.** The purpose of creating this dataset is to pre-train the Salamandra family of multilingual models with high performance in a large number of European languages (35) and code (including 92 different programming languages). In addition, we aim to represent especially the co-official languages of Spain: Spanish, Catalan, Galician, and Basque. This is the reason why we carry out an oversampling of these languages. We detected that there is a great lack of massive multilingual data, especially in minority languages (Ostendorff & Rehm, 2023), so part of our efforts in the creation of this pre-training dataset have resulted in the contribution to large projects such as the Community OSCAR (Brack et al., 2024), which includes 151 languages and 40T words, or CATalog (Palomar-Giner et al., 2024), the largest open dataset in Catalan in the world. **Who created the dataset (e.g., which team, research group) and on behalf of which entity (e.g., company, institution, organization)?** The dataset has been created by the Language Technologies unit (LangTech) of the Barcelona Supercomputing Center - Centro Nacional de Supercomputación (BSC-CNS), which aims to advance the field of natural language processing through cutting-edge research and development and the use of HPC. In particular, it was created by the unit's data team, the main contributors being Javier Saiz, Ferran Espuña, and Jorge Palomar. However, the creation of the dataset would not have been possible without the collaboration of a large number of collaborators, partners, and public institutions, which can be found in detail in the acknowledgements. **Who funded the creation of the dataset? If there is an associated grant, please provide the name of the grantor and the grant name and number.** This work/research has been promoted and financed by the Government of Catalonia through the [Aina project](https://projecteaina.cat/). #### Composition **What do the instances that comprise the dataset represent (e.g., documents, photos, people, countries)? Are there multiple types of instances (e.g., movies, users, and ratings; people and interactions between them; nodes and edges)? Please provide a description.** The dataset consists entirely of text documents in various languages. Specifically, data was mainly sourced from the following databases and repositories: - **Common Crawl:** Repository that holds website data and is run by the Common Crawl non-profit organization. It is updated monthly and is distributed under the CC0 1.0 public domain license. - **GitHub:** Community platform that allows developers to create, store, manage, and share their code. Repositories are crawled and then distributed with their original licenses, which may vary from permissive to non-commercial licenses. - **Wikimedia:** Database that holds the collection databases managed by the Wikimedia Foundation, including Wikipedia, Wikibooks, Wikinews, Wikiquote, Wikisource, and Wikivoyage. It is updated monthly and is distributed under Creative Commons Attribution-ShareAlike License 4.0. - **EurLex:** Repository that holds the collection of legal documents from the European Union, available in all of the EU’s 24 official languages and run by the Publications Office of the European Union. It is updated daily and is distributed under the Creative Commons Attribution 4.0 International license. 
- **Other repositories:** Specific repositories were crawled under permission for domain-specific corpora, which include academic, legal, and newspaper repositories. We provide a complete list of dataset sources at the end of this section. **How many instances are there in total (of each type, if appropriate)?** The dataset contains a diverse range of instances across multiple languages, with notable adjustments for certain languages. English represents the largest portion, accounting for 39.08% of the total data. Spanish was upsampled by a factor of 2, bringing its share to 16.59%, while Catalan (1.84%), Basque (0.26%), and Galician (0.36%) were also upsampled by 2. On the other hand, code-related data was downsampled by half, making up 6.42% of the total. Other prominent languages include French (6.59%), Russian (5.39%), German (4.25%), and Hungarian (3.93%), with several additional languages contributing between 1% and 2%, and smaller portions represented by a variety of others. **Does the dataset contain all possible instances or is it a sample (not necessarily random) of instances from a larger set? If the dataset is a sample, then what is the larger set? Is the sample representative of the larger set (e.g., geographic coverage)? If so, please describe how this representativeness was validated/verified. If it is not representative of the larger set, please describe why not (e.g., to cover a more diverse range of instances, because instances were withheld or unavailable).** The dataset is a sample from multiple sources, with different weights based on the primary language of the content: Spanish, Catalan, Basque, and Galician content was upsampled by a factor of two, while programming languages were downsampled by a factor of half. Other sources were sampled in proportion to their occurrence. **What data does each instance consist of? “Raw” data (e.g., unprocessed text or images) or features? In either case, please provide a description.** Each instance consists of a text document processed for deduplication, language identification, and source-specific filtering. Some documents required optical character recognition (OCR) to extract text from non-text formats such as PDFs. **Is there a label or target associated with each instance? If so, please provide a description.** Each instance is labeled with a unique identifier, the primary language of the content, and the URL for web-sourced instances. Additional labels were automatically assigned to detect specific types of content —harmful or toxic content— and to assign preliminary indicators of undesired qualities —very short documents, high density of symbols, etc.— which were used for filtering instances. **Is any information missing from individual instances? If so, please provide a description, explaining why this information is missing (e.g., because it was unavailable). This does not include intentionally removed information, but might include, e.g., redacted text.** No significant information is missing from the instances. **Are relationships between individual instances made explicit (e.g., users’ movie ratings, social network links)? If so, please describe how these relationships are made explicit.** Instances are related through shared metadata, such as source and language identifiers. **Are there recommended data splits (e.g., training, development/validation, testing)? 
If so, please provide a description of these splits, explaining the rationale behind them.** The dataset is split randomly into training, validation, and test sets. **Are there any errors, sources of noise, or redundancies in the dataset? If so, please provide a description.** Despite removing duplicated instances within each source, redundancy remains at the paragraph and sentence levels, particularly in web-sourced instances where SEO techniques and templates contribute to repeated textual patterns. Some instances may also be duplicated across sources due to format variations. **Is the dataset self-contained, or does it link to or otherwise rely on external resources (e.g., websites, tweets, other datasets)? If it links to or relies on external resources, a) are there guarantees that they will exist, and remain constant, over time; b) are there official archival versions of the complete dataset (i.e., including the external resources as they existed at the time the dataset was created); c) are there any restrictions (e.g., licenses, fees) associated with any of the external resources that might apply to a dataset consumer? Please provide descriptions of all external resources and any restrictions associated with them, as well as links or other access points, as appropriate.** The dataset is self-contained and does not rely on external resources. **Does the dataset contain data that might be considered confidential (e.g., data that is protected by legal privilege or by doctor–patient confidentiality, data that includes the content of individuals’ non-public communications)? If so, please provide a description.** The dataset does not contain confidential data. **Does the dataset contain data that, if viewed directly, might be offensive, insulting, threatening, or might otherwise cause anxiety? If so, please describe why. If the dataset does not relate to people, you may skip the remaining questions in this section.** The dataset includes web-crawled content, which may overrepresent pornographic material across languages (Kreutzer et al., 2022). Although pre-processing techniques were applied to mitigate offensive content, the heterogeneity and scale of web-sourced data make exhaustive filtering challenging, which makes it next to impossible to identify all adult content without falling into excessive filtering, which may negatively influence certain demographic groups (Dodge et al., 2021). **Does the dataset identify any subpopulations (e.g., by age, gender)? If so, please describe how these subpopulations are identified and provide a description of their respective distributions within the dataset.** The dataset does not explicitly identify any subpopulations. **Is it possible to identify individuals (i.e., one or more natural persons), either directly or indirectly (i.e., in combination with other data) from the dataset? If so, please describe how.** Web-sourced instances in the dataset may contain personally identifiable information (PII) that is publicly available on the Web, such as names, IP addresses, email addresses, and phone numbers. While it would be possible to indirectly identify individuals through the combination of multiple data points, the nature and scale of web data makes it difficult to parse such information. In any case, efforts are made to filter or anonymize sensitive data during pre-processing, but some identifiable information may remain in the dataset. **Does the dataset contain data that might be considered sensitive in any way? 
If so, please provide a description.** Given that the dataset includes web-sourced content and other publicly available documents, instances may inadvertently reveal financial information, health-related details, or forms of government identification, such as social security numbers (Subramani et al., 2023), especially if the content originates from less-regulated sources or user-generated platforms. #### Collection Process **How was the data collected?** This dataset was built by combining several sources, whose acquisition methods can be classified into three groups: - Web-sourced datasets with some preprocessing available under permissive license (e.g. Common Crawl). - Domain-specific or language-specific raw crawls (e.g. Spanish Crawling). - Manually curated data obtained through collaborators, data providers (by means of legal assignment agreements) or open source projects (e.g. CATalog). **What mechanisms or procedures were used to collect the data? How were these mechanisms or procedures validated?** According to the three groups previously defined, these are the mechanisms used in each of them: - Open direct download. Validation: data integrity tests. - Ad-hoc scrapers or crawlers. Validation: software unit and data integrity tests. - Direct download via FTP, SFTP, API or S3. Validation: data integrity tests. **If the dataset is a sample from a larger set, what was the sampling strategy?** The sampling strategy was to use the whole dataset resulting from the filtering explained in the ‘preprocessing/cleaning/labelling’ section, with the particularity that an upsampling of 2 (i.e. twice the probability of sampling a document) was performed for the co-official languages of Spain (Spanish, Catalan, Galician, Basque), and a downsampling of 1/2 was applied for code (half the probability of sampling a code document, evenly distributed among all programming languages); a short illustrative sketch of these weights is given at the end of this section. **Who was involved in the data collection process and how were they compensated?** This data is generally extracted, filtered and sampled by automated processes. The code required to run these processes has been developed entirely by members of the LangTech data team, or otherwise obtained from open-source software. Furthermore, there has been no monetary consideration for acquiring data from suppliers. **Over what timeframe was the data collected? Does this timeframe match the creation timeframe of the data associated with the instances? If not, please describe the timeframe in which the data associated with the instances was created.** Data were acquired and processed from April 2023 to April 2024. However, as mentioned, much data has been obtained from open projects such as Common Crawl, which contains data from 2014, so it is the end date (04/2024) rather than the start date that is important. **Were any ethical review processes conducted? If so, please provide a description of these review processes, including the outcomes, as well as a link or other access point to any supporting documentation.** No particular ethical review process has been carried out as the data is mostly open and not particularly sensitive. However, we have an internal evaluation team and a bias team to monitor ethical issues. In addition, we work closely with ‘Observatori d'Ètica en Intel·ligència Artificial’ (OEIAC) and ‘Agencia Española de Supervisión de la Inteligencia Artificial’ (AESIA) to audit the processes we carry out from an ethical and legal point of view, respectively. 
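To make the sampling weights described in this section concrete, here is a minimal, purely illustrative sketch in Python; the document fields (`lang`, `category`) and the use of `random.choices` are assumptions made for illustration and do not reflect the actual pipeline code.

```python
import random

# Illustrative relative sampling weights, matching the strategy described above:
# co-official languages of Spain are upsampled x2, code is downsampled to 1/2,
# and every other document keeps its natural probability.
UPSAMPLED_LANGS = {"es", "ca", "gl", "eu"}

def sampling_weight(doc: dict) -> float:
    """Return the relative sampling weight of a document (hypothetical schema)."""
    if doc.get("category") == "code":
        return 0.5
    if doc.get("lang") in UPSAMPLED_LANGS:
        return 2.0
    return 1.0

def sample_documents(documents: list[dict], n: int, seed: int = 0) -> list[dict]:
    """Draw n documents (with replacement) proportionally to their weights."""
    rng = random.Random(seed)
    weights = [sampling_weight(d) for d in documents]
    return rng.choices(documents, weights=weights, k=n)
```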
#### Preprocessing **Was any preprocessing/cleaning/labeling of the data done? If so, please provide a description. If not, you may skip the remaining questions in this section.** Instances of text documents were not altered, but web-sourced documents were filtered based on specific criteria along two dimensions: - Quality: documents with a quality score lower than 0.8 were filtered out. The score, computed with CURATE (Palomar-Giner et al., 2024), reflects undesired qualities such as a low number of lines, very short sentences, long footers and headers, and a high percentage of punctuation. - Harmful or adult content: documents originating from Colossal OSCAR were filtered using LLM-Datasets (Ostendorff et al., 2024) based on the perplexity from a language model (‘harmful_pp’ field) provided by the Ungoliant pipeline (Abadji et al., 2021). **Was the “raw” data saved in addition to the preprocessed/cleaned/labeled data? If so, please provide a link or other access point to the “raw” data.** The original raw data was not kept. **Is the software that was used to preprocess/clean/label the data available? If so, please provide a link or other access point.** Yes, the preprocessing and filtering software is open-sourced. The [CURATE](https://github.com/langtech-bsc/CURATE) pipeline was used for Spanish Crawling and CATalog, and the [Ungoliant](https://github.com/oscar-project/ungoliant) pipeline was used for the OSCAR project. #### Uses **Has the dataset been used for any tasks already? If so, please provide a description.** The dataset has been used to pre-train the Salamandra model family. **What (other) tasks could the dataset be used for?** The data can be used primarily to pre-train other language models, which can then be used for a wide range of use cases. The dataset could also be used for other tasks such as fine-tuning language models, cross-lingual NLP tasks, machine translation, domain-specific text generation, and language-specific data analysis. **Is there anything about the composition of the dataset or the way it was collected and preprocessed/cleaned/labeled that might impact future uses? Is there anything a dataset consumer could do to mitigate these risks or harms?** Web-crawled content over-represents standard language varieties, which impacts language model performance for minority languages. Language diversity in data is crucial to avoid bias, especially in encoding non-standard dialects, preventing the exclusion of demographic groups. Moreover, despite legal uncertainties in web-scraped data, we prioritize permissive licenses and privacy protection measures, acknowledging the challenges posed by personally identifiable information (PII) within large-scale datasets. Our ongoing efforts aim to address privacy concerns and contribute to a more inclusive linguistic dataset. **Are there tasks for which the dataset should not be used?** - #### Distribution **Will the dataset be distributed to third parties outside of the entity on behalf of which the dataset was created? If so, please provide a description.** The dataset will not be released or distributed to third parties. Any question related to distribution is therefore omitted in this section. #### Maintenance **Who will be supporting/hosting/maintaining the dataset?** The dataset will be hosted by the Language Technologies unit (LangTech) of the Barcelona Supercomputing Center (BSC). 
The team will ensure regular updates and monitor the dataset for any issues related to content integrity, legal compliance, and bias for the sources they are responsible for. **How can the owner/curator/manager of the dataset be contacted?** The data owner may be contacted at the email address [email protected]. **Will the dataset be updated?** The dataset will not be updated. **If the dataset relates to people, are there applicable limits on the retention of the data associated with the instances? If so, please describe these limits and explain how they will be enforced.** The dataset does not keep sensitive data that could allow direct identification of individuals, apart from the data that is publicly available in web-sourced content. Due to the sheer volume and diversity of web data, it is not feasible to notify individuals or manage data retention on an individual basis. However, efforts are made to mitigate the risks associated with sensitive information through pre-processing and filtering to remove identifiable or harmful content. Despite these measures, vigilance is maintained to address potential privacy and ethical issues. **Will older versions of the dataset continue to be supported/hosted/maintained? If so, please describe how. If not, please describe how its obsolescence will be communicated to dataset consumers.** Since the dataset will not be updated, only the final version will be kept. **If others want to extend/augment/build on/contribute to the dataset, is there a mechanism for them to do so?** The dataset does not allow for external contributions. </details> --- ## Evaluation Evaluation is done using the Language Model Evaluation Harness (Gao et al., 2024). We evaluate on a set of tasks taken from [SpanishBench](https://github.com/EleutherAI/lm-evaluation-harness/tree/main/lm_eval/tasks/spanish_bench), [CatalanBench](https://github.com/EleutherAI/lm-evaluation-harness/tree/main/lm_eval/tasks/catalan_bench), [BasqueBench](https://github.com/EleutherAI/lm-evaluation-harness/tree/main/lm_eval/tasks/basque_bench) and [GalicianBench](https://github.com/EleutherAI/lm-evaluation-harness/tree/main/lm_eval/tasks/galician_bench). We also use English tasks already available on the LM Evaluation Harness. These benchmarks include both new and existing tasks and datasets. In the tables below, we include the results on a selection of evaluation datasets that represent the models’ performance across a variety of tasks within these benchmarks. We only use tasks that are either human generated, human translated, or with a strong human-in-the-loop (i.e., machine translation followed by professional revision or machine generation followed by human revision and annotation). This explains the variation in the number of tasks reported across languages. As more tasks that fulfill these requirements are published, we will update the presented results. We also intend to expand the evaluation to other languages, as long as the datasets meet our quality standards. During the implementation of the evaluation we observed a series of issues worth considering when replicating and interpreting the results presented. These issues include variances of ≈1.5% in performance on some tasks, depending on the version of the `transformers` library used and on whether tensor parallelism is used when loading a model. When implementing existing tasks, we carry out a comprehensive quality evaluation of the dataset, the Harness task itself, and what kind of input models see during evaluation. 
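For replication purposes, a 5-shot run of one of these tasks through the Harness Python API might look roughly like the sketch below; the checkpoint, task name and arguments are illustrative, and the exact API surface can differ between Harness versions.

```python
# Minimal sketch of a 5-shot evaluation with the LM Evaluation Harness.
# Checkpoint, task and arguments are illustrative, not the exact setup used here.
import lm_eval

results = lm_eval.simple_evaluate(
    model="hf",
    model_args="pretrained=BSC-LT/salamandra-2b,dtype=bfloat16",
    tasks=["xstorycloze_es"],
    num_fewshot=5,
    batch_size=8,
)
print(results["results"])
```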
Our implementation (see links above) addresses multiple existing problems such as errors in datasets and prompts, and lack of pre-processing. All this means that results will vary if using other Harness implementations, and may slightly vary depending on the replication setup. It should be noted that these results are subject to all the drawbacks of every current gold-standard evaluation, and that the figures do not fully represent the models capabilities and potential. We thus advise caution when reading and interpreting the results. A full list of results compared to other baselines, a discussion of the model's performance across tasks and its implications, and details regarding problem-solving with task implementation will soon be available in the technical report. All results reported below are on a 5-shot setting. #### Spanish <table><thead> <tr> <th>Category</th> <th>Task</th> <th>Metric</th> <th>Result</th> </tr></thead> <tbody> <tr> <td>Commonsense Reasoning</td> <td>xstorycloze_es</td> <td>acc</td> <td>64.92</td> </tr> <tr> <td rowspan="2">NLI</td> <td>wnli_es</td> <td>acc</td> <td>54.93</td> </tr> <tr> <td>xnli_es</td> <td>acc</td> <td>44.98</td> </tr> <tr> <td>Paraphrasing</td> <td>paws_es</td> <td>acc</td> <td>52.05</td> </tr> <tr> <td>QA</td> <td>xquad_es</td> <td>acc</td> <td>54.32</td> </tr> <tr> <td>Translation</td> <td>flores_es</td> <td>bleu</td> <td>11.46</td> </tr> </tbody> </table> #### Catalan <table><thead> <tr> <th>Category</th> <th>Task</th> <th>Metric</th> <th>Result</th> </tr></thead> <tbody> <tr> <td rowspan="2">Commonsense Reasoning</td> <td>copa_ca</td> <td>acc</td> <td>68.80</td> </tr> <tr> <td>xstorycloze_ca</td> <td>acc</td> <td>65.72</td> </tr> <tr> <td rowspan="2">NLI</td> <td>wnli_ca</td> <td>acc</td> <td>56.34</td> </tr> <tr> <td>xnli_ca</td> <td>acc</td> <td>48.07</td> </tr> <tr> <td rowspan="2">Paraphrasing</td> <td>parafraseja</td> <td>acc</td> <td>58.55</td> </tr> <tr> <td>paws_ca</td> <td>acc</td> <td>55.15</td> </tr> <tr> <td rowspan="5">QA</td> <td>arc_ca_easy</td> <td>acc</td> <td>54.76</td> </tr> <tr> <td>arc_ca_challenge</td> <td>acc</td> <td>30.55</td> </tr> <tr> <td>openbookqa_ca</td> <td>acc</td> <td>27.40</td> </tr> <tr> <td>piqa_ca</td> <td>acc</td> <td>62.89</td> </tr> <tr> <td>siqa_ca</td> <td>acc</td> <td>41.91</td> </tr> <tr> <td>Translation</td> <td>flores_ca</td> <td>bleu</td> <td>14.70</td> </tr> </tbody></table> #### Basque <table><thead> <tr> <th>Category</th> <th>Task</th> <th>Metric</th> <th>Result</th> </tr></thead> <tbody> <tr> <td rowspan="2">Commonsense Reasoning</td> <td>xcopa_eu</td> <td>acc</td> <td>55.60</td> </tr> <tr> <td>xstorycloze_eu</td> <td>acc</td> <td>57.64</td> </tr> <tr> <td rowspan="2">NLI</td> <td>wnli_eu</td> <td>acc</td> <td>56.34</td> </tr> <tr> <td>xnli_eu</td> <td>acc</td> <td>39.78</td> </tr> <tr> <td rowspan="3">QA</td> <td>eus_exams</td> <td>acc</td> <td>23.72</td> </tr> <tr> <td>eus_proficiency</td> <td>acc</td> <td>23.37</td> </tr> <tr> <td>eus_trivia</td> <td>acc</td> <td>27.58</td> </tr> <tr> <td>Reading Comprehension</td> <td>eus_reading</td> <td>acc</td> <td>27.84</td> </tr> <tr> <td>Translation</td> <td>flores_eu</td> <td>bleu</td> <td>3.58</td> </tr> </tbody></table> #### Galician <table><thead> <tr> <th>Category</th> <th>Task</th> <th>Metric</th> <th>Result</th> </tr></thead> <tbody> <tr> <td rowspan="2">Paraphrasing</td> <td>parafrases_gl</td> <td>acc</td> <td>54.08</td> </tr> <tr> <td>paws_gl</td> <td>acc</td> <td>53.30</td> </tr> <tr> <td>QA</td> <td>openbookqa_gl</td> <td>acc</td> 
<td>30.80</td> </tr> <tr> <td>Translation</td> <td>flores_gl</td> <td>bleu</td> <td>12.86</td> </tr> </tbody> </table> #### English <table><thead> <tr> <th>Category</th> <th>Task</th> <th>Metric</th> <th>Result</th> </tr></thead> <tbody> <tr> <td rowspan="2">Commonsense Reasoning</td> <td>copa</td> <td>acc</td> <td>83.00</td> </tr> <tr> <td>xstorycloze_en</td> <td>acc</td> <td>73.06</td> </tr> <tr> <td rowspan="2">NLI</td> <td>wnli</td> <td>acc</td> <td>56.34</td> </tr> <tr> <td>xnli_en</td> <td>acc</td> <td>47.35</td> </tr> <tr> <td>Paraphrasing</td> <td>paws *</td> <td>acc</td> <td>55.95</td> </tr> <tr> <td rowspan="6">QA</td> <td>arc_easy</td> <td>acc</td> <td>74.07</td> </tr> <tr> <td>arc_challenge</td> <td>acc</td> <td>37.63</td> </tr> <tr> <td>openbookqa</td> <td>acc</td> <td>28.00</td> </tr> <tr> <td>piqa</td> <td>acc</td> <td>74.86</td> </tr> <tr> <td>social_iqa</td> <td>acc</td> <td>46.62</td> </tr> <tr> <td>squad_en **</td> <td>acc</td> <td>44.38</td> </tr> </tbody></table> \* Current LM Evaluation Harness implementation is lacking correct pre-processing. These results are obtained with adequate pre-processing. \*\* This task is not yet available in the official Harness, we hope to add it soon. --- ## Ethical Considerations and Limitations We examine the presence of undesired societal and cognitive biases present in this model using different benchmarks. For societal biases, we test performance using the BBQ dataset (Parrish et al., 2022) in the original English and the Regard dataset (Sheng et al., 2019). We report inadequate accuracies in both ambiguous and disambiguated contexts, which is indicative of the presence of societal biases which need to be addressed in post-training phases. Our cognitive bias analysis focuses on positional effects in 0-shot settings, and majority class bias in few-shot settings. For positional effects, we leverage the ARC Multiple Choice Question dataset (Clark et al., 2018). We observe moderate to strong to very strong primacy effects, whereby the model shows a preference for answers towards the beginning of the list of provided answers. We measure effects of majority class effects in few-shot settings using SST-2 (Socher et al., 2013). We detect moderate effects, implying that outputs can be influenced by the prompts. Our analyses of these biases are by no means exhaustive and are limited by the relative scarcity of adequate resources in all languages present in the training data. We aim to gradually extend and expand our analyses in future work. We highlight that these results can be expected from a pretrained model that has not yet been instruction-tuned or aligned. These tests are performed in order to show the biases the model may contain. We urge developers to take them into account and perform safety testing and tuning tailored to their specific applications of the model. --- ## Additional information ### Author The Language Technologies Unit from Barcelona Supercomputing Center. ### Contact For further information, please send an email to <[email protected]>. ### Copyright Copyright(c) 2024 by Language Technologies Unit, Barcelona Supercomputing Center. ### Funding This work has been promoted and financed by the Government of Catalonia through the [Aina Project](https://projecteaina.cat/). This work is funded by the _Ministerio para la Transformación Digital y de la Función Pública_ - Funded by EU – NextGenerationEU within the framework of [ILENIA Project](https://proyectoilenia.es/) with reference 2022/TL22/00215337. 
### Acknowledgements This project has benefited from the contributions of numerous teams and institutions, mainly through data contributions, knowledge transfer or technical support. In Catalonia, many institutions have been involved in the project. Our thanks to Òmnium Cultural, Parlament de Catalunya, Institut d'Estudis Aranesos, Racó Català, Vilaweb, ACN, Nació Digital, El món and Aquí Berguedà. At the national level, we are especially grateful to our ILENIA project partners, CENID, HiTZ and CiTIUS, for their participation. We also extend our genuine gratitude to the Spanish Senate and Congress, Fundación Dialnet, Fundación Elcano and the ‘Instituto Universitario de Sistemas Inteligentes y Aplicaciones Numéricas en Ingeniería (SIANI)’ of the University of Las Palmas de Gran Canaria. At the international level, we thank the Welsh government, DFKI, the Occiglot project, especially Malte Ostendorff, and The Common Crawl Foundation, especially Pedro Ortiz, for their collaboration. We would also like to give special thanks to the NVIDIA team, with whom we have met regularly, especially to Ignacio Sarasua, Adam Henryk Grzywaczewski, Oleg Sudakov, Sergio Perez, Miguel Martinez, Felipes Soares and Meriem Bendris. Their constant support has been especially appreciated throughout the entire process, and their valuable efforts have been instrumental in the development of this work. ### Disclaimer Be aware that the model may contain biases or other unintended distortions. When third parties deploy systems or provide services based on this model, or use the model themselves, they bear the responsibility for mitigating any associated risks and ensuring compliance with applicable regulations, including those governing the use of Artificial Intelligence. The Barcelona Supercomputing Center, as the owner and creator of the model, shall not be held liable for any outcomes resulting from third-party use. ### Citation Technical report and paper coming soon. ### License [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0) ## Model Index |Model|Base|Instruct| |:---:|:---:|:---:| |2B| [Link](https://huggingface.co/BSC-LT/salamandra-2b) | [Link](https://huggingface.co/BSC-LT/salamandra-2b-instruct) | |7B| [Link](https://huggingface.co/BSC-LT/salamandra-7b) | [Link](https://huggingface.co/BSC-LT/salamandra-7b-instruct) | |40B| WiP | WiP |
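As a quick, illustrative sketch of how the base checkpoints listed in the Model Index can be loaded with the `transformers` library (the prompt and generation settings below are arbitrary assumptions):

```python
# Illustrative only: load a base checkpoint from the Model Index and generate text.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "BSC-LT/salamandra-2b"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,
    device_map="auto",
)

inputs = tokenizer("El mercat del barri és", return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=25)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```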
[ "QUESTION_ANSWERING", "TRANSLATION", "SUMMARIZATION", "PARAPHRASING" ]
[ "BEAR", "SCIELO" ]
sschet/bert-large-uncased_med-ner
sschet
token-classification
[ "transformers", "pytorch", "jax", "bert", "token-classification", "en", "dataset:tner/bc5cdr", "dataset:commanderstrife/jnlpba", "dataset:bc2gm_corpus", "dataset:drAbreu/bc4chemd_ner", "dataset:linnaeus", "dataset:chintagunta85/ncbi_disease", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-02-01T02:02:56
2023-02-01T03:42:17
129
3
--- datasets: - tner/bc5cdr - commanderstrife/jnlpba - bc2gm_corpus - drAbreu/bc4chemd_ner - linnaeus - chintagunta85/ncbi_disease language: - en --- A Named Entity Recognition model for medication entities (`medication name`, `dosage`, `duration`, `frequency`, `reason`). The model has been trained on the i2b2 (now n2c2) dataset for the 2009 Medication task. Please visit the n2c2 site to request access to the dataset.
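A minimal usage sketch with the `transformers` token-classification pipeline is shown below; the aggregation strategy and the example sentence are illustrative assumptions, not part of the original card.

```python
# Illustrative sketch: run the medication NER model through the transformers pipeline.
from transformers import pipeline

ner = pipeline(
    "token-classification",
    model="sschet/bert-large-uncased_med-ner",
    aggregation_strategy="simple",  # merge sub-word tokens into entity spans
)

text = "The patient was started on 500 mg of amoxicillin twice daily for 10 days for otitis media."
for entity in ner(text):
    print(entity["entity_group"], entity["word"], round(entity["score"], 3))
```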
[ "NAMED_ENTITY_RECOGNITION" ]
[ "BC5CDR", "JNLPBA", "LINNAEUS", "NCBI DISEASE" ]
JacopoBandoni/BioBertRelationGenesDiseases
JacopoBandoni
text-classification
[ "transformers", "pytorch", "bert", "text-classification", "license:afl-3.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2022-05-02T10:25:29
2022-05-09T09:47:10
128
1
--- license: afl-3.0 widget: - text: The case of a 72-year-old male with @DISEASE$ with poor insulin control (fasting hyperglycemia greater than 180 mg/dl) who had a long-standing polyuric syndrome is here presented. Hypernatremia and plasma osmolality elevated together with a low urinary osmolality led to the suspicion of diabetes insipidus, which was subsequently confirmed by the dehydration test and the administration of @GENE$ sc. example_title: Example 1 - text: Hypernatremia and plasma osmolality elevated together with a low urinary osmolality led to the suspicion of diabetes insipidus, which was subsequently confirmed by the dehydration test and the administration of @GENE$ sc. With 61% increase in the calculated urinary osmolarity one hour post desmopressin s.c., @DISEASE$ was diagnosed. example_title: Example 2 --- This model is BioBERT fine-tuned on the GAD dataset. The model works by masking the gene string with "@GENE$" and the disease string with "@DISEASE$". The output is a text classification with two possible labels: "LABEL0" if there is no relation, or "LABEL1" if there is a relation.
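A minimal usage sketch, assuming the standard `transformers` text-classification pipeline and the masking convention described above (the input text is taken from the second widget example):

```python
# Illustrative sketch: classify a gene-disease relation with this model.
# The input must already mask the gene as @GENE$ and the disease as @DISEASE$.
from transformers import pipeline

classifier = pipeline(
    "text-classification",
    model="JacopoBandoni/BioBertRelationGenesDiseases",
)

text = (
    "Hypernatremia and plasma osmolality elevated together with a low urinary "
    "osmolality led to the suspicion of diabetes insipidus, which was subsequently "
    "confirmed by the dehydration test and the administration of @GENE$ sc. With 61% "
    "increase in the calculated urinary osmolarity one hour post desmopressin s.c., "
    "@DISEASE$ was diagnosed."
)

# According to the card, "LABEL1" indicates a relation and "LABEL0" no relation.
print(classifier(text))
```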
[ "TEXT_CLASSIFICATION" ]
[ "GAD" ]
minishlab/M2V_base_glove
minishlab
null
[ "model2vec", "safetensors", "embeddings", "static-embeddings", "mteb", "sentence-transformers", "en", "base_model:BAAI/bge-base-en-v1.5", "base_model:finetune:BAAI/bge-base-en-v1.5", "license:mit", "model-index", "region:us" ]
2024-09-19T18:01:04
2025-01-21T19:17:15
128
4
--- base_model: BAAI/bge-base-en-v1.5 language: - en library_name: model2vec license: mit tags: - embeddings - static-embeddings - mteb - sentence-transformers model-index: - name: M2V_base_glove results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en-ext) type: mteb/amazon_counterfactual config: en-ext split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 65.65217391304347 - type: ap value: 17.836356075619893 - type: ap_weighted value: 17.836356075619893 - type: f1 value: 54.37306111606638 - type: f1_weighted value: 72.23675193582666 - type: main_score value: 65.65217391304347 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 67.19402985074628 - type: ap value: 30.94305233770745 - type: ap_weighted value: 30.94305233770745 - type: f1 value: 61.69517242961607 - type: f1_weighted value: 70.41137216914223 - type: main_score value: 67.19402985074628 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification (default) type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 68.45135 - type: ap value: 63.48441586885817 - type: ap_weighted value: 63.48441586885817 - type: f1 value: 67.81657156872735 - type: f1_weighted value: 67.81657156872735 - type: main_score value: 68.45135 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 31.838 - type: f1 value: 31.4067444768528 - type: f1_weighted value: 31.4067444768528 - type: main_score value: 31.838 - task: type: Retrieval dataset: name: MTEB ArguAna (default) type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: main_score value: 33.774 - type: map_at_1 value: 17.212 - type: map_at_10 value: 27.572000000000003 - type: map_at_100 value: 28.756999999999998 - type: map_at_1000 value: 28.826 - type: map_at_20 value: 28.275 - type: map_at_3 value: 24.064 - type: map_at_5 value: 25.909 - type: mrr_at_1 value: 17.56756756756757 - type: mrr_at_10 value: 27.708206326627337 - type: mrr_at_100 value: 28.89290919456729 - type: mrr_at_1000 value: 28.96196792349176 - type: mrr_at_20 value: 28.411079006850485 - type: mrr_at_3 value: 24.182076813655733 - type: mrr_at_5 value: 26.045519203413875 - type: nauc_map_at_1000_diff1 value: 8.485123367352873 - type: nauc_map_at_1000_max value: -0.9193979953494795 - type: nauc_map_at_1000_std value: 15.100068482294574 - type: nauc_map_at_100_diff1 value: 8.519325841728035 - type: nauc_map_at_100_max value: -0.8956256416288586 - type: nauc_map_at_100_std value: 15.147231104798806 - type: nauc_map_at_10_diff1 value: 8.380916599430705 - type: nauc_map_at_10_max value: -0.9917288035736084 - type: nauc_map_at_10_std value: 14.761940291815831 - type: nauc_map_at_1_diff1 value: 9.060503842089553 - type: nauc_map_at_1_max value: -4.8081298761261655 - type: nauc_map_at_1_std value: 11.125316223515181 - type: nauc_map_at_20_diff1 value: 8.516487295524888 - type: nauc_map_at_20_max value: -0.8417277704421139 - type: nauc_map_at_20_std value: 15.101334311163782 - type: nauc_map_at_3_diff1 value: 7.922067336816303 - type: nauc_map_at_3_max value: 
-2.2211217686219347 - type: nauc_map_at_3_std value: 12.687891894715243 - type: nauc_map_at_5_diff1 value: 7.407423493480417 - type: nauc_map_at_5_max value: -2.4578439857602445 - type: nauc_map_at_5_std value: 13.543477676837792 - type: nauc_mrr_at_1000_diff1 value: 7.318853326158743 - type: nauc_mrr_at_1000_max value: -0.74537688800884 - type: nauc_mrr_at_1000_std value: 14.72062445798488 - type: nauc_mrr_at_100_diff1 value: 7.35529594869805 - type: nauc_mrr_at_100_max value: -0.722109219876811 - type: nauc_mrr_at_100_std value: 14.768290519037613 - type: nauc_mrr_at_10_diff1 value: 7.21492350238724 - type: nauc_mrr_at_10_max value: -0.8670677275648112 - type: nauc_mrr_at_10_std value: 14.38960682092002 - type: nauc_mrr_at_1_diff1 value: 7.5570385779405775 - type: nauc_mrr_at_1_max value: -3.16483196648834 - type: nauc_mrr_at_1_std value: 10.218393597427989 - type: nauc_mrr_at_20_diff1 value: 7.335130620378978 - type: nauc_mrr_at_20_max value: -0.6993053791581448 - type: nauc_mrr_at_20_std value: 14.708624057565162 - type: nauc_mrr_at_3_diff1 value: 6.613680793028679 - type: nauc_mrr_at_3_max value: -2.2272295185954625 - type: nauc_mrr_at_3_std value: 12.388157171198323 - type: nauc_mrr_at_5_diff1 value: 6.212985461013579 - type: nauc_mrr_at_5_max value: -2.338470059431682 - type: nauc_mrr_at_5_std value: 13.240099646562145 - type: nauc_ndcg_at_1000_diff1 value: 8.840275201346746 - type: nauc_ndcg_at_1000_max value: 0.6647417450383817 - type: nauc_ndcg_at_1000_std value: 17.8564730922891 - type: nauc_ndcg_at_100_diff1 value: 9.762676212675903 - type: nauc_ndcg_at_100_max value: 1.4262377536189703 - type: nauc_ndcg_at_100_std value: 19.4795643393269 - type: nauc_ndcg_at_10_diff1 value: 9.188741734944518 - type: nauc_ndcg_at_10_max value: 1.3802584933742896 - type: nauc_ndcg_at_10_std value: 17.506067996460327 - type: nauc_ndcg_at_1_diff1 value: 9.060503842089553 - type: nauc_ndcg_at_1_max value: -4.8081298761261655 - type: nauc_ndcg_at_1_std value: 11.125316223515181 - type: nauc_ndcg_at_20_diff1 value: 9.746204603745053 - type: nauc_ndcg_at_20_max value: 1.788309512869953 - type: nauc_ndcg_at_20_std value: 18.9423764949264 - type: nauc_ndcg_at_3_diff1 value: 7.774791913420696 - type: nauc_ndcg_at_3_max value: -1.597066965567201 - type: nauc_ndcg_at_3_std value: 13.176494210176115 - type: nauc_ndcg_at_5_diff1 value: 6.842522112636893 - type: nauc_ndcg_at_5_max value: -1.973068438869888 - type: nauc_ndcg_at_5_std value: 14.57209872417026 - type: nauc_precision_at_1000_diff1 value: -3.0834719469656173 - type: nauc_precision_at_1000_max value: 14.451702830586255 - type: nauc_precision_at_1000_std value: 54.77279828687448 - type: nauc_precision_at_100_diff1 value: 18.013952140209113 - type: nauc_precision_at_100_max value: 13.365490775657305 - type: nauc_precision_at_100_std value: 46.774497474558906 - type: nauc_precision_at_10_diff1 value: 12.087879006855367 - type: nauc_precision_at_10_max value: 8.577753066338223 - type: nauc_precision_at_10_std value: 25.83055948986621 - type: nauc_precision_at_1_diff1 value: 9.060503842089553 - type: nauc_precision_at_1_max value: -4.8081298761261655 - type: nauc_precision_at_1_std value: 11.125316223515181 - type: nauc_precision_at_20_diff1 value: 14.837517107092523 - type: nauc_precision_at_20_max value: 10.825098623940823 - type: nauc_precision_at_20_std value: 33.07428383738506 - type: nauc_precision_at_3_diff1 value: 7.454307392090738 - type: nauc_precision_at_3_max value: -0.07600576651425912 - type: nauc_precision_at_3_std value: 
14.401150268962715 - type: nauc_precision_at_5_diff1 value: 5.3722482323229945 - type: nauc_precision_at_5_max value: -0.8401775506949162 - type: nauc_precision_at_5_std value: 17.210282537073585 - type: nauc_recall_at_1000_diff1 value: -3.0834719469653953 - type: nauc_recall_at_1000_max value: 14.451702830586296 - type: nauc_recall_at_1000_std value: 54.77279828687437 - type: nauc_recall_at_100_diff1 value: 18.013952140209057 - type: nauc_recall_at_100_max value: 13.365490775657346 - type: nauc_recall_at_100_std value: 46.77449747455887 - type: nauc_recall_at_10_diff1 value: 12.08787900685538 - type: nauc_recall_at_10_max value: 8.577753066338186 - type: nauc_recall_at_10_std value: 25.830559489866182 - type: nauc_recall_at_1_diff1 value: 9.060503842089553 - type: nauc_recall_at_1_max value: -4.8081298761261655 - type: nauc_recall_at_1_std value: 11.125316223515181 - type: nauc_recall_at_20_diff1 value: 14.837517107092587 - type: nauc_recall_at_20_max value: 10.825098623940837 - type: nauc_recall_at_20_std value: 33.07428383738506 - type: nauc_recall_at_3_diff1 value: 7.45430739209076 - type: nauc_recall_at_3_max value: -0.07600576651424053 - type: nauc_recall_at_3_std value: 14.401150268962763 - type: nauc_recall_at_5_diff1 value: 5.372248232322972 - type: nauc_recall_at_5_max value: -0.8401775506949434 - type: nauc_recall_at_5_std value: 17.210282537073567 - type: ndcg_at_1 value: 17.212 - type: ndcg_at_10 value: 33.774 - type: ndcg_at_100 value: 39.648 - type: ndcg_at_1000 value: 41.557 - type: ndcg_at_20 value: 36.317 - type: ndcg_at_3 value: 26.439 - type: ndcg_at_5 value: 29.787000000000003 - type: precision_at_1 value: 17.212 - type: precision_at_10 value: 5.377 - type: precision_at_100 value: 0.814 - type: precision_at_1000 value: 0.097 - type: precision_at_20 value: 3.19 - type: precision_at_3 value: 11.119 - type: precision_at_5 value: 8.307 - type: recall_at_1 value: 17.212 - type: recall_at_10 value: 53.769999999999996 - type: recall_at_100 value: 81.437 - type: recall_at_1000 value: 96.65700000000001 - type: recall_at_20 value: 63.798 - type: recall_at_3 value: 33.357 - type: recall_at_5 value: 41.536 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P (default) type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: main_score value: 36.269773496288245 - type: v_measure value: 36.269773496288245 - type: v_measure_std value: 14.198119547704884 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S (default) type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: main_score value: 26.49448515145058 - type: v_measure value: 26.49448515145058 - type: v_measure_std value: 14.782872832774022 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions (default) type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: main_score value: 52.88598975544666 - type: map value: 52.88598975544666 - type: mrr value: 67.66906300839818 - type: nAUC_map_diff1 value: 10.49901867802098 - type: nAUC_map_max value: 16.22592076548971 - type: nAUC_map_std value: 8.364041971796572 - type: nAUC_mrr_diff1 value: 9.318385742328429 - type: nAUC_mrr_max value: 25.360931571595074 - type: nAUC_mrr_std value: 10.230339410350053 - task: type: STS dataset: name: MTEB BIOSSES (default) type: mteb/biosses-sts config: default split: test revision: 
d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cosine_pearson value: 63.84878372119195 - type: cosine_spearman value: 65.856922281397 - type: euclidean_pearson value: 40.02875369629121 - type: euclidean_spearman value: 49.260760994073486 - type: main_score value: 65.856922281397 - type: manhattan_pearson value: 39.167512785706535 - type: manhattan_spearman value: 49.23786890619668 - type: pearson value: 63.84878372119195 - type: spearman value: 65.856922281397 - task: type: Classification dataset: name: MTEB Banking77Classification (default) type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 72.38961038961038 - type: f1 value: 72.56423030958749 - type: f1_weighted value: 72.5642303095875 - type: main_score value: 72.38961038961038 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P (default) type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: main_score value: 32.25731264202783 - type: v_measure value: 32.25731264202783 - type: v_measure_std value: 0.6180162953967675 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S (default) type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: main_score value: 22.338013507954145 - type: v_measure value: 22.338013507954145 - type: v_measure_std value: 0.8858915900286259 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval (default) type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: main_score value: 27.422 - type: map_at_1 value: 15.805 - type: map_at_10 value: 22.539 - type: map_at_100 value: 23.580000000000002 - type: map_at_1000 value: 23.724999999999998 - type: map_at_20 value: 22.979 - type: map_at_3 value: 19.824 - type: map_at_5 value: 21.23 - type: mrr_at_1 value: 20.171673819742487 - type: mrr_at_10 value: 27.032552172037143 - type: mrr_at_100 value: 27.87771584885484 - type: mrr_at_1000 value: 27.958914975599175 - type: mrr_at_20 value: 27.424591091295408 - type: mrr_at_3 value: 24.58273724368146 - type: mrr_at_5 value: 25.82021936099191 - type: nauc_map_at_1000_diff1 value: 42.57030843164609 - type: nauc_map_at_1000_max value: 27.21303919157657 - type: nauc_map_at_1000_std value: -4.5260719947191825 - type: nauc_map_at_100_diff1 value: 42.58560170722753 - type: nauc_map_at_100_max value: 27.172621592070005 - type: nauc_map_at_100_std value: -4.608320819640847 - type: nauc_map_at_10_diff1 value: 43.20261782730561 - type: nauc_map_at_10_max value: 27.17733501002295 - type: nauc_map_at_10_std value: -4.853722954829147 - type: nauc_map_at_1_diff1 value: 50.4589534396627 - type: nauc_map_at_1_max value: 31.98243028349231 - type: nauc_map_at_1_std value: -5.994261367708959 - type: nauc_map_at_20_diff1 value: 42.738499762952614 - type: nauc_map_at_20_max value: 27.143975463265175 - type: nauc_map_at_20_std value: -4.806069582811075 - type: nauc_map_at_3_diff1 value: 44.56389669066582 - type: nauc_map_at_3_max value: 26.833926287971416 - type: nauc_map_at_3_std value: -4.955428514290965 - type: nauc_map_at_5_diff1 value: 43.86876315553915 - type: nauc_map_at_5_max value: 27.333186284565176 - type: nauc_map_at_5_std value: -5.074595359564486 - type: nauc_mrr_at_1000_diff1 value: 39.63679192264147 - type: nauc_mrr_at_1000_max value: 26.234117053729133 - type: nauc_mrr_at_1000_std value: 
-2.3877696058349405 - type: nauc_mrr_at_100_diff1 value: 39.60055271322061 - type: nauc_mrr_at_100_max value: 26.209241967136354 - type: nauc_mrr_at_100_std value: -2.40172379518456 - type: nauc_mrr_at_10_diff1 value: 39.91403030715458 - type: nauc_mrr_at_10_max value: 26.291376019365615 - type: nauc_mrr_at_10_std value: -2.808990142924426 - type: nauc_mrr_at_1_diff1 value: 47.28788038819518 - type: nauc_mrr_at_1_max value: 30.963706202382934 - type: nauc_mrr_at_1_std value: -3.51497869942044 - type: nauc_mrr_at_20_diff1 value: 39.632871640502756 - type: nauc_mrr_at_20_max value: 26.268767712675096 - type: nauc_mrr_at_20_std value: -2.5995012134040913 - type: nauc_mrr_at_3_diff1 value: 41.59291827397769 - type: nauc_mrr_at_3_max value: 26.377970945135985 - type: nauc_mrr_at_3_std value: -2.260424527742146 - type: nauc_mrr_at_5_diff1 value: 40.660417345775265 - type: nauc_mrr_at_5_max value: 26.53119326656918 - type: nauc_mrr_at_5_std value: -2.6138936135502435 - type: nauc_ndcg_at_1000_diff1 value: 38.09235776641414 - type: nauc_ndcg_at_1000_max value: 25.640060639600037 - type: nauc_ndcg_at_1000_std value: -1.0492521706000484 - type: nauc_ndcg_at_100_diff1 value: 37.58032591292403 - type: nauc_ndcg_at_100_max value: 25.227643635602963 - type: nauc_ndcg_at_100_std value: -2.062733211841763 - type: nauc_ndcg_at_10_diff1 value: 39.5902476515199 - type: nauc_ndcg_at_10_max value: 25.54860574123993 - type: nauc_ndcg_at_10_std value: -3.945402600781258 - type: nauc_ndcg_at_1_diff1 value: 47.28788038819518 - type: nauc_ndcg_at_1_max value: 30.963706202382934 - type: nauc_ndcg_at_1_std value: -3.51497869942044 - type: nauc_ndcg_at_20_diff1 value: 38.21420502242327 - type: nauc_ndcg_at_20_max value: 25.36312552066329 - type: nauc_ndcg_at_20_std value: -3.620006678321481 - type: nauc_ndcg_at_3_diff1 value: 41.618842500004114 - type: nauc_ndcg_at_3_max value: 24.49658271374697 - type: nauc_ndcg_at_3_std value: -3.1464626400858737 - type: nauc_ndcg_at_5_diff1 value: 40.62911850945203 - type: nauc_ndcg_at_5_max value: 25.645929097520533 - type: nauc_ndcg_at_5_std value: -3.930292192790889 - type: nauc_precision_at_1000_diff1 value: -11.247759993119494 - type: nauc_precision_at_1000_max value: 0.0520592709173242 - type: nauc_precision_at_1000_std value: 13.875737244571596 - type: nauc_precision_at_100_diff1 value: 2.9396114844829846 - type: nauc_precision_at_100_max value: 12.65311404861249 - type: nauc_precision_at_100_std value: 5.197197717403989 - type: nauc_precision_at_10_diff1 value: 21.598728649828637 - type: nauc_precision_at_10_max value: 19.600344390387036 - type: nauc_precision_at_10_std value: -2.287072109349386 - type: nauc_precision_at_1_diff1 value: 47.28788038819518 - type: nauc_precision_at_1_max value: 30.963706202382934 - type: nauc_precision_at_1_std value: -3.51497869942044 - type: nauc_precision_at_20_diff1 value: 14.033184370220674 - type: nauc_precision_at_20_max value: 18.336361358754594 - type: nauc_precision_at_20_std value: -1.129136759880148 - type: nauc_precision_at_3_diff1 value: 32.877986961799415 - type: nauc_precision_at_3_max value: 18.58314886301541 - type: nauc_precision_at_3_std value: -1.3149473444001074 - type: nauc_precision_at_5_diff1 value: 28.64823897592757 - type: nauc_precision_at_5_max value: 20.1392449105061 - type: nauc_precision_at_5_std value: -2.4972384266998424 - type: nauc_recall_at_1000_diff1 value: 20.162811892550007 - type: nauc_recall_at_1000_max value: 14.914546121550105 - type: nauc_recall_at_1000_std value: 22.679861727471952 - type: 
nauc_recall_at_100_diff1 value: 21.682602022946543 - type: nauc_recall_at_100_max value: 17.054270054720657 - type: nauc_recall_at_100_std value: 6.873757453857215 - type: nauc_recall_at_10_diff1 value: 31.380594956722373 - type: nauc_recall_at_10_max value: 19.869238680763793 - type: nauc_recall_at_10_std value: -2.3399003157867297 - type: nauc_recall_at_1_diff1 value: 50.4589534396627 - type: nauc_recall_at_1_max value: 31.98243028349231 - type: nauc_recall_at_1_std value: -5.994261367708959 - type: nauc_recall_at_20_diff1 value: 26.8397372221868 - type: nauc_recall_at_20_max value: 19.363005179158783 - type: nauc_recall_at_20_std value: -1.6220262325260055 - type: nauc_recall_at_3_diff1 value: 37.022991018079324 - type: nauc_recall_at_3_max value: 20.02153979328149 - type: nauc_recall_at_3_std value: -2.6647682121076017 - type: nauc_recall_at_5_diff1 value: 34.27903939519203 - type: nauc_recall_at_5_max value: 21.241055817449386 - type: nauc_recall_at_5_std value: -3.4314867128318873 - type: ndcg_at_1 value: 20.172 - type: ndcg_at_10 value: 27.422 - type: ndcg_at_100 value: 32.505 - type: ndcg_at_1000 value: 35.637 - type: ndcg_at_20 value: 28.814 - type: ndcg_at_3 value: 22.977 - type: ndcg_at_5 value: 24.765 - type: precision_at_1 value: 20.172 - type: precision_at_10 value: 5.6370000000000005 - type: precision_at_100 value: 1.027 - type: precision_at_1000 value: 0.161 - type: precision_at_20 value: 3.3259999999999996 - type: precision_at_3 value: 11.493 - type: precision_at_5 value: 8.584 - type: recall_at_1 value: 15.805 - type: recall_at_10 value: 37.374 - type: recall_at_100 value: 60.279 - type: recall_at_1000 value: 81.635 - type: recall_at_20 value: 42.439 - type: recall_at_3 value: 24.2 - type: recall_at_5 value: 29.309 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval (default) type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: main_score value: 22.185 - type: map_at_1 value: 13.771 - type: map_at_10 value: 18.723 - type: map_at_100 value: 19.547 - type: map_at_1000 value: 19.67 - type: map_at_20 value: 19.144 - type: map_at_3 value: 17.039 - type: map_at_5 value: 18.07 - type: mrr_at_1 value: 17.261146496815286 - type: mrr_at_10 value: 22.57607926397735 - type: mrr_at_100 value: 23.328421262845985 - type: mrr_at_1000 value: 23.406708440207726 - type: mrr_at_20 value: 23.00513067729992 - type: mrr_at_3 value: 20.881104033970274 - type: mrr_at_5 value: 21.90976645435243 - type: nauc_map_at_1000_diff1 value: 41.81438335345437 - type: nauc_map_at_1000_max value: 8.562208130739274 - type: nauc_map_at_1000_std value: -1.137157212764164 - type: nauc_map_at_100_diff1 value: 41.85780839439892 - type: nauc_map_at_100_max value: 8.566307619578293 - type: nauc_map_at_100_std value: -1.2129732393356614 - type: nauc_map_at_10_diff1 value: 41.785746693197126 - type: nauc_map_at_10_max value: 8.77020097530979 - type: nauc_map_at_10_std value: -1.860022142851326 - type: nauc_map_at_1_diff1 value: 49.349328412728234 - type: nauc_map_at_1_max value: 9.959808327960705 - type: nauc_map_at_1_std value: -2.4979706379347015 - type: nauc_map_at_20_diff1 value: 41.99867158799419 - type: nauc_map_at_20_max value: 8.630845517982852 - type: nauc_map_at_20_std value: -1.5555880960790722 - type: nauc_map_at_3_diff1 value: 42.8531788404898 - type: nauc_map_at_3_max value: 9.38507401851082 - type: nauc_map_at_3_std value: -2.296896840269839 - type: nauc_map_at_5_diff1 value: 42.12620645186648 - type: 
nauc_map_at_5_max value: 9.264433745870681 - type: nauc_map_at_5_std value: -2.0693688828997736 - type: nauc_mrr_at_1000_diff1 value: 40.29012154388628 - type: nauc_mrr_at_1000_max value: 8.779701545657264 - type: nauc_mrr_at_1000_std value: -0.20014917783799155 - type: nauc_mrr_at_100_diff1 value: 40.3006068547429 - type: nauc_mrr_at_100_max value: 8.775743924193097 - type: nauc_mrr_at_100_std value: -0.20828264879030806 - type: nauc_mrr_at_10_diff1 value: 40.33534553416421 - type: nauc_mrr_at_10_max value: 8.981726859310484 - type: nauc_mrr_at_10_std value: -0.5216611931728035 - type: nauc_mrr_at_1_diff1 value: 46.65590153016528 - type: nauc_mrr_at_1_max value: 11.354410377930167 - type: nauc_mrr_at_1_std value: -0.48512368172284914 - type: nauc_mrr_at_20_diff1 value: 40.34786514439957 - type: nauc_mrr_at_20_max value: 8.832294217324495 - type: nauc_mrr_at_20_std value: -0.42924000733933554 - type: nauc_mrr_at_3_diff1 value: 41.28224899603959 - type: nauc_mrr_at_3_max value: 10.003171996897654 - type: nauc_mrr_at_3_std value: -0.8113798290447825 - type: nauc_mrr_at_5_diff1 value: 40.56541714571373 - type: nauc_mrr_at_5_max value: 9.563905395193512 - type: nauc_mrr_at_5_std value: -0.8315502471129665 - type: nauc_ndcg_at_1000_diff1 value: 38.05472732838954 - type: nauc_ndcg_at_1000_max value: 6.7845911459695305 - type: nauc_ndcg_at_1000_std value: 2.2417310333870804 - type: nauc_ndcg_at_100_diff1 value: 38.769913303134494 - type: nauc_ndcg_at_100_max value: 6.98512669077204 - type: nauc_ndcg_at_100_std value: 1.0262609809171577 - type: nauc_ndcg_at_10_diff1 value: 38.908082234801846 - type: nauc_ndcg_at_10_max value: 7.603096791364804 - type: nauc_ndcg_at_10_std value: -1.1550921794586773 - type: nauc_ndcg_at_1_diff1 value: 46.65590153016528 - type: nauc_ndcg_at_1_max value: 11.354410377930167 - type: nauc_ndcg_at_1_std value: -0.48512368172284914 - type: nauc_ndcg_at_20_diff1 value: 39.465569854802325 - type: nauc_ndcg_at_20_max value: 7.154863969387037 - type: nauc_ndcg_at_20_std value: -0.6152305686970557 - type: nauc_ndcg_at_3_diff1 value: 40.30563509474192 - type: nauc_ndcg_at_3_max value: 9.303308928291493 - type: nauc_ndcg_at_3_std value: -1.7310855429382492 - type: nauc_ndcg_at_5_diff1 value: 39.43089993856754 - type: nauc_ndcg_at_5_max value: 8.684101391653703 - type: nauc_ndcg_at_5_std value: -1.5609939178898662 - type: nauc_precision_at_1000_diff1 value: 3.0018103428187315 - type: nauc_precision_at_1000_max value: -0.12486785354520373 - type: nauc_precision_at_1000_std value: 16.595960891881056 - type: nauc_precision_at_100_diff1 value: 15.783807114606802 - type: nauc_precision_at_100_max value: 2.2692493411585826 - type: nauc_precision_at_100_std value: 12.367040550680183 - type: nauc_precision_at_10_diff1 value: 26.23103176130776 - type: nauc_precision_at_10_max value: 5.077361697939634 - type: nauc_precision_at_10_std value: 3.2548036883456657 - type: nauc_precision_at_1_diff1 value: 46.65590153016528 - type: nauc_precision_at_1_max value: 11.354410377930167 - type: nauc_precision_at_1_std value: -0.48512368172284914 - type: nauc_precision_at_20_diff1 value: 24.983738615009624 - type: nauc_precision_at_20_max value: 3.095779692318981 - type: nauc_precision_at_20_std value: 6.526918452724511 - type: nauc_precision_at_3_diff1 value: 32.03964896171193 - type: nauc_precision_at_3_max value: 10.000197471378979 - type: nauc_precision_at_3_std value: -0.3781576907697181 - type: nauc_precision_at_5_diff1 value: 29.031758722891198 - type: nauc_precision_at_5_max value: 
8.97944054772189 - type: nauc_precision_at_5_std value: 0.5467561737293052 - type: nauc_recall_at_1000_diff1 value: 22.648641936528087 - type: nauc_recall_at_1000_max value: -0.43222220598242816 - type: nauc_recall_at_1000_std value: 16.242047878703833 - type: nauc_recall_at_100_diff1 value: 29.817588639346766 - type: nauc_recall_at_100_max value: 2.582453220704315 - type: nauc_recall_at_100_std value: 6.976670600001465 - type: nauc_recall_at_10_diff1 value: 32.0508451306858 - type: nauc_recall_at_10_max value: 4.398320289922377 - type: nauc_recall_at_10_std value: -1.1675629134288315 - type: nauc_recall_at_1_diff1 value: 49.349328412728234 - type: nauc_recall_at_1_max value: 9.959808327960705 - type: nauc_recall_at_1_std value: -2.4979706379347015 - type: nauc_recall_at_20_diff1 value: 33.11704220737327 - type: nauc_recall_at_20_max value: 3.1705964314148267 - type: nauc_recall_at_20_std value: 0.2300386402818066 - type: nauc_recall_at_3_diff1 value: 35.99265552818323 - type: nauc_recall_at_3_max value: 7.869051232744449 - type: nauc_recall_at_3_std value: -2.358653198443007 - type: nauc_recall_at_5_diff1 value: 33.520063454248074 - type: nauc_recall_at_5_max value: 6.8093745620610395 - type: nauc_recall_at_5_std value: -1.7390155063380721 - type: ndcg_at_1 value: 17.261000000000003 - type: ndcg_at_10 value: 22.185 - type: ndcg_at_100 value: 26.107000000000003 - type: ndcg_at_1000 value: 29.071 - type: ndcg_at_20 value: 23.56 - type: ndcg_at_3 value: 19.333 - type: ndcg_at_5 value: 20.807000000000002 - type: precision_at_1 value: 17.261000000000003 - type: precision_at_10 value: 4.223 - type: precision_at_100 value: 0.783 - type: precision_at_1000 value: 0.128 - type: precision_at_20 value: 2.608 - type: precision_at_3 value: 9.299 - type: precision_at_5 value: 6.854 - type: recall_at_1 value: 13.771 - type: recall_at_10 value: 28.508 - type: recall_at_100 value: 45.863 - type: recall_at_1000 value: 66.604 - type: recall_at_20 value: 33.588 - type: recall_at_3 value: 20.427999999999997 - type: recall_at_5 value: 24.357 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval (default) type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: main_score value: 33.79 - type: map_at_1 value: 21.637999999999998 - type: map_at_10 value: 29.212 - type: map_at_100 value: 30.214999999999996 - type: map_at_1000 value: 30.312 - type: map_at_20 value: 29.757 - type: map_at_3 value: 26.734 - type: map_at_5 value: 28.131 - type: mrr_at_1 value: 25.07836990595611 - type: mrr_at_10 value: 32.23615464994772 - type: mrr_at_100 value: 33.086737260738005 - type: mrr_at_1000 value: 33.15229870932955 - type: mrr_at_20 value: 32.7018407151951 - type: mrr_at_3 value: 29.905956112852643 - type: mrr_at_5 value: 31.24137931034478 - type: nauc_map_at_1000_diff1 value: 37.12307495362121 - type: nauc_map_at_1000_max value: 21.537265058555853 - type: nauc_map_at_1000_std value: -8.738060307090839 - type: nauc_map_at_100_diff1 value: 37.1180741454758 - type: nauc_map_at_100_max value: 21.554344473420436 - type: nauc_map_at_100_std value: -8.78869495524838 - type: nauc_map_at_10_diff1 value: 37.171532078470385 - type: nauc_map_at_10_max value: 21.419973328157454 - type: nauc_map_at_10_std value: -9.225483825250098 - type: nauc_map_at_1_diff1 value: 41.21674359277609 - type: nauc_map_at_1_max value: 21.17386538449636 - type: nauc_map_at_1_std value: -10.13071651221397 - type: nauc_map_at_20_diff1 value: 37.07089391994802 - type: 
nauc_map_at_20_max value: 21.56668913570749 - type: nauc_map_at_20_std value: -9.063862622862095 - type: nauc_map_at_3_diff1 value: 37.685900130415895 - type: nauc_map_at_3_max value: 20.275025161152723 - type: nauc_map_at_3_std value: -10.786471610700463 - type: nauc_map_at_5_diff1 value: 36.8471833508775 - type: nauc_map_at_5_max value: 20.92621364369423 - type: nauc_map_at_5_std value: -9.950094695828529 - type: nauc_mrr_at_1000_diff1 value: 37.38260924638214 - type: nauc_mrr_at_1000_max value: 23.783503700138628 - type: nauc_mrr_at_1000_std value: -8.062115131406841 - type: nauc_mrr_at_100_diff1 value: 37.369442118264715 - type: nauc_mrr_at_100_max value: 23.786993434343938 - type: nauc_mrr_at_100_std value: -8.068423378948197 - type: nauc_mrr_at_10_diff1 value: 37.39001292590747 - type: nauc_mrr_at_10_max value: 23.888309074872616 - type: nauc_mrr_at_10_std value: -8.302475901455704 - type: nauc_mrr_at_1_diff1 value: 42.04523215156183 - type: nauc_mrr_at_1_max value: 24.284081712011343 - type: nauc_mrr_at_1_std value: -9.814660127876252 - type: nauc_mrr_at_20_diff1 value: 37.379334155540214 - type: nauc_mrr_at_20_max value: 23.844473948925106 - type: nauc_mrr_at_20_std value: -8.235356584670322 - type: nauc_mrr_at_3_diff1 value: 38.139923039533954 - type: nauc_mrr_at_3_max value: 23.56622226506994 - type: nauc_mrr_at_3_std value: -9.875475998553846 - type: nauc_mrr_at_5_diff1 value: 37.32472725762185 - type: nauc_mrr_at_5_max value: 23.678357942681288 - type: nauc_mrr_at_5_std value: -8.973665899372584 - type: nauc_ndcg_at_1000_diff1 value: 35.9269587646789 - type: nauc_ndcg_at_1000_max value: 22.032154334522335 - type: nauc_ndcg_at_1000_std value: -4.420257572893553 - type: nauc_ndcg_at_100_diff1 value: 35.70701221495438 - type: nauc_ndcg_at_100_max value: 22.385258261960903 - type: nauc_ndcg_at_100_std value: -5.218237549092405 - type: nauc_ndcg_at_10_diff1 value: 35.84180901292102 - type: nauc_ndcg_at_10_max value: 22.464645985022006 - type: nauc_ndcg_at_10_std value: -7.5341732536415975 - type: nauc_ndcg_at_1_diff1 value: 42.04523215156183 - type: nauc_ndcg_at_1_max value: 24.284081712011343 - type: nauc_ndcg_at_1_std value: -9.814660127876252 - type: nauc_ndcg_at_20_diff1 value: 35.52062094141778 - type: nauc_ndcg_at_20_max value: 22.55317967653313 - type: nauc_ndcg_at_20_std value: -7.110500864173957 - type: nauc_ndcg_at_3_diff1 value: 36.81378575758175 - type: nauc_ndcg_at_3_max value: 20.819587275808576 - type: nauc_ndcg_at_3_std value: -10.624109644518786 - type: nauc_ndcg_at_5_diff1 value: 35.36217863334981 - type: nauc_ndcg_at_5_max value: 21.612788726107834 - type: nauc_ndcg_at_5_std value: -9.18508650489183 - type: nauc_precision_at_1000_diff1 value: 5.772508569767738 - type: nauc_precision_at_1000_max value: 7.590203889721581 - type: nauc_precision_at_1000_std value: 25.20499657865677 - type: nauc_precision_at_100_diff1 value: 17.027746274944796 - type: nauc_precision_at_100_max value: 18.23112402146368 - type: nauc_precision_at_100_std value: 14.975250839963802 - type: nauc_precision_at_10_diff1 value: 27.568104882639886 - type: nauc_precision_at_10_max value: 24.523260535220405 - type: nauc_precision_at_10_std value: -0.7790401720706134 - type: nauc_precision_at_1_diff1 value: 42.04523215156183 - type: nauc_precision_at_1_max value: 24.284081712011343 - type: nauc_precision_at_1_std value: -9.814660127876252 - type: nauc_precision_at_20_diff1 value: 23.61060569911262 - type: nauc_precision_at_20_max value: 23.27474009600092 - type: nauc_precision_at_20_std value: 
2.1363983504905684 - type: nauc_precision_at_3_diff1 value: 32.021133529943114 - type: nauc_precision_at_3_max value: 21.951492022009393 - type: nauc_precision_at_3_std value: -9.33081717856222 - type: nauc_precision_at_5_diff1 value: 27.781401018009493 - type: nauc_precision_at_5_max value: 23.00327374589772 - type: nauc_precision_at_5_std value: -5.582376474473184 - type: nauc_recall_at_1000_diff1 value: 28.08463704110158 - type: nauc_recall_at_1000_max value: 14.719308230994152 - type: nauc_recall_at_1000_std value: 31.09066132145234 - type: nauc_recall_at_100_diff1 value: 28.757625108969016 - type: nauc_recall_at_100_max value: 20.69402876399338 - type: nauc_recall_at_100_std value: 10.02186914341548 - type: nauc_recall_at_10_diff1 value: 30.775586269840577 - type: nauc_recall_at_10_max value: 22.4818353459375 - type: nauc_recall_at_10_std value: -3.004399664292814 - type: nauc_recall_at_1_diff1 value: 41.21674359277609 - type: nauc_recall_at_1_max value: 21.17386538449636 - type: nauc_recall_at_1_std value: -10.13071651221397 - type: nauc_recall_at_20_diff1 value: 29.12970422131222 - type: nauc_recall_at_20_max value: 22.211132247666548 - type: nauc_recall_at_20_std value: -1.6807314724407867 - type: nauc_recall_at_3_diff1 value: 33.437878690991376 - type: nauc_recall_at_3_max value: 18.621911570214518 - type: nauc_recall_at_3_std value: -10.670879405179733 - type: nauc_recall_at_5_diff1 value: 30.19398360899056 - type: nauc_recall_at_5_max value: 20.646327147212524 - type: nauc_recall_at_5_std value: -7.5225214344616615 - type: ndcg_at_1 value: 25.078 - type: ndcg_at_10 value: 33.79 - type: ndcg_at_100 value: 38.72 - type: ndcg_at_1000 value: 41.107 - type: ndcg_at_20 value: 35.609 - type: ndcg_at_3 value: 29.096 - type: ndcg_at_5 value: 31.348 - type: precision_at_1 value: 25.078 - type: precision_at_10 value: 5.618 - type: precision_at_100 value: 0.8909999999999999 - type: precision_at_1000 value: 0.117 - type: precision_at_20 value: 3.292 - type: precision_at_3 value: 13.062000000000001 - type: precision_at_5 value: 9.254 - type: recall_at_1 value: 21.637999999999998 - type: recall_at_10 value: 44.968 - type: recall_at_100 value: 67.415 - type: recall_at_1000 value: 84.88799999999999 - type: recall_at_20 value: 51.762 - type: recall_at_3 value: 32.054 - type: recall_at_5 value: 37.677 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval (default) type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: main_score value: 13.854 - type: map_at_1 value: 8.437 - type: map_at_10 value: 11.713 - type: map_at_100 value: 12.398000000000001 - type: map_at_1000 value: 12.506999999999998 - type: map_at_20 value: 12.058 - type: map_at_3 value: 10.584 - type: map_at_5 value: 11.12 - type: mrr_at_1 value: 8.926553672316384 - type: mrr_at_10 value: 12.423325262308312 - type: mrr_at_100 value: 13.10586004343873 - type: mrr_at_1000 value: 13.212007969459988 - type: mrr_at_20 value: 12.758616943509635 - type: mrr_at_3 value: 11.224105461393593 - type: mrr_at_5 value: 11.828625235404894 - type: nauc_map_at_1000_diff1 value: 31.895778457147372 - type: nauc_map_at_1000_max value: 17.690415779738842 - type: nauc_map_at_1000_std value: -18.79188186045447 - type: nauc_map_at_100_diff1 value: 31.892654811209773 - type: nauc_map_at_100_max value: 17.621047925604824 - type: nauc_map_at_100_std value: -18.791276567129998 - type: nauc_map_at_10_diff1 value: 33.426989015523645 - type: nauc_map_at_10_max value: 18.050335773420002 - 
type: nauc_map_at_10_std value: -19.382509111730492 - type: nauc_map_at_1_diff1 value: 42.314161966032856 - type: nauc_map_at_1_max value: 22.207585066404487 - type: nauc_map_at_1_std value: -23.600059254769178 - type: nauc_map_at_20_diff1 value: 32.41734162581042 - type: nauc_map_at_20_max value: 17.85152366175027 - type: nauc_map_at_20_std value: -18.99269556017807 - type: nauc_map_at_3_diff1 value: 35.23676219675338 - type: nauc_map_at_3_max value: 19.07665145397135 - type: nauc_map_at_3_std value: -21.38726052792218 - type: nauc_map_at_5_diff1 value: 33.88523071159954 - type: nauc_map_at_5_max value: 18.023838499714422 - type: nauc_map_at_5_std value: -20.640978226500593 - type: nauc_mrr_at_1000_diff1 value: 30.084485141409704 - type: nauc_mrr_at_1000_max value: 18.463084602140174 - type: nauc_mrr_at_1000_std value: -16.96576220212689 - type: nauc_mrr_at_100_diff1 value: 30.083032361790384 - type: nauc_mrr_at_100_max value: 18.41867896211605 - type: nauc_mrr_at_100_std value: -16.941672717749174 - type: nauc_mrr_at_10_diff1 value: 31.454758915975727 - type: nauc_mrr_at_10_max value: 18.89724676766129 - type: nauc_mrr_at_10_std value: -17.494532807628087 - type: nauc_mrr_at_1_diff1 value: 40.42911498617085 - type: nauc_mrr_at_1_max value: 23.687375206668168 - type: nauc_mrr_at_1_std value: -21.73867940605904 - type: nauc_mrr_at_20_diff1 value: 30.46673282152249 - type: nauc_mrr_at_20_max value: 18.578617566395927 - type: nauc_mrr_at_20_std value: -17.093906397674257 - type: nauc_mrr_at_3_diff1 value: 33.47174891283547 - type: nauc_mrr_at_3_max value: 20.253650649438896 - type: nauc_mrr_at_3_std value: -19.54698186106603 - type: nauc_mrr_at_5_diff1 value: 31.746879870345563 - type: nauc_mrr_at_5_max value: 18.901963239215746 - type: nauc_mrr_at_5_std value: -18.621911662052824 - type: nauc_ndcg_at_1000_diff1 value: 24.09096968865543 - type: nauc_ndcg_at_1000_max value: 15.891636106534374 - type: nauc_ndcg_at_1000_std value: -13.871634842181408 - type: nauc_ndcg_at_100_diff1 value: 24.175105867882852 - type: nauc_ndcg_at_100_max value: 14.17771979280098 - type: nauc_ndcg_at_100_std value: -13.991847290428177 - type: nauc_ndcg_at_10_diff1 value: 29.77008313203033 - type: nauc_ndcg_at_10_max value: 16.49571094148876 - type: nauc_ndcg_at_10_std value: -16.42614748077505 - type: nauc_ndcg_at_1_diff1 value: 40.42911498617085 - type: nauc_ndcg_at_1_max value: 23.687375206668168 - type: nauc_ndcg_at_1_std value: -21.73867940605904 - type: nauc_ndcg_at_20_diff1 value: 26.76029443519322 - type: nauc_ndcg_at_20_max value: 15.74572558341743 - type: nauc_ndcg_at_20_std value: -15.32279872308287 - type: nauc_ndcg_at_3_diff1 value: 32.806913565642375 - type: nauc_ndcg_at_3_max value: 18.45178369596658 - type: nauc_ndcg_at_3_std value: -20.37006496685283 - type: nauc_ndcg_at_5_diff1 value: 30.494877222338364 - type: nauc_ndcg_at_5_max value: 16.541239086008822 - type: nauc_ndcg_at_5_std value: -19.015633388163188 - type: nauc_precision_at_1000_diff1 value: -0.43658726743856746 - type: nauc_precision_at_1000_max value: 17.036247228446616 - type: nauc_precision_at_1000_std value: 3.435494852675229 - type: nauc_precision_at_100_diff1 value: 6.712643480741582 - type: nauc_precision_at_100_max value: 9.614879293703039 - type: nauc_precision_at_100_std value: -3.4126404487749653 - type: nauc_precision_at_10_diff1 value: 21.510457197077496 - type: nauc_precision_at_10_max value: 16.184332818605537 - type: nauc_precision_at_10_std value: -9.294139265690534 - type: nauc_precision_at_1_diff1 value: 
40.42911498617085 - type: nauc_precision_at_1_max value: 23.687375206668168 - type: nauc_precision_at_1_std value: -21.73867940605904 - type: nauc_precision_at_20_diff1 value: 13.992219269068753 - type: nauc_precision_at_20_max value: 14.937883960803463 - type: nauc_precision_at_20_std value: -7.104557238331423 - type: nauc_precision_at_3_diff1 value: 26.261517666998035 - type: nauc_precision_at_3_max value: 18.354566058660986 - type: nauc_precision_at_3_std value: -17.341338758596976 - type: nauc_precision_at_5_diff1 value: 21.742341640092196 - type: nauc_precision_at_5_max value: 15.096193569326688 - type: nauc_precision_at_5_std value: -14.266583144611857 - type: nauc_recall_at_1000_diff1 value: 4.2314167922614905 - type: nauc_recall_at_1000_max value: 12.215254827462095 - type: nauc_recall_at_1000_std value: -1.4077735592136236 - type: nauc_recall_at_100_diff1 value: 8.185574219798335 - type: nauc_recall_at_100_max value: 4.897935122753127 - type: nauc_recall_at_100_std value: -4.283502316451027 - type: nauc_recall_at_10_diff1 value: 23.28365482091351 - type: nauc_recall_at_10_max value: 12.121504428933513 - type: nauc_recall_at_10_std value: -10.957020862721302 - type: nauc_recall_at_1_diff1 value: 42.314161966032856 - type: nauc_recall_at_1_max value: 22.207585066404487 - type: nauc_recall_at_1_std value: -23.600059254769178 - type: nauc_recall_at_20_diff1 value: 15.503784848951534 - type: nauc_recall_at_20_max value: 10.217338368574987 - type: nauc_recall_at_20_std value: -8.400517193855757 - type: nauc_recall_at_3_diff1 value: 28.04652701414722 - type: nauc_recall_at_3_max value: 15.164882904887497 - type: nauc_recall_at_3_std value: -19.033698598216844 - type: nauc_recall_at_5_diff1 value: 24.268291218903475 - type: nauc_recall_at_5_max value: 11.923028154467396 - type: nauc_recall_at_5_std value: -16.82567873471909 - type: ndcg_at_1 value: 8.927 - type: ndcg_at_10 value: 13.854 - type: ndcg_at_100 value: 17.7 - type: ndcg_at_1000 value: 21.035 - type: ndcg_at_20 value: 15.059000000000001 - type: ndcg_at_3 value: 11.484 - type: ndcg_at_5 value: 12.437 - type: precision_at_1 value: 8.927 - type: precision_at_10 value: 2.271 - type: precision_at_100 value: 0.44400000000000006 - type: precision_at_1000 value: 0.078 - type: precision_at_20 value: 1.401 - type: precision_at_3 value: 4.934 - type: precision_at_5 value: 3.5479999999999996 - type: recall_at_1 value: 8.437 - type: recall_at_10 value: 19.834 - type: recall_at_100 value: 38.694 - type: recall_at_1000 value: 64.744 - type: recall_at_20 value: 24.429000000000002 - type: recall_at_3 value: 13.361999999999998 - type: recall_at_5 value: 15.540000000000001 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval (default) type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: main_score value: 9.837 - type: map_at_1 value: 5.159 - type: map_at_10 value: 7.703 - type: map_at_100 value: 8.426 - type: map_at_1000 value: 8.519 - type: map_at_20 value: 8.047 - type: map_at_3 value: 6.654 - type: map_at_5 value: 7.091 - type: mrr_at_1 value: 6.467661691542288 - type: mrr_at_10 value: 9.478302929795472 - type: mrr_at_100 value: 10.289008336384581 - type: mrr_at_1000 value: 10.371409959945957 - type: mrr_at_20 value: 9.90206244583065 - type: mrr_at_3 value: 8.167495854063018 - type: mrr_at_5 value: 8.776948590381428 - type: nauc_map_at_1000_diff1 value: 20.991965340430756 - type: nauc_map_at_1000_max value: 5.4803410436586315 - type: 
nauc_map_at_1000_std value: 7.532860192395963 - type: nauc_map_at_100_diff1 value: 20.948048169848562 - type: nauc_map_at_100_max value: 5.423178225119055 - type: nauc_map_at_100_std value: 7.428927233698493 - type: nauc_map_at_10_diff1 value: 21.246222726045804 - type: nauc_map_at_10_max value: 4.63095343363466 - type: nauc_map_at_10_std value: 7.144023053962637 - type: nauc_map_at_1_diff1 value: 30.223652292546266 - type: nauc_map_at_1_max value: 4.553051882134141 - type: nauc_map_at_1_std value: 4.951308605278772 - type: nauc_map_at_20_diff1 value: 20.921761668029813 - type: nauc_map_at_20_max value: 5.030778839822774 - type: nauc_map_at_20_std value: 7.029955811602383 - type: nauc_map_at_3_diff1 value: 23.285924325381803 - type: nauc_map_at_3_max value: 5.75529272678179 - type: nauc_map_at_3_std value: 4.596117470066295 - type: nauc_map_at_5_diff1 value: 22.537257669190947 - type: nauc_map_at_5_max value: 4.121925731323751 - type: nauc_map_at_5_std value: 6.304714061098604 - type: nauc_mrr_at_1000_diff1 value: 19.173759649273233 - type: nauc_mrr_at_1000_max value: 7.621793094874906 - type: nauc_mrr_at_1000_std value: 7.8559620996974004 - type: nauc_mrr_at_100_diff1 value: 19.111228160116582 - type: nauc_mrr_at_100_max value: 7.5928006784641 - type: nauc_mrr_at_100_std value: 7.81950691444481 - type: nauc_mrr_at_10_diff1 value: 19.489946278790672 - type: nauc_mrr_at_10_max value: 6.980975854904637 - type: nauc_mrr_at_10_std value: 7.706339687745954 - type: nauc_mrr_at_1_diff1 value: 27.48148957465694 - type: nauc_mrr_at_1_max value: 7.372706481581169 - type: nauc_mrr_at_1_std value: 6.391416784537868 - type: nauc_mrr_at_20_diff1 value: 19.148965222777782 - type: nauc_mrr_at_20_max value: 7.290887679899084 - type: nauc_mrr_at_20_std value: 7.448183665979951 - type: nauc_mrr_at_3_diff1 value: 21.750601270327905 - type: nauc_mrr_at_3_max value: 8.244444667347075 - type: nauc_mrr_at_3_std value: 4.729668071892326 - type: nauc_mrr_at_5_diff1 value: 20.7897812930415 - type: nauc_mrr_at_5_max value: 6.863327307713806 - type: nauc_mrr_at_5_std value: 6.841304973729449 - type: nauc_ndcg_at_1000_diff1 value: 17.29441255932624 - type: nauc_ndcg_at_1000_max value: 7.9286798648497285 - type: nauc_ndcg_at_1000_std value: 11.877149914393652 - type: nauc_ndcg_at_100_diff1 value: 16.4336463729308 - type: nauc_ndcg_at_100_max value: 8.07229083359491 - type: nauc_ndcg_at_100_std value: 10.34506864310445 - type: nauc_ndcg_at_10_diff1 value: 17.55824567751664 - type: nauc_ndcg_at_10_max value: 4.993609073207455 - type: nauc_ndcg_at_10_std value: 8.781232299164529 - type: nauc_ndcg_at_1_diff1 value: 27.48148957465694 - type: nauc_ndcg_at_1_max value: 7.372706481581169 - type: nauc_ndcg_at_1_std value: 6.391416784537868 - type: nauc_ndcg_at_20_diff1 value: 16.87739691810417 - type: nauc_ndcg_at_20_max value: 6.326711669823591 - type: nauc_ndcg_at_20_std value: 8.193549456385835 - type: nauc_ndcg_at_3_diff1 value: 21.57747982063095 - type: nauc_ndcg_at_3_max value: 7.091503322088235 - type: nauc_ndcg_at_3_std value: 4.157156253951653 - type: nauc_ndcg_at_5_diff1 value: 20.082404601341455 - type: nauc_ndcg_at_5_max value: 4.22584316571604 - type: nauc_ndcg_at_5_std value: 7.054315761638248 - type: nauc_precision_at_1000_diff1 value: 9.317689102874894 - type: nauc_precision_at_1000_max value: 9.58782401785448 - type: nauc_precision_at_1000_std value: 10.64241217084012 - type: nauc_precision_at_100_diff1 value: 10.807229788315885 - type: nauc_precision_at_100_max value: 13.109067404516338 - type: 
nauc_precision_at_100_std value: 12.652461769792342 - type: nauc_precision_at_10_diff1 value: 11.747684802786821 - type: nauc_precision_at_10_max value: 5.154980926282553 - type: nauc_precision_at_10_std value: 10.96256762400505 - type: nauc_precision_at_1_diff1 value: 27.48148957465694 - type: nauc_precision_at_1_max value: 7.372706481581169 - type: nauc_precision_at_1_std value: 6.391416784537868 - type: nauc_precision_at_20_diff1 value: 10.048919763414146 - type: nauc_precision_at_20_max value: 9.457533080637551 - type: nauc_precision_at_20_std value: 8.38270502134793 - type: nauc_precision_at_3_diff1 value: 17.62648712108384 - type: nauc_precision_at_3_max value: 9.368333317681678 - type: nauc_precision_at_3_std value: 2.831364424989006 - type: nauc_precision_at_5_diff1 value: 15.244543857948454 - type: nauc_precision_at_5_max value: 4.611372441896458 - type: nauc_precision_at_5_std value: 8.947499545370727 - type: nauc_recall_at_1000_diff1 value: 11.860223591226426 - type: nauc_recall_at_1000_max value: 9.065659539526218 - type: nauc_recall_at_1000_std value: 21.369970396825007 - type: nauc_recall_at_100_diff1 value: 9.097923124619061 - type: nauc_recall_at_100_max value: 11.44262240376369 - type: nauc_recall_at_100_std value: 14.733237990671242 - type: nauc_recall_at_10_diff1 value: 11.095059312746661 - type: nauc_recall_at_10_max value: 4.459364478932909 - type: nauc_recall_at_10_std value: 11.13185668334817 - type: nauc_recall_at_1_diff1 value: 30.223652292546266 - type: nauc_recall_at_1_max value: 4.553051882134141 - type: nauc_recall_at_1_std value: 4.951308605278772 - type: nauc_recall_at_20_diff1 value: 10.810802709805385 - type: nauc_recall_at_20_max value: 7.510486361196866 - type: nauc_recall_at_20_std value: 9.447990949397933 - type: nauc_recall_at_3_diff1 value: 17.313057423204715 - type: nauc_recall_at_3_max value: 7.227652377873599 - type: nauc_recall_at_3_std value: 3.091979625029158 - type: nauc_recall_at_5_diff1 value: 15.40727532119762 - type: nauc_recall_at_5_max value: 1.8611986193155992 - type: nauc_recall_at_5_std value: 8.185241357994292 - type: ndcg_at_1 value: 6.468 - type: ndcg_at_10 value: 9.837 - type: ndcg_at_100 value: 13.825000000000001 - type: ndcg_at_1000 value: 16.592000000000002 - type: ndcg_at_20 value: 11.129 - type: ndcg_at_3 value: 7.579 - type: ndcg_at_5 value: 8.355 - type: precision_at_1 value: 6.468 - type: precision_at_10 value: 1.9900000000000002 - type: precision_at_100 value: 0.459 - type: precision_at_1000 value: 0.08 - type: precision_at_20 value: 1.331 - type: precision_at_3 value: 3.566 - type: precision_at_5 value: 2.6870000000000003 - type: recall_at_1 value: 5.159 - type: recall_at_10 value: 14.746 - type: recall_at_100 value: 32.906 - type: recall_at_1000 value: 53.25 - type: recall_at_20 value: 19.439999999999998 - type: recall_at_3 value: 8.584999999999999 - type: recall_at_5 value: 10.446 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval (default) type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: main_score value: 24.915000000000003 - type: map_at_1 value: 15.504999999999999 - type: map_at_10 value: 20.957 - type: map_at_100 value: 21.917 - type: map_at_1000 value: 22.066 - type: map_at_20 value: 21.443 - type: map_at_3 value: 18.995 - type: map_at_5 value: 20.096 - type: mrr_at_1 value: 19.056785370548603 - type: mrr_at_10 value: 24.990833677070444 - type: mrr_at_100 value: 25.8208720564778 - type: mrr_at_1000 value: 
25.912884506186014 - type: mrr_at_20 value: 25.41772037548221 - type: mrr_at_3 value: 22.95476419634265 - type: mrr_at_5 value: 24.11453320500482 - type: nauc_map_at_1000_diff1 value: 38.076585051060675 - type: nauc_map_at_1000_max value: 18.056702089396662 - type: nauc_map_at_1000_std value: -5.671192417504087 - type: nauc_map_at_100_diff1 value: 38.08599722714999 - type: nauc_map_at_100_max value: 17.990649882140914 - type: nauc_map_at_100_std value: -5.757790211650656 - type: nauc_map_at_10_diff1 value: 38.05562481839379 - type: nauc_map_at_10_max value: 17.539054472069946 - type: nauc_map_at_10_std value: -6.387542547194047 - type: nauc_map_at_1_diff1 value: 42.98313461413959 - type: nauc_map_at_1_max value: 21.177240393143098 - type: nauc_map_at_1_std value: -7.143850386196276 - type: nauc_map_at_20_diff1 value: 37.9625452229517 - type: nauc_map_at_20_max value: 17.760857764249888 - type: nauc_map_at_20_std value: -6.1970139184556965 - type: nauc_map_at_3_diff1 value: 39.44820032223843 - type: nauc_map_at_3_max value: 16.7722965995488 - type: nauc_map_at_3_std value: -6.81542895292068 - type: nauc_map_at_5_diff1 value: 38.59443276293579 - type: nauc_map_at_5_max value: 17.371303618685445 - type: nauc_map_at_5_std value: -6.135604805438213 - type: nauc_mrr_at_1000_diff1 value: 37.43089835368739 - type: nauc_mrr_at_1000_max value: 21.04805861047155 - type: nauc_mrr_at_1000_std value: -5.068432531045453 - type: nauc_mrr_at_100_diff1 value: 37.41742475306239 - type: nauc_mrr_at_100_max value: 21.04544732019752 - type: nauc_mrr_at_100_std value: -5.095192190983453 - type: nauc_mrr_at_10_diff1 value: 37.32527292823289 - type: nauc_mrr_at_10_max value: 20.817698975783884 - type: nauc_mrr_at_10_std value: -5.556456776618353 - type: nauc_mrr_at_1_diff1 value: 42.09299252574772 - type: nauc_mrr_at_1_max value: 24.33888118839859 - type: nauc_mrr_at_1_std value: -5.666087824854275 - type: nauc_mrr_at_20_diff1 value: 37.421240074775845 - type: nauc_mrr_at_20_max value: 21.00425959939269 - type: nauc_mrr_at_20_std value: -5.335211771892977 - type: nauc_mrr_at_3_diff1 value: 38.52179702152584 - type: nauc_mrr_at_3_max value: 20.463153588780404 - type: nauc_mrr_at_3_std value: -6.209031923179788 - type: nauc_mrr_at_5_diff1 value: 37.62988493544957 - type: nauc_mrr_at_5_max value: 20.79180521338152 - type: nauc_mrr_at_5_std value: -5.258589248617482 - type: nauc_ndcg_at_1000_diff1 value: 35.73662163419835 - type: nauc_ndcg_at_1000_max value: 19.63564222331479 - type: nauc_ndcg_at_1000_std value: -1.851198141711594 - type: nauc_ndcg_at_100_diff1 value: 36.09210648152838 - type: nauc_ndcg_at_100_max value: 18.917342208415263 - type: nauc_ndcg_at_100_std value: -3.0420576298778355 - type: nauc_ndcg_at_10_diff1 value: 35.95226398653496 - type: nauc_ndcg_at_10_max value: 17.37357287979475 - type: nauc_ndcg_at_10_std value: -6.016002421388863 - type: nauc_ndcg_at_1_diff1 value: 42.09299252574772 - type: nauc_ndcg_at_1_max value: 24.33888118839859 - type: nauc_ndcg_at_1_std value: -5.666087824854275 - type: nauc_ndcg_at_20_diff1 value: 35.840674942997325 - type: nauc_ndcg_at_20_max value: 17.933986692165053 - type: nauc_ndcg_at_20_std value: -5.2137027245505205 - type: nauc_ndcg_at_3_diff1 value: 38.04420087752632 - type: nauc_ndcg_at_3_max value: 17.12908674549184 - type: nauc_ndcg_at_3_std value: -6.5879484556209595 - type: nauc_ndcg_at_5_diff1 value: 36.76262837789462 - type: nauc_ndcg_at_5_max value: 17.602322681433666 - type: nauc_ndcg_at_5_std value: -5.43250263819642 - type: 
nauc_precision_at_1000_diff1 value: 6.206827402965226 - type: nauc_precision_at_1000_max value: 20.518766519942027 - type: nauc_precision_at_1000_std value: 12.6849839612137 - type: nauc_precision_at_100_diff1 value: 17.95328955808249 - type: nauc_precision_at_100_max value: 24.488415170072823 - type: nauc_precision_at_100_std value: 8.318427798621334 - type: nauc_precision_at_10_diff1 value: 24.95807708093173 - type: nauc_precision_at_10_max value: 21.14345372502348 - type: nauc_precision_at_10_std value: -3.1126086789686704 - type: nauc_precision_at_1_diff1 value: 42.09299252574772 - type: nauc_precision_at_1_max value: 24.33888118839859 - type: nauc_precision_at_1_std value: -5.666087824854275 - type: nauc_precision_at_20_diff1 value: 22.984681114976453 - type: nauc_precision_at_20_max value: 21.853967424797396 - type: nauc_precision_at_20_std value: 0.07620414784835099 - type: nauc_precision_at_3_diff1 value: 31.59484266217764 - type: nauc_precision_at_3_max value: 16.983380178190778 - type: nauc_precision_at_3_std value: -5.539496681361992 - type: nauc_precision_at_5_diff1 value: 27.842741210601368 - type: nauc_precision_at_5_max value: 19.67171996161724 - type: nauc_precision_at_5_std value: -2.6999602559382043 - type: nauc_recall_at_1000_diff1 value: 19.224949841010464 - type: nauc_recall_at_1000_max value: 18.457171603445914 - type: nauc_recall_at_1000_std value: 22.347110023460264 - type: nauc_recall_at_100_diff1 value: 27.573048738296507 - type: nauc_recall_at_100_max value: 15.701035991956289 - type: nauc_recall_at_100_std value: 5.924963398447016 - type: nauc_recall_at_10_diff1 value: 29.69657755110037 - type: nauc_recall_at_10_max value: 12.97000604361471 - type: nauc_recall_at_10_std value: -5.416107045994844 - type: nauc_recall_at_1_diff1 value: 42.98313461413959 - type: nauc_recall_at_1_max value: 21.177240393143098 - type: nauc_recall_at_1_std value: -7.143850386196276 - type: nauc_recall_at_20_diff1 value: 29.040453658640118 - type: nauc_recall_at_20_max value: 14.243344703914374 - type: nauc_recall_at_20_std value: -3.015043773525295 - type: nauc_recall_at_3_diff1 value: 34.83950527042068 - type: nauc_recall_at_3_max value: 11.569623342194008 - type: nauc_recall_at_3_std value: -6.213973770001328 - type: nauc_recall_at_5_diff1 value: 32.204318355138106 - type: nauc_recall_at_5_max value: 13.42199856062887 - type: nauc_recall_at_5_std value: -4.019223300509159 - type: ndcg_at_1 value: 19.057 - type: ndcg_at_10 value: 24.915000000000003 - type: ndcg_at_100 value: 29.858 - type: ndcg_at_1000 value: 33.267 - type: ndcg_at_20 value: 26.544 - type: ndcg_at_3 value: 21.45 - type: ndcg_at_5 value: 23.089000000000002 - type: precision_at_1 value: 19.057 - type: precision_at_10 value: 4.601 - type: precision_at_100 value: 0.859 - type: precision_at_1000 value: 0.133 - type: precision_at_20 value: 2.82 - type: precision_at_3 value: 10.138 - type: precision_at_5 value: 7.430000000000001 - type: recall_at_1 value: 15.504999999999999 - type: recall_at_10 value: 33.052 - type: recall_at_100 value: 55.212 - type: recall_at_1000 value: 78.97 - type: recall_at_20 value: 38.865 - type: recall_at_3 value: 23.125 - type: recall_at_5 value: 27.357 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval (default) type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: main_score value: 16.933 - type: map_at_1 value: 9.391 - type: map_at_10 value: 13.785 - type: map_at_100 value: 14.832999999999998 - 
type: map_at_1000 value: 14.97 - type: map_at_20 value: 14.299999999999999 - type: map_at_3 value: 12.192 - type: map_at_5 value: 13.104 - type: mrr_at_1 value: 11.986301369863012 - type: mrr_at_10 value: 16.76329093281149 - type: mrr_at_100 value: 17.779884512390392 - type: mrr_at_1000 value: 17.882629462505502 - type: mrr_at_20 value: 17.298602789280416 - type: mrr_at_3 value: 15.106544901065444 - type: mrr_at_5 value: 16.059741248097417 - type: nauc_map_at_1000_diff1 value: 36.119633695463484 - type: nauc_map_at_1000_max value: 17.825358789806963 - type: nauc_map_at_1000_std value: -2.678306157407544 - type: nauc_map_at_100_diff1 value: 36.136511257676624 - type: nauc_map_at_100_max value: 17.791171504175814 - type: nauc_map_at_100_std value: -2.712161260291591 - type: nauc_map_at_10_diff1 value: 36.414283638871204 - type: nauc_map_at_10_max value: 17.33919844365509 - type: nauc_map_at_10_std value: -3.596581939565149 - type: nauc_map_at_1_diff1 value: 43.76928915162838 - type: nauc_map_at_1_max value: 18.65584868511747 - type: nauc_map_at_1_std value: -3.3203940049113925 - type: nauc_map_at_20_diff1 value: 36.390706892829236 - type: nauc_map_at_20_max value: 17.713189000561734 - type: nauc_map_at_20_std value: -3.3096832058190686 - type: nauc_map_at_3_diff1 value: 39.40208757504614 - type: nauc_map_at_3_max value: 17.734548958019822 - type: nauc_map_at_3_std value: -3.7767167790425376 - type: nauc_map_at_5_diff1 value: 37.31691641307714 - type: nauc_map_at_5_max value: 17.824917859595036 - type: nauc_map_at_5_std value: -4.341793743354893 - type: nauc_mrr_at_1000_diff1 value: 31.845850294564006 - type: nauc_mrr_at_1000_max value: 20.844113037057696 - type: nauc_mrr_at_1000_std value: -2.969718761532978 - type: nauc_mrr_at_100_diff1 value: 31.833171076952905 - type: nauc_mrr_at_100_max value: 20.832754686515557 - type: nauc_mrr_at_100_std value: -2.9684641146406743 - type: nauc_mrr_at_10_diff1 value: 31.975383495650654 - type: nauc_mrr_at_10_max value: 20.741551226715718 - type: nauc_mrr_at_10_std value: -3.308168222228622 - type: nauc_mrr_at_1_diff1 value: 36.8962724905663 - type: nauc_mrr_at_1_max value: 21.08515026265049 - type: nauc_mrr_at_1_std value: -3.324764670910975 - type: nauc_mrr_at_20_diff1 value: 31.97142874389304 - type: nauc_mrr_at_20_max value: 20.825942350517384 - type: nauc_mrr_at_20_std value: -3.3615147616814536 - type: nauc_mrr_at_3_diff1 value: 34.43852472523908 - type: nauc_mrr_at_3_max value: 21.54594535376395 - type: nauc_mrr_at_3_std value: -3.1112804192797707 - type: nauc_mrr_at_5_diff1 value: 32.874215613900375 - type: nauc_mrr_at_5_max value: 21.053271555386928 - type: nauc_mrr_at_5_std value: -3.747293302434281 - type: nauc_ndcg_at_1000_diff1 value: 31.454242290151434 - type: nauc_ndcg_at_1000_max value: 18.489639899176066 - type: nauc_ndcg_at_1000_std value: 1.3159370438460316 - type: nauc_ndcg_at_100_diff1 value: 31.25481472001158 - type: nauc_ndcg_at_100_max value: 18.086139248726578 - type: nauc_ndcg_at_100_std value: 0.7205652535273769 - type: nauc_ndcg_at_10_diff1 value: 32.52727699271849 - type: nauc_ndcg_at_10_max value: 17.237486979718312 - type: nauc_ndcg_at_10_std value: -3.0915552982078935 - type: nauc_ndcg_at_1_diff1 value: 36.8962724905663 - type: nauc_ndcg_at_1_max value: 21.08515026265049 - type: nauc_ndcg_at_1_std value: -3.324764670910975 - type: nauc_ndcg_at_20_diff1 value: 32.50052068294007 - type: nauc_ndcg_at_20_max value: 18.18091699705452 - type: nauc_ndcg_at_20_std value: -2.545082654261116 - type: nauc_ndcg_at_3_diff1 value: 
36.76262984256575 - type: nauc_ndcg_at_3_max value: 18.715225732805465 - type: nauc_ndcg_at_3_std value: -3.3574761304071457 - type: nauc_ndcg_at_5_diff1 value: 34.22831050785461 - type: nauc_ndcg_at_5_max value: 18.329756369078734 - type: nauc_ndcg_at_5_std value: -4.501968061129472 - type: nauc_precision_at_1000_diff1 value: 4.627456337422589 - type: nauc_precision_at_1000_max value: 8.763785016596563 - type: nauc_precision_at_1000_std value: 5.798944013054676 - type: nauc_precision_at_100_diff1 value: 12.785405156496902 - type: nauc_precision_at_100_max value: 15.913251592907118 - type: nauc_precision_at_100_std value: 7.922950006883855 - type: nauc_precision_at_10_diff1 value: 21.671324247697545 - type: nauc_precision_at_10_max value: 16.844686528527216 - type: nauc_precision_at_10_std value: -0.8935902484243391 - type: nauc_precision_at_1_diff1 value: 36.8962724905663 - type: nauc_precision_at_1_max value: 21.08515026265049 - type: nauc_precision_at_1_std value: -3.324764670910975 - type: nauc_precision_at_20_diff1 value: 21.990648382513978 - type: nauc_precision_at_20_max value: 20.186544647997685 - type: nauc_precision_at_20_std value: 0.9473827309819518 - type: nauc_precision_at_3_diff1 value: 29.809157912293742 - type: nauc_precision_at_3_max value: 20.817234555254064 - type: nauc_precision_at_3_std value: -2.9715364332106087 - type: nauc_precision_at_5_diff1 value: 24.812305580415774 - type: nauc_precision_at_5_max value: 19.550818593102022 - type: nauc_precision_at_5_std value: -4.725734397876206 - type: nauc_recall_at_1000_diff1 value: 19.311306554927057 - type: nauc_recall_at_1000_max value: 15.928723354100303 - type: nauc_recall_at_1000_std value: 20.082823111228784 - type: nauc_recall_at_100_diff1 value: 21.25168897405789 - type: nauc_recall_at_100_max value: 15.00794104303515 - type: nauc_recall_at_100_std value: 11.12128776821777 - type: nauc_recall_at_10_diff1 value: 25.198073470444715 - type: nauc_recall_at_10_max value: 13.548174607713822 - type: nauc_recall_at_10_std value: -1.963637599241129 - type: nauc_recall_at_1_diff1 value: 43.76928915162838 - type: nauc_recall_at_1_max value: 18.65584868511747 - type: nauc_recall_at_1_std value: -3.3203940049113925 - type: nauc_recall_at_20_diff1 value: 25.428294962767577 - type: nauc_recall_at_20_max value: 16.232380758977776 - type: nauc_recall_at_20_std value: -0.6565322850593908 - type: nauc_recall_at_3_diff1 value: 35.62311837406267 - type: nauc_recall_at_3_max value: 16.099598243416118 - type: nauc_recall_at_3_std value: -4.061382736951024 - type: nauc_recall_at_5_diff1 value: 29.30259587685098 - type: nauc_recall_at_5_max value: 15.610376688031682 - type: nauc_recall_at_5_std value: -5.480201062659099 - type: ndcg_at_1 value: 11.985999999999999 - type: ndcg_at_10 value: 16.933 - type: ndcg_at_100 value: 22.411 - type: ndcg_at_1000 value: 26.038 - type: ndcg_at_20 value: 18.790000000000003 - type: ndcg_at_3 value: 13.943 - type: ndcg_at_5 value: 15.389 - type: precision_at_1 value: 11.985999999999999 - type: precision_at_10 value: 3.253 - type: precision_at_100 value: 0.726 - type: precision_at_1000 value: 0.122 - type: precision_at_20 value: 2.146 - type: precision_at_3 value: 6.773 - type: precision_at_5 value: 5.0680000000000005 - type: recall_at_1 value: 9.391 - type: recall_at_10 value: 23.697 - type: recall_at_100 value: 48.18 - type: recall_at_1000 value: 74.207 - type: recall_at_20 value: 30.489 - type: recall_at_3 value: 15.616 - type: recall_at_5 value: 19.243 - task: type: Retrieval dataset: name: MTEB 
CQADupstackRetrieval (default) type: CQADupstackRetrieval_is_a_combined_dataset config: default split: test revision: CQADupstackRetrieval_is_a_combined_dataset metrics: - type: main_score value: 18.395166666666665 - type: ndcg_at_10 value: 18.395166666666665 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval (default) type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: main_score value: 13.764000000000001 - type: map_at_1 value: 7.407 - type: map_at_10 value: 11.181000000000001 - type: map_at_100 value: 11.999 - type: map_at_1000 value: 12.086 - type: map_at_20 value: 11.643 - type: map_at_3 value: 9.808 - type: map_at_5 value: 10.527000000000001 - type: mrr_at_1 value: 9.049079754601227 - type: mrr_at_10 value: 13.019281332164766 - type: mrr_at_100 value: 13.83437647561529 - type: mrr_at_1000 value: 13.90849595726279 - type: mrr_at_20 value: 13.453243790394076 - type: mrr_at_3 value: 11.605316973415134 - type: mrr_at_5 value: 12.364519427402865 - type: nauc_map_at_1000_diff1 value: 22.684386058096422 - type: nauc_map_at_1000_max value: 9.145886674872735 - type: nauc_map_at_1000_std value: -4.501342855209255 - type: nauc_map_at_100_diff1 value: 22.663345222436842 - type: nauc_map_at_100_max value: 9.14775447704162 - type: nauc_map_at_100_std value: -4.546649058281547 - type: nauc_map_at_10_diff1 value: 22.42971510386587 - type: nauc_map_at_10_max value: 9.534064523536093 - type: nauc_map_at_10_std value: -5.5964681895716275 - type: nauc_map_at_1_diff1 value: 27.011177559854126 - type: nauc_map_at_1_max value: 10.58197198530854 - type: nauc_map_at_1_std value: -6.595308662343213 - type: nauc_map_at_20_diff1 value: 22.6849314678422 - type: nauc_map_at_20_max value: 9.130124364237668 - type: nauc_map_at_20_std value: -4.6306079677985545 - type: nauc_map_at_3_diff1 value: 23.765755755951382 - type: nauc_map_at_3_max value: 8.386673560382699 - type: nauc_map_at_3_std value: -6.820598051025553 - type: nauc_map_at_5_diff1 value: 22.72172408624874 - type: nauc_map_at_5_max value: 9.94900277817333 - type: nauc_map_at_5_std value: -6.015194799527161 - type: nauc_mrr_at_1000_diff1 value: 24.19050948215309 - type: nauc_mrr_at_1000_max value: 10.461180380214094 - type: nauc_mrr_at_1000_std value: -2.9941483783767304 - type: nauc_mrr_at_100_diff1 value: 24.19585382086687 - type: nauc_mrr_at_100_max value: 10.486570575603626 - type: nauc_mrr_at_100_std value: -3.0182341126004104 - type: nauc_mrr_at_10_diff1 value: 24.190126428290462 - type: nauc_mrr_at_10_max value: 11.126417890087135 - type: nauc_mrr_at_10_std value: -3.839141693577256 - type: nauc_mrr_at_1_diff1 value: 28.571881597930947 - type: nauc_mrr_at_1_max value: 11.543441276788943 - type: nauc_mrr_at_1_std value: -5.512242856627392 - type: nauc_mrr_at_20_diff1 value: 24.203108205389672 - type: nauc_mrr_at_20_max value: 10.50497556809877 - type: nauc_mrr_at_20_std value: -3.082934311249442 - type: nauc_mrr_at_3_diff1 value: 25.98207063932455 - type: nauc_mrr_at_3_max value: 9.94844316319691 - type: nauc_mrr_at_3_std value: -5.0062389923354935 - type: nauc_mrr_at_5_diff1 value: 24.61646227495659 - type: nauc_mrr_at_5_max value: 11.648384719673203 - type: nauc_mrr_at_5_std value: -4.375379994287079 - type: nauc_ndcg_at_1000_diff1 value: 21.43768701111034 - type: nauc_ndcg_at_1000_max value: 8.273252874349057 - type: nauc_ndcg_at_1000_std value: 0.10670202820650984 - type: nauc_ndcg_at_100_diff1 value: 21.4746954073475 - type: nauc_ndcg_at_100_max value: 
7.896808760471978 - type: nauc_ndcg_at_100_std value: -1.2410245357577705 - type: nauc_ndcg_at_10_diff1 value: 21.13137898867002 - type: nauc_ndcg_at_10_max value: 9.755235332270159 - type: nauc_ndcg_at_10_std value: -4.248419933008658 - type: nauc_ndcg_at_1_diff1 value: 28.571881597930947 - type: nauc_ndcg_at_1_max value: 11.543441276788943 - type: nauc_ndcg_at_1_std value: -5.512242856627392 - type: nauc_ndcg_at_20_diff1 value: 21.619408394641066 - type: nauc_ndcg_at_20_max value: 8.114217280583363 - type: nauc_ndcg_at_20_std value: -1.6730336682644353 - type: nauc_ndcg_at_3_diff1 value: 24.16497986310871 - type: nauc_ndcg_at_3_max value: 8.400666596386994 - type: nauc_ndcg_at_3_std value: -6.307687835437969 - type: nauc_ndcg_at_5_diff1 value: 21.80028821367463 - type: nauc_ndcg_at_5_max value: 11.029219640459228 - type: nauc_ndcg_at_5_std value: -5.1729515331734355 - type: nauc_precision_at_1000_diff1 value: 18.426797014316406 - type: nauc_precision_at_1000_max value: 11.215745964862423 - type: nauc_precision_at_1000_std value: 10.130362328925651 - type: nauc_precision_at_100_diff1 value: 22.777527984600223 - type: nauc_precision_at_100_max value: 8.693126368523261 - type: nauc_precision_at_100_std value: 6.849981524237866 - type: nauc_precision_at_10_diff1 value: 21.32311537782387 - type: nauc_precision_at_10_max value: 12.466768131932003 - type: nauc_precision_at_10_std value: -0.24380397765811196 - type: nauc_precision_at_1_diff1 value: 28.571881597930947 - type: nauc_precision_at_1_max value: 11.543441276788943 - type: nauc_precision_at_1_std value: -5.512242856627392 - type: nauc_precision_at_20_diff1 value: 23.320697000941518 - type: nauc_precision_at_20_max value: 9.416642932870655 - type: nauc_precision_at_20_std value: 6.117048580465784 - type: nauc_precision_at_3_diff1 value: 25.60854499214357 - type: nauc_precision_at_3_max value: 9.327816784887316 - type: nauc_precision_at_3_std value: -4.164690223373803 - type: nauc_precision_at_5_diff1 value: 22.487293449343895 - type: nauc_precision_at_5_max value: 15.554122997255721 - type: nauc_precision_at_5_std value: -2.170204158965489 - type: nauc_recall_at_1000_diff1 value: 15.20476822474712 - type: nauc_recall_at_1000_max value: 4.204822145176049 - type: nauc_recall_at_1000_std value: 12.879935852847554 - type: nauc_recall_at_100_diff1 value: 16.850604775963244 - type: nauc_recall_at_100_max value: 2.767499477935308 - type: nauc_recall_at_100_std value: 4.095047171340664 - type: nauc_recall_at_10_diff1 value: 16.689205215199248 - type: nauc_recall_at_10_max value: 8.378648312390819 - type: nauc_recall_at_10_std value: -2.8562137399428598 - type: nauc_recall_at_1_diff1 value: 27.011177559854126 - type: nauc_recall_at_1_max value: 10.58197198530854 - type: nauc_recall_at_1_std value: -6.595308662343213 - type: nauc_recall_at_20_diff1 value: 17.87665261251624 - type: nauc_recall_at_20_max value: 3.8134273552005995 - type: nauc_recall_at_20_std value: 3.3359977154662634 - type: nauc_recall_at_3_diff1 value: 20.670877063544086 - type: nauc_recall_at_3_max value: 6.248798024686606 - type: nauc_recall_at_3_std value: -7.011222642729971 - type: nauc_recall_at_5_diff1 value: 17.72007176247167 - type: nauc_recall_at_5_max value: 11.43834990123289 - type: nauc_recall_at_5_std value: -4.729213831313457 - type: ndcg_at_1 value: 9.049 - type: ndcg_at_10 value: 13.764000000000001 - type: ndcg_at_100 value: 17.992 - type: ndcg_at_1000 value: 20.558 - type: ndcg_at_20 value: 15.318999999999999 - type: ndcg_at_3 value: 11.038 - type: ndcg_at_5 
value: 12.218 - type: precision_at_1 value: 9.049 - type: precision_at_10 value: 2.469 - type: precision_at_100 value: 0.505 - type: precision_at_1000 value: 0.08 - type: precision_at_20 value: 1.603 - type: precision_at_3 value: 5.061 - type: precision_at_5 value: 3.773 - type: recall_at_1 value: 7.407 - type: recall_at_10 value: 20.158 - type: recall_at_100 value: 39.701 - type: recall_at_1000 value: 59.205 - type: recall_at_20 value: 25.887999999999998 - type: recall_at_3 value: 12.626999999999999 - type: recall_at_5 value: 15.488 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval (default) type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: main_score value: 9.181000000000001 - type: map_at_1 value: 4.861 - type: map_at_10 value: 7.306 - type: map_at_100 value: 7.8 - type: map_at_1000 value: 7.8950000000000005 - type: map_at_20 value: 7.542 - type: map_at_3 value: 6.439 - type: map_at_5 value: 6.869 - type: mrr_at_1 value: 6.159669649002065 - type: mrr_at_10 value: 9.153789641573534 - type: mrr_at_100 value: 9.669799893317009 - type: mrr_at_1000 value: 9.752120431878343 - type: mrr_at_20 value: 9.406808091404807 - type: mrr_at_3 value: 8.138334480385415 - type: mrr_at_5 value: 8.628699242945634 - type: nauc_map_at_1000_diff1 value: 26.690576069734234 - type: nauc_map_at_1000_max value: 17.094449483335218 - type: nauc_map_at_1000_std value: -3.774366560350282 - type: nauc_map_at_100_diff1 value: 26.783605375597645 - type: nauc_map_at_100_max value: 17.080839761543665 - type: nauc_map_at_100_std value: -3.9576344084646853 - type: nauc_map_at_10_diff1 value: 27.63020311826964 - type: nauc_map_at_10_max value: 17.336825770328517 - type: nauc_map_at_10_std value: -4.814817253146819 - type: nauc_map_at_1_diff1 value: 38.766697715414516 - type: nauc_map_at_1_max value: 24.371350483475744 - type: nauc_map_at_1_std value: -8.173284901113332 - type: nauc_map_at_20_diff1 value: 27.08870073967262 - type: nauc_map_at_20_max value: 17.118664395958582 - type: nauc_map_at_20_std value: -4.446220849289796 - type: nauc_map_at_3_diff1 value: 30.66081471780112 - type: nauc_map_at_3_max value: 19.012237592897343 - type: nauc_map_at_3_std value: -6.627184332678718 - type: nauc_map_at_5_diff1 value: 29.333535675239347 - type: nauc_map_at_5_max value: 18.089350024708615 - type: nauc_map_at_5_std value: -5.72123478612525 - type: nauc_mrr_at_1000_diff1 value: 24.73352603489345 - type: nauc_mrr_at_1000_max value: 19.79900221948312 - type: nauc_mrr_at_1000_std value: -2.862077159418825 - type: nauc_mrr_at_100_diff1 value: 24.782609923929535 - type: nauc_mrr_at_100_max value: 19.805461393582963 - type: nauc_mrr_at_100_std value: -2.930689128967593 - type: nauc_mrr_at_10_diff1 value: 25.434777049042655 - type: nauc_mrr_at_10_max value: 20.181531593368128 - type: nauc_mrr_at_10_std value: -3.6076663995168774 - type: nauc_mrr_at_1_diff1 value: 35.62063854238608 - type: nauc_mrr_at_1_max value: 26.799910642533735 - type: nauc_mrr_at_1_std value: -7.609406566642959 - type: nauc_mrr_at_20_diff1 value: 24.992883725434815 - type: nauc_mrr_at_20_max value: 19.92741978259664 - type: nauc_mrr_at_20_std value: -3.2417052166595455 - type: nauc_mrr_at_3_diff1 value: 27.922046683219946 - type: nauc_mrr_at_3_max value: 21.9282015050312 - type: nauc_mrr_at_3_std value: -5.590575647868078 - type: nauc_mrr_at_5_diff1 value: 26.89070716968189 - type: nauc_mrr_at_5_max value: 21.073432913750224 - type: nauc_mrr_at_5_std value: -4.481614304446297 - 
type: nauc_ndcg_at_1000_diff1 value: 19.568651831011014 - type: nauc_ndcg_at_1000_max value: 14.122372407292808 - type: nauc_ndcg_at_1000_std value: 3.7957207135672597 - type: nauc_ndcg_at_100_diff1 value: 20.80268793272095 - type: nauc_ndcg_at_100_max value: 14.356177495251437 - type: nauc_ndcg_at_100_std value: 0.7863981963465579 - type: nauc_ndcg_at_10_diff1 value: 23.3461518500026 - type: nauc_ndcg_at_10_max value: 15.57326961854722 - type: nauc_ndcg_at_10_std value: -2.7445931345312284 - type: nauc_ndcg_at_1_diff1 value: 35.62063854238608 - type: nauc_ndcg_at_1_max value: 26.799910642533735 - type: nauc_ndcg_at_1_std value: -7.609406566642959 - type: nauc_ndcg_at_20_diff1 value: 22.04481909899471 - type: nauc_ndcg_at_20_max value: 14.937866014666568 - type: nauc_ndcg_at_20_std value: -1.747008165250061 - type: nauc_ndcg_at_3_diff1 value: 27.939895558816584 - type: nauc_ndcg_at_3_max value: 19.034512289670218 - type: nauc_ndcg_at_3_std value: -5.8325182778108795 - type: nauc_ndcg_at_5_diff1 value: 26.486633537077754 - type: nauc_ndcg_at_5_max value: 17.271834422924798 - type: nauc_ndcg_at_5_std value: -4.409805002517824 - type: nauc_precision_at_1000_diff1 value: 6.491111119228375 - type: nauc_precision_at_1000_max value: 18.443725307197724 - type: nauc_precision_at_1000_std value: 22.938787139825433 - type: nauc_precision_at_100_diff1 value: 10.17740447024087 - type: nauc_precision_at_100_max value: 17.049105330751306 - type: nauc_precision_at_100_std value: 12.762513963286978 - type: nauc_precision_at_10_diff1 value: 13.67439803472887 - type: nauc_precision_at_10_max value: 16.055467906792828 - type: nauc_precision_at_10_std value: 3.8405675136717323 - type: nauc_precision_at_1_diff1 value: 35.62063854238608 - type: nauc_precision_at_1_max value: 26.799910642533735 - type: nauc_precision_at_1_std value: -7.609406566642959 - type: nauc_precision_at_20_diff1 value: 12.306954777624213 - type: nauc_precision_at_20_max value: 15.96836613953479 - type: nauc_precision_at_20_std value: 6.70148311776044 - type: nauc_precision_at_3_diff1 value: 21.855506525702847 - type: nauc_precision_at_3_max value: 19.209267745003704 - type: nauc_precision_at_3_std value: -3.8119776477478413 - type: nauc_precision_at_5_diff1 value: 19.156111435062012 - type: nauc_precision_at_5_max value: 18.34440488085919 - type: nauc_precision_at_5_std value: -0.03928868519881514 - type: nauc_recall_at_1000_diff1 value: 7.849926346079982 - type: nauc_recall_at_1000_max value: 5.306371454314062 - type: nauc_recall_at_1000_std value: 17.18954803503502 - type: nauc_recall_at_100_diff1 value: 11.99060160309378 - type: nauc_recall_at_100_max value: 7.243119921489159 - type: nauc_recall_at_100_std value: 7.724576636146561 - type: nauc_recall_at_10_diff1 value: 15.951856271318244 - type: nauc_recall_at_10_max value: 9.03241092518941 - type: nauc_recall_at_10_std value: 0.2357705274357088 - type: nauc_recall_at_1_diff1 value: 38.766697715414516 - type: nauc_recall_at_1_max value: 24.371350483475744 - type: nauc_recall_at_1_std value: -8.173284901113332 - type: nauc_recall_at_20_diff1 value: 13.76245045354016 - type: nauc_recall_at_20_max value: 8.303909450838308 - type: nauc_recall_at_20_std value: 1.9797360213278055 - type: nauc_recall_at_3_diff1 value: 25.185146352227978 - type: nauc_recall_at_3_max value: 14.711935197854292 - type: nauc_recall_at_3_std value: -5.598795458243915 - type: nauc_recall_at_5_diff1 value: 21.958052739428716 - type: nauc_recall_at_5_max value: 11.804079831463127 - type: nauc_recall_at_5_std value: 
-3.0315767806264198 - type: ndcg_at_1 value: 6.16 - type: ndcg_at_10 value: 9.181000000000001 - type: ndcg_at_100 value: 11.946 - type: ndcg_at_1000 value: 14.71 - type: ndcg_at_20 value: 10.006 - type: ndcg_at_3 value: 7.4639999999999995 - type: ndcg_at_5 value: 8.133 - type: precision_at_1 value: 6.16 - type: precision_at_10 value: 1.844 - type: precision_at_100 value: 0.395 - type: precision_at_1000 value: 0.076 - type: precision_at_20 value: 1.173 - type: precision_at_3 value: 3.7159999999999997 - type: precision_at_5 value: 2.746 - type: recall_at_1 value: 4.861 - type: recall_at_10 value: 13.07 - type: recall_at_100 value: 25.946 - type: recall_at_1000 value: 46.434 - type: recall_at_20 value: 16.061 - type: recall_at_3 value: 8.325000000000001 - type: recall_at_5 value: 10.020999999999999 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval (default) type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: main_score value: 16.589000000000002 - type: map_at_1 value: 10.348 - type: map_at_10 value: 13.808000000000002 - type: map_at_100 value: 14.443 - type: map_at_1000 value: 14.551 - type: map_at_20 value: 14.106 - type: map_at_3 value: 12.454 - type: map_at_5 value: 13.173000000000002 - type: mrr_at_1 value: 12.406716417910447 - type: mrr_at_10 value: 16.39392324093816 - type: mrr_at_100 value: 17.01727060954383 - type: mrr_at_1000 value: 17.113417907096533 - type: mrr_at_20 value: 16.68964517846198 - type: mrr_at_3 value: 14.925373134328357 - type: mrr_at_5 value: 15.69962686567164 - type: nauc_map_at_1000_diff1 value: 37.96305817045708 - type: nauc_map_at_1000_max value: 26.063580688542388 - type: nauc_map_at_1000_std value: -4.831229848566574 - type: nauc_map_at_100_diff1 value: 37.972522088343716 - type: nauc_map_at_100_max value: 26.04779581576852 - type: nauc_map_at_100_std value: -4.909662655473513 - type: nauc_map_at_10_diff1 value: 38.35929365925692 - type: nauc_map_at_10_max value: 26.201184962039935 - type: nauc_map_at_10_std value: -5.4363000965541 - type: nauc_map_at_1_diff1 value: 48.68557105246294 - type: nauc_map_at_1_max value: 32.48532434140668 - type: nauc_map_at_1_std value: -4.93862403800474 - type: nauc_map_at_20_diff1 value: 38.20690594362951 - type: nauc_map_at_20_max value: 26.03970982522202 - type: nauc_map_at_20_std value: -5.242556500809629 - type: nauc_map_at_3_diff1 value: 41.428803061011465 - type: nauc_map_at_3_max value: 27.47150922034986 - type: nauc_map_at_3_std value: -5.434283129605092 - type: nauc_map_at_5_diff1 value: 39.67254166875351 - type: nauc_map_at_5_max value: 26.989621759032655 - type: nauc_map_at_5_std value: -5.360116959183613 - type: nauc_mrr_at_1000_diff1 value: 36.163006365244435 - type: nauc_mrr_at_1000_max value: 27.963611146733218 - type: nauc_mrr_at_1000_std value: -4.276969598287321 - type: nauc_mrr_at_100_diff1 value: 36.147594105582385 - type: nauc_mrr_at_100_max value: 27.963649956111542 - type: nauc_mrr_at_100_std value: -4.3271683004962 - type: nauc_mrr_at_10_diff1 value: 36.496807206746176 - type: nauc_mrr_at_10_max value: 28.120842911547534 - type: nauc_mrr_at_10_std value: -4.87122638010671 - type: nauc_mrr_at_1_diff1 value: 46.33099860144716 - type: nauc_mrr_at_1_max value: 35.4859105639909 - type: nauc_mrr_at_1_std value: -3.2263281209085566 - type: nauc_mrr_at_20_diff1 value: 36.30705560054853 - type: nauc_mrr_at_20_max value: 27.976401984511075 - type: nauc_mrr_at_20_std value: -4.715772425909112 - type: nauc_mrr_at_3_diff1 value: 
38.96056551025221 - type: nauc_mrr_at_3_max value: 29.51099966763278 - type: nauc_mrr_at_3_std value: -4.6236213229116165 - type: nauc_mrr_at_5_diff1 value: 37.77817956075975 - type: nauc_mrr_at_5_max value: 29.011475146701326 - type: nauc_mrr_at_5_std value: -4.718243588613509 - type: nauc_ndcg_at_1000_diff1 value: 31.66466628463146 - type: nauc_ndcg_at_1000_max value: 23.801406394456677 - type: nauc_ndcg_at_1000_std value: -0.8537022176476805 - type: nauc_ndcg_at_100_diff1 value: 32.12324111984138 - type: nauc_ndcg_at_100_max value: 23.531317692993255 - type: nauc_ndcg_at_100_std value: -2.5141257246667847 - type: nauc_ndcg_at_10_diff1 value: 33.65961130642343 - type: nauc_ndcg_at_10_max value: 23.852547124966375 - type: nauc_ndcg_at_10_std value: -5.694261022509329 - type: nauc_ndcg_at_1_diff1 value: 46.33099860144716 - type: nauc_ndcg_at_1_max value: 35.4859105639909 - type: nauc_ndcg_at_1_std value: -3.2263281209085566 - type: nauc_ndcg_at_20_diff1 value: 33.2596461543923 - type: nauc_ndcg_at_20_max value: 23.410367154540957 - type: nauc_ndcg_at_20_std value: -4.993438821759135 - type: nauc_ndcg_at_3_diff1 value: 38.49302702240003 - type: nauc_ndcg_at_3_max value: 26.91849498480658 - type: nauc_ndcg_at_3_std value: -5.507535655688577 - type: nauc_ndcg_at_5_diff1 value: 36.34741479071839 - type: nauc_ndcg_at_5_max value: 25.867932454692088 - type: nauc_ndcg_at_5_std value: -5.51688925853437 - type: nauc_precision_at_1000_diff1 value: 3.4780497920711326 - type: nauc_precision_at_1000_max value: 18.21263960599663 - type: nauc_precision_at_1000_std value: 14.085513436914637 - type: nauc_precision_at_100_diff1 value: 14.36699897085701 - type: nauc_precision_at_100_max value: 19.32065741728386 - type: nauc_precision_at_100_std value: 5.71990367969069 - type: nauc_precision_at_10_diff1 value: 20.767692427168015 - type: nauc_precision_at_10_max value: 19.73101890683137 - type: nauc_precision_at_10_std value: -6.069817243914008 - type: nauc_precision_at_1_diff1 value: 46.33099860144716 - type: nauc_precision_at_1_max value: 35.4859105639909 - type: nauc_precision_at_1_std value: -3.2263281209085566 - type: nauc_precision_at_20_diff1 value: 19.891096552387722 - type: nauc_precision_at_20_max value: 18.826506625959798 - type: nauc_precision_at_20_std value: -3.4269595182918033 - type: nauc_precision_at_3_diff1 value: 31.43880801440455 - type: nauc_precision_at_3_max value: 25.061447990382852 - type: nauc_precision_at_3_std value: -5.354356608479064 - type: nauc_precision_at_5_diff1 value: 26.79336768034434 - type: nauc_precision_at_5_max value: 23.477964523010396 - type: nauc_precision_at_5_std value: -5.762844318489033 - type: nauc_recall_at_1000_diff1 value: 12.463469349742812 - type: nauc_recall_at_1000_max value: 13.262333629170845 - type: nauc_recall_at_1000_std value: 15.999509660589933 - type: nauc_recall_at_100_diff1 value: 19.025729138081836 - type: nauc_recall_at_100_max value: 15.403625041530212 - type: nauc_recall_at_100_std value: 4.996443705774602 - type: nauc_recall_at_10_diff1 value: 23.168831160154607 - type: nauc_recall_at_10_max value: 16.001264079205242 - type: nauc_recall_at_10_std value: -6.242620935676047 - type: nauc_recall_at_1_diff1 value: 48.68557105246294 - type: nauc_recall_at_1_max value: 32.48532434140668 - type: nauc_recall_at_1_std value: -4.93862403800474 - type: nauc_recall_at_20_diff1 value: 22.74757458816546 - type: nauc_recall_at_20_max value: 15.196173605729458 - type: nauc_recall_at_20_std value: -4.209222520321505 - type: nauc_recall_at_3_diff1 value: 
34.221157190727794 - type: nauc_recall_at_3_max value: 22.101122375914557 - type: nauc_recall_at_3_std value: -6.4312864088154855 - type: nauc_recall_at_5_diff1 value: 30.00246916642969 - type: nauc_recall_at_5_max value: 20.813647492964524 - type: nauc_recall_at_5_std value: -5.963828101389924 - type: ndcg_at_1 value: 12.407 - type: ndcg_at_10 value: 16.589000000000002 - type: ndcg_at_100 value: 20.122999999999998 - type: ndcg_at_1000 value: 23.427999999999997 - type: ndcg_at_20 value: 17.622 - type: ndcg_at_3 value: 13.911000000000001 - type: ndcg_at_5 value: 15.057 - type: precision_at_1 value: 12.407 - type: precision_at_10 value: 2.92 - type: precision_at_100 value: 0.526 - type: precision_at_1000 value: 0.09 - type: precision_at_20 value: 1.73 - type: precision_at_3 value: 6.3740000000000006 - type: precision_at_5 value: 4.6080000000000005 - type: recall_at_1 value: 10.348 - type: recall_at_10 value: 22.765 - type: recall_at_100 value: 39.311 - type: recall_at_1000 value: 64.334 - type: recall_at_20 value: 26.488 - type: recall_at_3 value: 15.137 - type: recall_at_5 value: 18.132 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval (default) type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: main_score value: 20.898 - type: map_at_1 value: 10.638 - type: map_at_10 value: 16.833000000000002 - type: map_at_100 value: 17.727999999999998 - type: map_at_1000 value: 17.901 - type: map_at_20 value: 17.286 - type: map_at_3 value: 15.229999999999999 - type: map_at_5 value: 15.964 - type: mrr_at_1 value: 13.83399209486166 - type: mrr_at_10 value: 20.414862914862912 - type: mrr_at_100 value: 21.1918520823834 - type: mrr_at_1000 value: 21.280393775500254 - type: mrr_at_20 value: 20.85554487149435 - type: mrr_at_3 value: 18.906455862977598 - type: mrr_at_5 value: 19.677206851119895 - type: nauc_map_at_1000_diff1 value: 35.60581498091247 - type: nauc_map_at_1000_max value: 19.807829060830564 - type: nauc_map_at_1000_std value: -12.288904191825727 - type: nauc_map_at_100_diff1 value: 35.73031568643259 - type: nauc_map_at_100_max value: 19.888921175215273 - type: nauc_map_at_100_std value: -12.394338708086611 - type: nauc_map_at_10_diff1 value: 35.90781658193538 - type: nauc_map_at_10_max value: 20.30071517139723 - type: nauc_map_at_10_std value: -13.21353855816924 - type: nauc_map_at_1_diff1 value: 48.473719657085255 - type: nauc_map_at_1_max value: 20.581982089250136 - type: nauc_map_at_1_std value: -10.702644517489851 - type: nauc_map_at_20_diff1 value: 35.94358649592004 - type: nauc_map_at_20_max value: 20.021365660458233 - type: nauc_map_at_20_std value: -12.910422224053336 - type: nauc_map_at_3_diff1 value: 39.2414174048553 - type: nauc_map_at_3_max value: 19.121286688071976 - type: nauc_map_at_3_std value: -12.720527679135701 - type: nauc_map_at_5_diff1 value: 37.65029240515698 - type: nauc_map_at_5_max value: 19.711370835818958 - type: nauc_map_at_5_std value: -12.861505993621163 - type: nauc_mrr_at_1000_diff1 value: 32.3301643796503 - type: nauc_mrr_at_1000_max value: 20.52424230492303 - type: nauc_mrr_at_1000_std value: -12.232467571800854 - type: nauc_mrr_at_100_diff1 value: 32.346634248359955 - type: nauc_mrr_at_100_max value: 20.525320060903585 - type: nauc_mrr_at_100_std value: -12.22134936297468 - type: nauc_mrr_at_10_diff1 value: 32.22480347349119 - type: nauc_mrr_at_10_max value: 20.804493218360445 - type: nauc_mrr_at_10_std value: -12.822192933749621 - type: nauc_mrr_at_1_diff1 
value: 42.548653237693316 - type: nauc_mrr_at_1_max value: 23.06598695012915 - type: nauc_mrr_at_1_std value: -11.74488988266296 - type: nauc_mrr_at_20_diff1 value: 32.432937534124086 - type: nauc_mrr_at_20_max value: 20.579328500121203 - type: nauc_mrr_at_20_std value: -12.588012401985225 - type: nauc_mrr_at_3_diff1 value: 34.397571670501264 - type: nauc_mrr_at_3_max value: 20.13774876009483 - type: nauc_mrr_at_3_std value: -12.01068604428263 - type: nauc_mrr_at_5_diff1 value: 32.876382340593864 - type: nauc_mrr_at_5_max value: 20.272684252547506 - type: nauc_mrr_at_5_std value: -12.336258450312041 - type: nauc_ndcg_at_1000_diff1 value: 30.599477254817316 - type: nauc_ndcg_at_1000_max value: 19.58213768807632 - type: nauc_ndcg_at_1000_std value: -8.148938595988358 - type: nauc_ndcg_at_100_diff1 value: 30.913346166831733 - type: nauc_ndcg_at_100_max value: 19.779289804033745 - type: nauc_ndcg_at_100_std value: -9.057419136085338 - type: nauc_ndcg_at_10_diff1 value: 30.375159602708617 - type: nauc_ndcg_at_10_max value: 20.422870313571686 - type: nauc_ndcg_at_10_std value: -13.513106506566325 - type: nauc_ndcg_at_1_diff1 value: 42.548653237693316 - type: nauc_ndcg_at_1_max value: 23.06598695012915 - type: nauc_ndcg_at_1_std value: -11.74488988266296 - type: nauc_ndcg_at_20_diff1 value: 30.981127537056285 - type: nauc_ndcg_at_20_max value: 19.699283486395966 - type: nauc_ndcg_at_20_std value: -12.459362077789594 - type: nauc_ndcg_at_3_diff1 value: 34.20407067030529 - type: nauc_ndcg_at_3_max value: 18.300931170740117 - type: nauc_ndcg_at_3_std value: -12.336085516544653 - type: nauc_ndcg_at_5_diff1 value: 32.75690035095809 - type: nauc_ndcg_at_5_max value: 19.07389087899962 - type: nauc_ndcg_at_5_std value: -12.812135004055685 - type: nauc_precision_at_1000_diff1 value: -6.568927341932564 - type: nauc_precision_at_1000_max value: -3.713640032829482 - type: nauc_precision_at_1000_std value: 12.117240649009698 - type: nauc_precision_at_100_diff1 value: -2.9367632268748918 - type: nauc_precision_at_100_max value: 3.27216899405361 - type: nauc_precision_at_100_std value: 6.784184812065526 - type: nauc_precision_at_10_diff1 value: 11.519147346234265 - type: nauc_precision_at_10_max value: 18.13695487911042 - type: nauc_precision_at_10_std value: -11.804807048296718 - type: nauc_precision_at_1_diff1 value: 42.548653237693316 - type: nauc_precision_at_1_max value: 23.06598695012915 - type: nauc_precision_at_1_std value: -11.74488988266296 - type: nauc_precision_at_20_diff1 value: 9.60547736036805 - type: nauc_precision_at_20_max value: 13.830439559945646 - type: nauc_precision_at_20_std value: -8.977774434672613 - type: nauc_precision_at_3_diff1 value: 26.065405745771674 - type: nauc_precision_at_3_max value: 18.534736719384824 - type: nauc_precision_at_3_std value: -11.654717965450807 - type: nauc_precision_at_5_diff1 value: 20.066525503683547 - type: nauc_precision_at_5_max value: 19.133419951937 - type: nauc_precision_at_5_std value: -12.818467999888828 - type: nauc_recall_at_1000_diff1 value: 15.783538232295097 - type: nauc_recall_at_1000_max value: 14.071709448821176 - type: nauc_recall_at_1000_std value: 17.66158228025607 - type: nauc_recall_at_100_diff1 value: 21.162385324476695 - type: nauc_recall_at_100_max value: 17.145208604213767 - type: nauc_recall_at_100_std value: 3.9374103258567112 - type: nauc_recall_at_10_diff1 value: 20.699553866778857 - type: nauc_recall_at_10_max value: 21.282711211008866 - type: nauc_recall_at_10_std value: -14.179628995645633 - type: nauc_recall_at_1_diff1 
value: 48.473719657085255 - type: nauc_recall_at_1_max value: 20.581982089250136 - type: nauc_recall_at_1_std value: -10.702644517489851 - type: nauc_recall_at_20_diff1 value: 21.419587577304537 - type: nauc_recall_at_20_max value: 17.606430632714602 - type: nauc_recall_at_20_std value: -10.993318743040348 - type: nauc_recall_at_3_diff1 value: 32.07559647496913 - type: nauc_recall_at_3_max value: 17.565170643623897 - type: nauc_recall_at_3_std value: -12.19780121817959 - type: nauc_recall_at_5_diff1 value: 27.518158043297458 - type: nauc_recall_at_5_max value: 19.04014005217722 - type: nauc_recall_at_5_std value: -12.29160434365186 - type: ndcg_at_1 value: 13.834 - type: ndcg_at_10 value: 20.898 - type: ndcg_at_100 value: 25.130999999999997 - type: ndcg_at_1000 value: 28.785 - type: ndcg_at_20 value: 22.23 - type: ndcg_at_3 value: 18.234 - type: ndcg_at_5 value: 19.127 - type: precision_at_1 value: 13.834 - type: precision_at_10 value: 4.269 - type: precision_at_100 value: 0.923 - type: precision_at_1000 value: 0.178 - type: precision_at_20 value: 2.658 - type: precision_at_3 value: 9.157 - type: precision_at_5 value: 6.4030000000000005 - type: recall_at_1 value: 10.638 - type: recall_at_10 value: 28.794999999999998 - type: recall_at_100 value: 49.277 - type: recall_at_1000 value: 74.615 - type: recall_at_20 value: 34.247 - type: recall_at_3 value: 20.183 - type: recall_at_5 value: 23.180999999999997 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval (default) type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: main_score value: 11.373999999999999 - type: map_at_1 value: 6.414000000000001 - type: map_at_10 value: 9.346 - type: map_at_100 value: 9.957 - type: map_at_1000 value: 10.068000000000001 - type: map_at_20 value: 9.695 - type: map_at_3 value: 8.404 - type: map_at_5 value: 8.915 - type: mrr_at_1 value: 7.024029574861368 - type: mrr_at_10 value: 10.393451280697127 - type: mrr_at_100 value: 11.019498040355051 - type: mrr_at_1000 value: 11.126514549307483 - type: mrr_at_20 value: 10.739949578915956 - type: mrr_at_3 value: 9.365372766481826 - type: mrr_at_5 value: 9.929143561306228 - type: nauc_map_at_1000_diff1 value: 12.814381907786581 - type: nauc_map_at_1000_max value: 29.092426026076524 - type: nauc_map_at_1000_std value: -9.255923420073602 - type: nauc_map_at_100_diff1 value: 12.878426138689159 - type: nauc_map_at_100_max value: 29.122942173430786 - type: nauc_map_at_100_std value: -9.23900341988892 - type: nauc_map_at_10_diff1 value: 12.489510614799556 - type: nauc_map_at_10_max value: 29.750605132230973 - type: nauc_map_at_10_std value: -10.891316457455034 - type: nauc_map_at_1_diff1 value: 21.979091153692785 - type: nauc_map_at_1_max value: 35.67921476025159 - type: nauc_map_at_1_std value: -14.076714572064722 - type: nauc_map_at_20_diff1 value: 12.939397189474525 - type: nauc_map_at_20_max value: 29.2420862234903 - type: nauc_map_at_20_std value: -9.697032010892098 - type: nauc_map_at_3_diff1 value: 11.71956313714658 - type: nauc_map_at_3_max value: 31.408331125347317 - type: nauc_map_at_3_std value: -12.366045347802453 - type: nauc_map_at_5_diff1 value: 11.261295189080775 - type: nauc_map_at_5_max value: 30.532438938899865 - type: nauc_map_at_5_std value: -12.056642279674733 - type: nauc_mrr_at_1000_diff1 value: 12.232878554105064 - type: nauc_mrr_at_1000_max value: 26.79489706046956 - type: nauc_mrr_at_1000_std value: -6.8992050502406315 - type: nauc_mrr_at_100_diff1 value: 
12.294839666442735 - type: nauc_mrr_at_100_max value: 26.78607457984652 - type: nauc_mrr_at_100_std value: -6.878887030823078 - type: nauc_mrr_at_10_diff1 value: 12.002724356254795 - type: nauc_mrr_at_10_max value: 27.40402310816038 - type: nauc_mrr_at_10_std value: -8.162035027744258 - type: nauc_mrr_at_1_diff1 value: 20.886203498846683 - type: nauc_mrr_at_1_max value: 33.254317694509375 - type: nauc_mrr_at_1_std value: -10.117522555828865 - type: nauc_mrr_at_20_diff1 value: 12.319675769640858 - type: nauc_mrr_at_20_max value: 26.87015727907368 - type: nauc_mrr_at_20_std value: -7.2617234809484135 - type: nauc_mrr_at_3_diff1 value: 11.230701177630559 - type: nauc_mrr_at_3_max value: 29.122126861558968 - type: nauc_mrr_at_3_std value: -9.026936451805618 - type: nauc_mrr_at_5_diff1 value: 10.722689392365698 - type: nauc_mrr_at_5_max value: 27.993297554036012 - type: nauc_mrr_at_5_std value: -9.203791949467071 - type: nauc_ndcg_at_1000_diff1 value: 11.60863424444098 - type: nauc_ndcg_at_1000_max value: 24.57800369950003 - type: nauc_ndcg_at_1000_std value: -3.153398878672258 - type: nauc_ndcg_at_100_diff1 value: 12.469794088622505 - type: nauc_ndcg_at_100_max value: 25.001650897821804 - type: nauc_ndcg_at_100_std value: -3.0373993012052956 - type: nauc_ndcg_at_10_diff1 value: 11.20161781483793 - type: nauc_ndcg_at_10_max value: 26.63677144307719 - type: nauc_ndcg_at_10_std value: -8.484641569381287 - type: nauc_ndcg_at_1_diff1 value: 20.886203498846683 - type: nauc_ndcg_at_1_max value: 33.254317694509375 - type: nauc_ndcg_at_1_std value: -10.117522555828865 - type: nauc_ndcg_at_20_diff1 value: 12.525480705639607 - type: nauc_ndcg_at_20_max value: 25.305210925916516 - type: nauc_ndcg_at_20_std value: -5.310390743566156 - type: nauc_ndcg_at_3_diff1 value: 9.410259553800584 - type: nauc_ndcg_at_3_max value: 29.021903193094463 - type: nauc_ndcg_at_3_std value: -10.710588632351651 - type: nauc_ndcg_at_5_diff1 value: 8.542378256013144 - type: nauc_ndcg_at_5_max value: 27.76839928117293 - type: nauc_ndcg_at_5_std value: -10.86086606320655 - type: nauc_precision_at_1000_diff1 value: 6.970182551195389 - type: nauc_precision_at_1000_max value: 0.5412999294836751 - type: nauc_precision_at_1000_std value: 7.012494393070737 - type: nauc_precision_at_100_diff1 value: 14.185556880215492 - type: nauc_precision_at_100_max value: 13.099017338602453 - type: nauc_precision_at_100_std value: 8.819688120163907 - type: nauc_precision_at_10_diff1 value: 10.272854713458313 - type: nauc_precision_at_10_max value: 17.757675634794186 - type: nauc_precision_at_10_std value: -3.1133486120801988 - type: nauc_precision_at_1_diff1 value: 20.886203498846683 - type: nauc_precision_at_1_max value: 33.254317694509375 - type: nauc_precision_at_1_std value: -10.117522555828865 - type: nauc_precision_at_20_diff1 value: 14.088873804920043 - type: nauc_precision_at_20_max value: 14.90331907367224 - type: nauc_precision_at_20_std value: 4.034708541338394 - type: nauc_precision_at_3_diff1 value: 3.2713456006968484 - type: nauc_precision_at_3_max value: 22.242467561792488 - type: nauc_precision_at_3_std value: -7.484175123651296 - type: nauc_precision_at_5_diff1 value: 3.3283399856227054 - type: nauc_precision_at_5_max value: 19.173330923166482 - type: nauc_precision_at_5_std value: -6.826663840827791 - type: nauc_recall_at_1000_diff1 value: 8.668188603519953 - type: nauc_recall_at_1000_max value: 17.270973316398546 - type: nauc_recall_at_1000_std value: 8.465785248503957 - type: nauc_recall_at_100_diff1 value: 12.084384969430875 - 
type: nauc_recall_at_100_max value: 18.874679350876704 - type: nauc_recall_at_100_std value: 8.095326820740619 - type: nauc_recall_at_10_diff1 value: 9.382418742690367 - type: nauc_recall_at_10_max value: 21.96752741022579 - type: nauc_recall_at_10_std value: -4.745351078438475 - type: nauc_recall_at_1_diff1 value: 21.979091153692785 - type: nauc_recall_at_1_max value: 35.67921476025159 - type: nauc_recall_at_1_std value: -14.076714572064722 - type: nauc_recall_at_20_diff1 value: 12.502935936269866 - type: nauc_recall_at_20_max value: 18.929026175207063 - type: nauc_recall_at_20_std value: 2.5944671259870504 - type: nauc_recall_at_3_diff1 value: 4.4289380840974735 - type: nauc_recall_at_3_max value: 26.258771203699638 - type: nauc_recall_at_3_std value: -10.675231031449831 - type: nauc_recall_at_5_diff1 value: 3.359403384626283 - type: nauc_recall_at_5_max value: 23.44431771658149 - type: nauc_recall_at_5_std value: -9.89846436941446 - type: ndcg_at_1 value: 7.024 - type: ndcg_at_10 value: 11.373999999999999 - type: ndcg_at_100 value: 14.689 - type: ndcg_at_1000 value: 17.955 - type: ndcg_at_20 value: 12.587000000000002 - type: ndcg_at_3 value: 9.4 - type: ndcg_at_5 value: 10.288 - type: precision_at_1 value: 7.024 - type: precision_at_10 value: 1.959 - type: precision_at_100 value: 0.386 - type: precision_at_1000 value: 0.07200000000000001 - type: precision_at_20 value: 1.248 - type: precision_at_3 value: 4.313000000000001 - type: precision_at_5 value: 3.1419999999999995 - type: recall_at_1 value: 6.414000000000001 - type: recall_at_10 value: 16.663 - type: recall_at_100 value: 32.627 - type: recall_at_1000 value: 57.965 - type: recall_at_20 value: 21.254 - type: recall_at_3 value: 11.05 - type: recall_at_5 value: 13.306000000000001 - task: type: Retrieval dataset: name: MTEB ClimateFEVER (default) type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: main_score value: 16.171 - type: map_at_1 value: 6.49 - type: map_at_10 value: 10.86 - type: map_at_100 value: 12.029 - type: map_at_1000 value: 12.203 - type: map_at_20 value: 11.436 - type: map_at_3 value: 9.043 - type: map_at_5 value: 9.8 - type: mrr_at_1 value: 14.462540716612377 - type: mrr_at_10 value: 22.29667545628457 - type: mrr_at_100 value: 23.37683906670798 - type: mrr_at_1000 value: 23.444939100216125 - type: mrr_at_20 value: 22.96344510401194 - type: mrr_at_3 value: 19.543973941368094 - type: mrr_at_5 value: 20.856677524429962 - type: nauc_map_at_1000_diff1 value: 25.452522122995575 - type: nauc_map_at_1000_max value: 12.219315907930918 - type: nauc_map_at_1000_std value: 16.320282612741305 - type: nauc_map_at_100_diff1 value: 25.507996305601317 - type: nauc_map_at_100_max value: 12.06761857799284 - type: nauc_map_at_100_std value: 16.021924617361154 - type: nauc_map_at_10_diff1 value: 25.57683857235436 - type: nauc_map_at_10_max value: 11.583315696972235 - type: nauc_map_at_10_std value: 13.393037535035432 - type: nauc_map_at_1_diff1 value: 33.53502479094588 - type: nauc_map_at_1_max value: 13.878001277352162 - type: nauc_map_at_1_std value: 5.938822039290204 - type: nauc_map_at_20_diff1 value: 25.55600131663723 - type: nauc_map_at_20_max value: 11.767498368847294 - type: nauc_map_at_20_std value: 14.874735745830284 - type: nauc_map_at_3_diff1 value: 28.16358100998465 - type: nauc_map_at_3_max value: 11.009921978477848 - type: nauc_map_at_3_std value: 9.652386014234253 - type: nauc_map_at_5_diff1 value: 26.439156523376795 - type: nauc_map_at_5_max value: 
11.895365754476197 - type: nauc_map_at_5_std value: 11.092649215492974 - type: nauc_mrr_at_1000_diff1 value: 21.97546949903572 - type: nauc_mrr_at_1000_max value: 14.261163831146325 - type: nauc_mrr_at_1000_std value: 19.72512565669776 - type: nauc_mrr_at_100_diff1 value: 21.96565333521718 - type: nauc_mrr_at_100_max value: 14.25918411052649 - type: nauc_mrr_at_100_std value: 19.72641795619631 - type: nauc_mrr_at_10_diff1 value: 21.761361506131664 - type: nauc_mrr_at_10_max value: 13.870635547681125 - type: nauc_mrr_at_10_std value: 18.899030921740913 - type: nauc_mrr_at_1_diff1 value: 28.225624779989793 - type: nauc_mrr_at_1_max value: 15.731268371038876 - type: nauc_mrr_at_1_std value: 11.817097465195838 - type: nauc_mrr_at_20_diff1 value: 21.932653013488263 - type: nauc_mrr_at_20_max value: 14.267696655537113 - type: nauc_mrr_at_20_std value: 19.57763771339346 - type: nauc_mrr_at_3_diff1 value: 22.972742704805594 - type: nauc_mrr_at_3_max value: 13.606825043059484 - type: nauc_mrr_at_3_std value: 16.66396056842737 - type: nauc_mrr_at_5_diff1 value: 21.53161336998259 - type: nauc_mrr_at_5_max value: 13.78805281788865 - type: nauc_mrr_at_5_std value: 17.48258179886329 - type: nauc_ndcg_at_1000_diff1 value: 21.510438201814555 - type: nauc_ndcg_at_1000_max value: 15.294388509269332 - type: nauc_ndcg_at_1000_std value: 29.78286871086174 - type: nauc_ndcg_at_100_diff1 value: 21.909993929792968 - type: nauc_ndcg_at_100_max value: 13.806153792247411 - type: nauc_ndcg_at_100_std value: 26.31514822578377 - type: nauc_ndcg_at_10_diff1 value: 21.814688827039973 - type: nauc_ndcg_at_10_max value: 11.74040640724938 - type: nauc_ndcg_at_10_std value: 18.95001631559189 - type: nauc_ndcg_at_1_diff1 value: 28.225624779989793 - type: nauc_ndcg_at_1_max value: 15.731268371038876 - type: nauc_ndcg_at_1_std value: 11.817097465195838 - type: nauc_ndcg_at_20_diff1 value: 22.110803283934597 - type: nauc_ndcg_at_20_max value: 12.533091773854643 - type: nauc_ndcg_at_20_std value: 22.334144596461595 - type: nauc_ndcg_at_3_diff1 value: 24.58550620567529 - type: nauc_ndcg_at_3_max value: 11.495133989089155 - type: nauc_ndcg_at_3_std value: 14.019950240046125 - type: nauc_ndcg_at_5_diff1 value: 22.63932744589355 - type: nauc_ndcg_at_5_max value: 12.210494829061583 - type: nauc_ndcg_at_5_std value: 14.986538103879571 - type: nauc_precision_at_1000_diff1 value: 3.0659507288198222 - type: nauc_precision_at_1000_max value: 17.651636411603363 - type: nauc_precision_at_1000_std value: 46.687885011722905 - type: nauc_precision_at_100_diff1 value: 9.322266673560664 - type: nauc_precision_at_100_max value: 16.453949056266676 - type: nauc_precision_at_100_std value: 41.48389095040357 - type: nauc_precision_at_10_diff1 value: 11.448954567469192 - type: nauc_precision_at_10_max value: 12.803293999157306 - type: nauc_precision_at_10_std value: 30.666747386505875 - type: nauc_precision_at_1_diff1 value: 28.225624779989793 - type: nauc_precision_at_1_max value: 15.731268371038876 - type: nauc_precision_at_1_std value: 11.817097465195838 - type: nauc_precision_at_20_diff1 value: 12.581503085208197 - type: nauc_precision_at_20_max value: 14.622144016052083 - type: nauc_precision_at_20_std value: 36.42962789257147 - type: nauc_precision_at_3_diff1 value: 18.22988167028705 - type: nauc_precision_at_3_max value: 10.434936075066396 - type: nauc_precision_at_3_std value: 20.955643678318854 - type: nauc_precision_at_5_diff1 value: 13.956844187820867 - type: nauc_precision_at_5_max value: 12.934736514145872 - type: nauc_precision_at_5_std 
value: 24.089671716662217 - type: nauc_recall_at_1000_diff1 value: 12.176293176713001 - type: nauc_recall_at_1000_max value: 16.64676058872791 - type: nauc_recall_at_1000_std value: 48.3590425892625 - type: nauc_recall_at_100_diff1 value: 14.099039177766926 - type: nauc_recall_at_100_max value: 11.566617068317054 - type: nauc_recall_at_100_std value: 35.558422008286676 - type: nauc_recall_at_10_diff1 value: 16.256605283273597 - type: nauc_recall_at_10_max value: 8.008716206119368 - type: nauc_recall_at_10_std value: 22.100120995196388 - type: nauc_recall_at_1_diff1 value: 33.53502479094588 - type: nauc_recall_at_1_max value: 13.878001277352162 - type: nauc_recall_at_1_std value: 5.938822039290204 - type: nauc_recall_at_20_diff1 value: 16.06215529359703 - type: nauc_recall_at_20_max value: 9.19166885836403 - type: nauc_recall_at_20_std value: 28.421149511620403 - type: nauc_recall_at_3_diff1 value: 23.305628146033623 - type: nauc_recall_at_3_max value: 8.33622109524315 - type: nauc_recall_at_3_std value: 13.031246726102937 - type: nauc_recall_at_5_diff1 value: 18.43232483383881 - type: nauc_recall_at_5_max value: 10.155413231665907 - type: nauc_recall_at_5_std value: 15.628732838871349 - type: ndcg_at_1 value: 14.463000000000001 - type: ndcg_at_10 value: 16.171 - type: ndcg_at_100 value: 21.862000000000002 - type: ndcg_at_1000 value: 25.579 - type: ndcg_at_20 value: 18.208 - type: ndcg_at_3 value: 12.65 - type: ndcg_at_5 value: 13.600999999999999 - type: precision_at_1 value: 14.463000000000001 - type: precision_at_10 value: 5.27 - type: precision_at_100 value: 1.13 - type: precision_at_1000 value: 0.181 - type: precision_at_20 value: 3.472 - type: precision_at_3 value: 9.511 - type: precision_at_5 value: 7.257 - type: recall_at_1 value: 6.49 - type: recall_at_10 value: 20.145 - type: recall_at_100 value: 40.491 - type: recall_at_1000 value: 61.954 - type: recall_at_20 value: 26.116 - type: recall_at_3 value: 11.671 - type: recall_at_5 value: 14.350999999999999 - task: type: Retrieval dataset: name: MTEB DBPedia (default) type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: main_score value: 23.314 - type: map_at_1 value: 4.1930000000000005 - type: map_at_10 value: 9.451 - type: map_at_100 value: 13.345 - type: map_at_1000 value: 14.295 - type: map_at_20 value: 10.95 - type: map_at_3 value: 6.719 - type: map_at_5 value: 7.943 - type: mrr_at_1 value: 41.25 - type: mrr_at_10 value: 52.50029761904762 - type: mrr_at_100 value: 53.04127177817847 - type: mrr_at_1000 value: 53.06001345137005 - type: mrr_at_20 value: 52.82097353648243 - type: mrr_at_3 value: 49.79166666666667 - type: mrr_at_5 value: 51.141666666666666 - type: nauc_map_at_1000_diff1 value: 29.246888751401183 - type: nauc_map_at_1000_max value: 20.992593589579332 - type: nauc_map_at_1000_std value: 27.048244101785983 - type: nauc_map_at_100_diff1 value: 29.396558524731773 - type: nauc_map_at_100_max value: 18.684709362814893 - type: nauc_map_at_100_std value: 24.42843328970449 - type: nauc_map_at_10_diff1 value: 36.612473802810264 - type: nauc_map_at_10_max value: 5.370000439081981 - type: nauc_map_at_10_std value: 10.225231933933902 - type: nauc_map_at_1_diff1 value: 47.0850596735353 - type: nauc_map_at_1_max value: -5.002455326326334 - type: nauc_map_at_1_std value: -2.5065181052556857 - type: nauc_map_at_20_diff1 value: 32.814505431185125 - type: nauc_map_at_20_max value: 10.970159569703512 - type: nauc_map_at_20_std value: 15.568239172475318 - type: nauc_map_at_3_diff1 
value: 40.75862158186446 - type: nauc_map_at_3_max value: -0.4213270731304502 - type: nauc_map_at_3_std value: 1.3560087800504952 - type: nauc_map_at_5_diff1 value: 39.866935212237244 - type: nauc_map_at_5_max value: 1.4074079573507723 - type: nauc_map_at_5_std value: 4.733909226917712 - type: nauc_mrr_at_1000_diff1 value: 36.30061840549585 - type: nauc_mrr_at_1000_max value: 31.937578467317252 - type: nauc_mrr_at_1000_std value: 19.888993080628868 - type: nauc_mrr_at_100_diff1 value: 36.316041541899715 - type: nauc_mrr_at_100_max value: 31.93973398065863 - type: nauc_mrr_at_100_std value: 19.882545554953712 - type: nauc_mrr_at_10_diff1 value: 36.39340702961567 - type: nauc_mrr_at_10_max value: 31.939905747726755 - type: nauc_mrr_at_10_std value: 19.790744191534902 - type: nauc_mrr_at_1_diff1 value: 39.41242061375868 - type: nauc_mrr_at_1_max value: 33.06184450800732 - type: nauc_mrr_at_1_std value: 20.58564018034613 - type: nauc_mrr_at_20_diff1 value: 36.274937013283136 - type: nauc_mrr_at_20_max value: 31.92296521916047 - type: nauc_mrr_at_20_std value: 19.707437973673255 - type: nauc_mrr_at_3_diff1 value: 35.12784809764666 - type: nauc_mrr_at_3_max value: 31.44276928443377 - type: nauc_mrr_at_3_std value: 20.001429588478665 - type: nauc_mrr_at_5_diff1 value: 36.06783433185437 - type: nauc_mrr_at_5_max value: 31.301028441740108 - type: nauc_mrr_at_5_std value: 19.911112472798585 - type: nauc_ndcg_at_1000_diff1 value: 30.145005420806648 - type: nauc_ndcg_at_1000_max value: 28.835794569879603 - type: nauc_ndcg_at_1000_std value: 40.94262912650509 - type: nauc_ndcg_at_100_diff1 value: 31.004392045759786 - type: nauc_ndcg_at_100_max value: 22.609883734098876 - type: nauc_ndcg_at_100_std value: 32.45496883796963 - type: nauc_ndcg_at_10_diff1 value: 33.95200380763225 - type: nauc_ndcg_at_10_max value: 22.166120818189874 - type: nauc_ndcg_at_10_std value: 24.31143387763355 - type: nauc_ndcg_at_1_diff1 value: 37.09936078848664 - type: nauc_ndcg_at_1_max value: 23.177643445251952 - type: nauc_ndcg_at_1_std value: 15.644267850000382 - type: nauc_ndcg_at_20_diff1 value: 32.94916178385309 - type: nauc_ndcg_at_20_max value: 20.493565131056947 - type: nauc_ndcg_at_20_std value: 24.71465577127248 - type: nauc_ndcg_at_3_diff1 value: 31.83589559130389 - type: nauc_ndcg_at_3_max value: 25.624482222498973 - type: nauc_ndcg_at_3_std value: 22.398699425588234 - type: nauc_ndcg_at_5_diff1 value: 34.467382509530104 - type: nauc_ndcg_at_5_max value: 23.918417030607156 - type: nauc_ndcg_at_5_std value: 22.52442509626043 - type: nauc_precision_at_1000_diff1 value: 0.36979033722690385 - type: nauc_precision_at_1000_max value: 31.789852778368232 - type: nauc_precision_at_1000_std value: 19.034827076241115 - type: nauc_precision_at_100_diff1 value: 3.8616359733836267 - type: nauc_precision_at_100_max value: 42.24487879027765 - type: nauc_precision_at_100_std value: 36.10418503006711 - type: nauc_precision_at_10_diff1 value: 11.897092853884175 - type: nauc_precision_at_10_max value: 37.25079837960547 - type: nauc_precision_at_10_std value: 33.538882873177194 - type: nauc_precision_at_1_diff1 value: 39.41242061375868 - type: nauc_precision_at_1_max value: 33.06184450800732 - type: nauc_precision_at_1_std value: 20.58564018034613 - type: nauc_precision_at_20_diff1 value: 8.324631452363194 - type: nauc_precision_at_20_max value: 40.095554658189535 - type: nauc_precision_at_20_std value: 35.32627161609494 - type: nauc_precision_at_3_diff1 value: 21.480845765105535 - type: nauc_precision_at_3_max value: 34.404510137957296 
- type: nauc_precision_at_3_std value: 25.88702664185785 - type: nauc_precision_at_5_diff1 value: 20.854535571676504 - type: nauc_precision_at_5_max value: 36.5652373884139 - type: nauc_precision_at_5_std value: 29.91773461835973 - type: nauc_recall_at_1000_diff1 value: 17.160938178557515 - type: nauc_recall_at_1000_max value: 18.745040668172734 - type: nauc_recall_at_1000_std value: 47.77808860970952 - type: nauc_recall_at_100_diff1 value: 19.663135115365638 - type: nauc_recall_at_100_max value: 11.5792555702293 - type: nauc_recall_at_100_std value: 29.34316489509291 - type: nauc_recall_at_10_diff1 value: 26.460992406759853 - type: nauc_recall_at_10_max value: -4.0242727391107 - type: nauc_recall_at_10_std value: 6.854471190594813 - type: nauc_recall_at_1_diff1 value: 47.0850596735353 - type: nauc_recall_at_1_max value: -5.002455326326334 - type: nauc_recall_at_1_std value: -2.5065181052556857 - type: nauc_recall_at_20_diff1 value: 20.815109658844243 - type: nauc_recall_at_20_max value: 2.97987494189501 - type: nauc_recall_at_20_std value: 13.735624155054865 - type: nauc_recall_at_3_diff1 value: 34.69852227923236 - type: nauc_recall_at_3_max value: -4.575310462476451 - type: nauc_recall_at_3_std value: -2.0162790496939738 - type: nauc_recall_at_5_diff1 value: 30.573111849961087 - type: nauc_recall_at_5_max value: -6.852781921434314 - type: nauc_recall_at_5_std value: -0.12386515905111516 - type: ndcg_at_1 value: 29.25 - type: ndcg_at_10 value: 23.314 - type: ndcg_at_100 value: 27.039 - type: ndcg_at_1000 value: 33.547 - type: ndcg_at_20 value: 22.908 - type: ndcg_at_3 value: 26.067 - type: ndcg_at_5 value: 24.41 - type: precision_at_1 value: 41.25 - type: precision_at_10 value: 20.8 - type: precision_at_100 value: 6.7250000000000005 - type: precision_at_1000 value: 1.345 - type: precision_at_20 value: 15.437000000000001 - type: precision_at_3 value: 32.917 - type: precision_at_5 value: 26.8 - type: recall_at_1 value: 4.1930000000000005 - type: recall_at_10 value: 14.66 - type: recall_at_100 value: 34.512 - type: recall_at_1000 value: 56.525999999999996 - type: recall_at_20 value: 19.41 - type: recall_at_3 value: 7.993 - type: recall_at_5 value: 10.836 - task: type: Classification dataset: name: MTEB EmotionClassification (default) type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 40.955000000000005 - type: f1 value: 37.3982202431314 - type: f1_weighted value: 42.96026705692032 - type: main_score value: 40.955000000000005 - task: type: Retrieval dataset: name: MTEB FEVER (default) type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: main_score value: 27.395999999999997 - type: map_at_1 value: 14.455000000000002 - type: map_at_10 value: 22.406000000000002 - type: map_at_100 value: 23.513 - type: map_at_1000 value: 23.592 - type: map_at_20 value: 23.049 - type: map_at_3 value: 19.664 - type: map_at_5 value: 21.15 - type: mrr_at_1 value: 15.391539153915392 - type: mrr_at_10 value: 23.74293381719119 - type: mrr_at_100 value: 24.851964008010718 - type: mrr_at_1000 value: 24.92195076388346 - type: mrr_at_20 value: 24.40141458580929 - type: mrr_at_3 value: 20.854585458545802 - type: mrr_at_5 value: 22.427492749274947 - type: nauc_map_at_1000_diff1 value: 20.2790469606637 - type: nauc_map_at_1000_max value: 0.8826386989029893 - type: nauc_map_at_1000_std value: -17.05582143283508 - type: nauc_map_at_100_diff1 value: 20.264642559395448 - type: 
nauc_map_at_100_max value: 0.8772383427105086 - type: nauc_map_at_100_std value: -17.08005716127888 - type: nauc_map_at_10_diff1 value: 20.42750070187778 - type: nauc_map_at_10_max value: 0.4502295866975133 - type: nauc_map_at_10_std value: -17.695115627220385 - type: nauc_map_at_1_diff1 value: 25.402387231266548 - type: nauc_map_at_1_max value: 1.6424815864829863 - type: nauc_map_at_1_std value: -18.49910519910091 - type: nauc_map_at_20_diff1 value: 20.26688144985526 - type: nauc_map_at_20_max value: 0.7087392636384585 - type: nauc_map_at_20_std value: -17.375993165109765 - type: nauc_map_at_3_diff1 value: 20.765888356127807 - type: nauc_map_at_3_max value: -0.07059496556722329 - type: nauc_map_at_3_std value: -18.419231946592017 - type: nauc_map_at_5_diff1 value: 20.905440664217693 - type: nauc_map_at_5_max value: 0.3584165899677387 - type: nauc_map_at_5_std value: -17.960238537840485 - type: nauc_mrr_at_1000_diff1 value: 19.949709649869035 - type: nauc_mrr_at_1000_max value: 1.0702665531867555 - type: nauc_mrr_at_1000_std value: -17.180355615691344 - type: nauc_mrr_at_100_diff1 value: 19.93346406381515 - type: nauc_mrr_at_100_max value: 1.0727094729339137 - type: nauc_mrr_at_100_std value: -17.19274247521704 - type: nauc_mrr_at_10_diff1 value: 20.024945509689047 - type: nauc_mrr_at_10_max value: 0.6906042096382804 - type: nauc_mrr_at_10_std value: -17.74983173883923 - type: nauc_mrr_at_1_diff1 value: 24.940895245977828 - type: nauc_mrr_at_1_max value: 1.5525079921719245 - type: nauc_mrr_at_1_std value: -18.925250181715437 - type: nauc_mrr_at_20_diff1 value: 19.90257349289708 - type: nauc_mrr_at_20_max value: 0.925879628869193 - type: nauc_mrr_at_20_std value: -17.44500121630808 - type: nauc_mrr_at_3_diff1 value: 20.26325171483128 - type: nauc_mrr_at_3_max value: 0.018857144836432145 - type: nauc_mrr_at_3_std value: -18.432656313618555 - type: nauc_mrr_at_5_diff1 value: 20.445492658201456 - type: nauc_mrr_at_5_max value: 0.5462571868453703 - type: nauc_mrr_at_5_std value: -17.973089271207673 - type: nauc_ndcg_at_1000_diff1 value: 18.71462836235728 - type: nauc_ndcg_at_1000_max value: 2.289345161963916 - type: nauc_ndcg_at_1000_std value: -13.38521466871558 - type: nauc_ndcg_at_100_diff1 value: 18.472939661147674 - type: nauc_ndcg_at_100_max value: 2.3580056588353764 - type: nauc_ndcg_at_100_std value: -13.916898857530924 - type: nauc_ndcg_at_10_diff1 value: 18.90039313740843 - type: nauc_ndcg_at_10_max value: 0.5795427442774991 - type: nauc_ndcg_at_10_std value: -16.988099731346406 - type: nauc_ndcg_at_1_diff1 value: 24.940895245977828 - type: nauc_ndcg_at_1_max value: 1.5525079921719245 - type: nauc_ndcg_at_1_std value: -18.925250181715437 - type: nauc_ndcg_at_20_diff1 value: 18.423913612064656 - type: nauc_ndcg_at_20_max value: 1.4014988518484526 - type: nauc_ndcg_at_20_std value: -15.904079487263198 - type: nauc_ndcg_at_3_diff1 value: 19.52338368556581 - type: nauc_ndcg_at_3_max value: -0.4303556520640412 - type: nauc_ndcg_at_3_std value: -18.355382024512902 - type: nauc_ndcg_at_5_diff1 value: 19.902542455553938 - type: nauc_ndcg_at_5_max value: 0.36819962108400217 - type: nauc_ndcg_at_5_std value: -17.534941004258688 - type: nauc_precision_at_1000_diff1 value: 3.426985078198139 - type: nauc_precision_at_1000_max value: 12.054081264569234 - type: nauc_precision_at_1000_std value: 16.607672572475924 - type: nauc_precision_at_100_diff1 value: 10.247998444310676 - type: nauc_precision_at_100_max value: 10.546078762132966 - type: nauc_precision_at_100_std value: 2.841310743504355 - type: 
nauc_precision_at_10_diff1 value: 15.008646835319983 - type: nauc_precision_at_10_max value: 1.7859018920625784 - type: nauc_precision_at_10_std value: -15.012353469423603 - type: nauc_precision_at_1_diff1 value: 24.940895245977828 - type: nauc_precision_at_1_max value: 1.5525079921719245 - type: nauc_precision_at_1_std value: -18.925250181715437 - type: nauc_precision_at_20_diff1 value: 12.913065542433785 - type: nauc_precision_at_20_max value: 4.79535972667671 - type: nauc_precision_at_20_std value: -10.959665280880227 - type: nauc_precision_at_3_diff1 value: 16.554046135988255 - type: nauc_precision_at_3_max value: -1.016842829460215 - type: nauc_precision_at_3_std value: -18.30131063437463 - type: nauc_precision_at_5_diff1 value: 17.57056354388634 - type: nauc_precision_at_5_max value: 0.9206722905039284 - type: nauc_precision_at_5_std value: -16.555200700131984 - type: nauc_recall_at_1000_diff1 value: 9.780796880192254 - type: nauc_recall_at_1000_max value: 10.84645095035794 - type: nauc_recall_at_1000_std value: 19.834658134619517 - type: nauc_recall_at_100_diff1 value: 11.918081129843214 - type: nauc_recall_at_100_max value: 8.243549025564546 - type: nauc_recall_at_100_std value: 1.2262445969338627 - type: nauc_recall_at_10_diff1 value: 14.974454983131913 - type: nauc_recall_at_10_max value: 0.5959180392097884 - type: nauc_recall_at_10_std value: -14.540087182122505 - type: nauc_recall_at_1_diff1 value: 25.402387231266548 - type: nauc_recall_at_1_max value: 1.6424815864829863 - type: nauc_recall_at_1_std value: -18.49910519910091 - type: nauc_recall_at_20_diff1 value: 13.228115653538545 - type: nauc_recall_at_20_max value: 3.1832213036031716 - type: nauc_recall_at_20_std value: -10.777691355829218 - type: nauc_recall_at_3_diff1 value: 16.842366692783443 - type: nauc_recall_at_3_max value: -1.2399637926309348 - type: nauc_recall_at_3_std value: -18.035740863838644 - type: nauc_recall_at_5_diff1 value: 17.647020591600743 - type: nauc_recall_at_5_max value: 0.2358327920644874 - type: nauc_recall_at_5_std value: -16.09390361188663 - type: ndcg_at_1 value: 15.392 - type: ndcg_at_10 value: 27.395999999999997 - type: ndcg_at_100 value: 33.0 - type: ndcg_at_1000 value: 35.163 - type: ndcg_at_20 value: 29.720000000000002 - type: ndcg_at_3 value: 21.666 - type: ndcg_at_5 value: 24.352999999999998 - type: precision_at_1 value: 15.392 - type: precision_at_10 value: 4.539 - type: precision_at_100 value: 0.752 - type: precision_at_1000 value: 0.096 - type: precision_at_20 value: 2.7720000000000002 - type: precision_at_3 value: 9.415999999999999 - type: precision_at_5 value: 7.051 - type: recall_at_1 value: 14.455000000000002 - type: recall_at_10 value: 41.898 - type: recall_at_100 value: 67.97 - type: recall_at_1000 value: 84.625 - type: recall_at_20 value: 50.829 - type: recall_at_3 value: 26.262999999999998 - type: recall_at_5 value: 32.708 - task: type: Retrieval dataset: name: MTEB FiQA2018 (default) type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: main_score value: 11.824 - type: map_at_1 value: 5.218 - type: map_at_10 value: 8.517 - type: map_at_100 value: 9.504 - type: map_at_1000 value: 9.689 - type: map_at_20 value: 9.031 - type: map_at_3 value: 7.319000000000001 - type: map_at_5 value: 8.04 - type: mrr_at_1 value: 10.339506172839506 - type: mrr_at_10 value: 15.349059376837152 - type: mrr_at_100 value: 16.371061402440464 - type: mrr_at_1000 value: 16.477656097290016 - type: mrr_at_20 value: 15.873015674142271 - type: mrr_at_3 
value: 13.708847736625513 - type: mrr_at_5 value: 14.627057613168724 - type: nauc_map_at_1000_diff1 value: 27.174948207247112 - type: nauc_map_at_1000_max value: -4.311649899079759 - type: nauc_map_at_1000_std value: -0.38160090900318006 - type: nauc_map_at_100_diff1 value: 27.20326811103734 - type: nauc_map_at_100_max value: -4.543211866111751 - type: nauc_map_at_100_std value: -0.470958286287458 - type: nauc_map_at_10_diff1 value: 26.80055164419936 - type: nauc_map_at_10_max value: -4.700092406624881 - type: nauc_map_at_10_std value: -1.6579549226632215 - type: nauc_map_at_1_diff1 value: 36.99801644748919 - type: nauc_map_at_1_max value: -4.735306328466845 - type: nauc_map_at_1_std value: -1.5185777718681428 - type: nauc_map_at_20_diff1 value: 27.191899496821808 - type: nauc_map_at_20_max value: -4.789787607757763 - type: nauc_map_at_20_std value: -0.37380992660144197 - type: nauc_map_at_3_diff1 value: 29.824570230088295 - type: nauc_map_at_3_max value: -4.7671276715969 - type: nauc_map_at_3_std value: -2.0071047077213735 - type: nauc_map_at_5_diff1 value: 27.113522744317482 - type: nauc_map_at_5_max value: -5.899491935203268 - type: nauc_map_at_5_std value: -1.6940652864715724 - type: nauc_mrr_at_1000_diff1 value: 25.11585795715029 - type: nauc_mrr_at_1000_max value: 1.263060510525359 - type: nauc_mrr_at_1000_std value: -5.651601210106509 - type: nauc_mrr_at_100_diff1 value: 25.09849428067031 - type: nauc_mrr_at_100_max value: 1.241067672315683 - type: nauc_mrr_at_100_std value: -5.651970573797591 - type: nauc_mrr_at_10_diff1 value: 25.09970797279417 - type: nauc_mrr_at_10_max value: 1.1160065013935783 - type: nauc_mrr_at_10_std value: -5.883816075117658 - type: nauc_mrr_at_1_diff1 value: 35.87989567542961 - type: nauc_mrr_at_1_max value: -1.708274202076274 - type: nauc_mrr_at_1_std value: -5.429683562346062 - type: nauc_mrr_at_20_diff1 value: 25.161658193108927 - type: nauc_mrr_at_20_max value: 0.8942408750943778 - type: nauc_mrr_at_20_std value: -5.463735477362075 - type: nauc_mrr_at_3_diff1 value: 27.16757629929729 - type: nauc_mrr_at_3_max value: 1.1880945252092139 - type: nauc_mrr_at_3_std value: -5.877922676220352 - type: nauc_mrr_at_5_diff1 value: 25.12071281597739 - type: nauc_mrr_at_5_max value: 0.2569960461323651 - type: nauc_mrr_at_5_std value: -6.005515013860767 - type: nauc_ndcg_at_1000_diff1 value: 23.978466543769482 - type: nauc_ndcg_at_1000_max value: 2.805359295752355 - type: nauc_ndcg_at_1000_std value: 1.0616890270012553 - type: nauc_ndcg_at_100_diff1 value: 23.99617912744036 - type: nauc_ndcg_at_100_max value: -0.7181236112221313 - type: nauc_ndcg_at_100_std value: -0.31486558248308577 - type: nauc_ndcg_at_10_diff1 value: 23.157725666617054 - type: nauc_ndcg_at_10_max value: -2.253117805034787 - type: nauc_ndcg_at_10_std value: -2.5314388467670876 - type: nauc_ndcg_at_1_diff1 value: 35.87989567542961 - type: nauc_ndcg_at_1_max value: -1.708274202076274 - type: nauc_ndcg_at_1_std value: -5.429683562346062 - type: nauc_ndcg_at_20_diff1 value: 23.94491858984704 - type: nauc_ndcg_at_20_max value: -2.9102672551128395 - type: nauc_ndcg_at_20_std value: 0.47314952026471774 - type: nauc_ndcg_at_3_diff1 value: 26.861761378919986 - type: nauc_ndcg_at_3_max value: -1.4394121081704851 - type: nauc_ndcg_at_3_std value: -4.314220567441007 - type: nauc_ndcg_at_5_diff1 value: 23.37960242039838 - type: nauc_ndcg_at_5_max value: -4.179520743567826 - type: nauc_ndcg_at_5_std value: -3.3461517847684927 - type: nauc_precision_at_1000_diff1 value: 6.291560162072213 - type: 
nauc_precision_at_1000_max value: 25.654664482767476 - type: nauc_precision_at_1000_std value: -4.225784971712635 - type: nauc_precision_at_100_diff1 value: 15.766457101276988 - type: nauc_precision_at_100_max value: 13.799257676950424 - type: nauc_precision_at_100_std value: -2.6687074263027637 - type: nauc_precision_at_10_diff1 value: 16.154300406544458 - type: nauc_precision_at_10_max value: 3.99046730755771 - type: nauc_precision_at_10_std value: -5.320813807322365 - type: nauc_precision_at_1_diff1 value: 35.87989567542961 - type: nauc_precision_at_1_max value: -1.708274202076274 - type: nauc_precision_at_1_std value: -5.429683562346062 - type: nauc_precision_at_20_diff1 value: 17.072700429792718 - type: nauc_precision_at_20_max value: 3.7459960911748342 - type: nauc_precision_at_20_std value: 2.4170643350876366 - type: nauc_precision_at_3_diff1 value: 21.920250469492693 - type: nauc_precision_at_3_max value: 1.288094387802318 - type: nauc_precision_at_3_std value: -6.971791140710122 - type: nauc_precision_at_5_diff1 value: 15.607428903096954 - type: nauc_precision_at_5_max value: -1.6787818995588515 - type: nauc_precision_at_5_std value: -4.9868070952519705 - type: nauc_recall_at_1000_diff1 value: 15.881415973787488 - type: nauc_recall_at_1000_max value: 11.992945268618186 - type: nauc_recall_at_1000_std value: 14.69434950594517 - type: nauc_recall_at_100_diff1 value: 16.51233439080111 - type: nauc_recall_at_100_max value: 0.7034983345680653 - type: nauc_recall_at_100_std value: 3.795850397771235 - type: nauc_recall_at_10_diff1 value: 15.855812500754347 - type: nauc_recall_at_10_max value: -2.2964819259752907 - type: nauc_recall_at_10_std value: -0.5884784023926211 - type: nauc_recall_at_1_diff1 value: 36.99801644748919 - type: nauc_recall_at_1_max value: -4.735306328466845 - type: nauc_recall_at_1_std value: -1.5185777718681428 - type: nauc_recall_at_20_diff1 value: 17.088583717871973 - type: nauc_recall_at_20_max value: -3.7389792709745606 - type: nauc_recall_at_20_std value: 5.575038898074453 - type: nauc_recall_at_3_diff1 value: 22.80760100670119 - type: nauc_recall_at_3_max value: -4.354868233467814 - type: nauc_recall_at_3_std value: -1.1976882420463235 - type: nauc_recall_at_5_diff1 value: 16.357277072848227 - type: nauc_recall_at_5_max value: -7.564166391276693 - type: nauc_recall_at_5_std value: -1.6477511903507516 - type: ndcg_at_1 value: 10.34 - type: ndcg_at_10 value: 11.824 - type: ndcg_at_100 value: 17.009 - type: ndcg_at_1000 value: 21.413 - type: ndcg_at_20 value: 13.569999999999999 - type: ndcg_at_3 value: 10.043000000000001 - type: ndcg_at_5 value: 10.813 - type: precision_at_1 value: 10.34 - type: precision_at_10 value: 3.3329999999999997 - type: precision_at_100 value: 0.823 - type: precision_at_1000 value: 0.158 - type: precision_at_20 value: 2.253 - type: precision_at_3 value: 6.584 - type: precision_at_5 value: 5.154 - type: recall_at_1 value: 5.218 - type: recall_at_10 value: 14.967 - type: recall_at_100 value: 35.966 - type: recall_at_1000 value: 63.283 - type: recall_at_20 value: 20.888 - type: recall_at_3 value: 9.497 - type: recall_at_5 value: 12.062000000000001 - task: type: Retrieval dataset: name: MTEB HotpotQA (default) type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: main_score value: 33.601 - type: map_at_1 value: 19.055 - type: map_at_10 value: 26.378 - type: map_at_100 value: 27.247 - type: map_at_1000 value: 27.339999999999996 - type: map_at_20 value: 26.85 - type: map_at_3 value: 
24.363 - type: map_at_5 value: 25.531 - type: mrr_at_1 value: 38.12288993923025 - type: mrr_at_10 value: 45.347303945210896 - type: mrr_at_100 value: 45.98374132303968 - type: mrr_at_1000 value: 46.03787042497575 - type: mrr_at_20 value: 45.72374246944037 - type: mrr_at_3 value: 43.47062795408526 - type: mrr_at_5 value: 44.59216745442278 - type: nauc_map_at_1000_diff1 value: 49.61029676574756 - type: nauc_map_at_1000_max value: 14.306274178948069 - type: nauc_map_at_1000_std value: 12.828823677673965 - type: nauc_map_at_100_diff1 value: 49.61757758727525 - type: nauc_map_at_100_max value: 14.28014014417826 - type: nauc_map_at_100_std value: 12.771713251916466 - type: nauc_map_at_10_diff1 value: 49.80331695458256 - type: nauc_map_at_10_max value: 14.38736362353793 - type: nauc_map_at_10_std value: 11.837285174819034 - type: nauc_map_at_1_diff1 value: 60.89518556432222 - type: nauc_map_at_1_max value: 16.869499979966342 - type: nauc_map_at_1_std value: 5.873762883474129 - type: nauc_map_at_20_diff1 value: 49.71622258645394 - type: nauc_map_at_20_max value: 14.282324513024822 - type: nauc_map_at_20_std value: 12.316040359062425 - type: nauc_map_at_3_diff1 value: 51.594969142787484 - type: nauc_map_at_3_max value: 15.065531124955248 - type: nauc_map_at_3_std value: 10.096442801776883 - type: nauc_map_at_5_diff1 value: 50.371800688950955 - type: nauc_map_at_5_max value: 14.767527550172844 - type: nauc_map_at_5_std value: 10.988483248294253 - type: nauc_mrr_at_1000_diff1 value: 57.30221544434927 - type: nauc_mrr_at_1000_max value: 16.017793859581293 - type: nauc_mrr_at_1000_std value: 9.906436579683318 - type: nauc_mrr_at_100_diff1 value: 57.29432417111391 - type: nauc_mrr_at_100_max value: 16.011699462071864 - type: nauc_mrr_at_100_std value: 9.917014518140116 - type: nauc_mrr_at_10_diff1 value: 57.33765612722791 - type: nauc_mrr_at_10_max value: 16.118319101536276 - type: nauc_mrr_at_10_std value: 9.622460440078608 - type: nauc_mrr_at_1_diff1 value: 60.85369395522825 - type: nauc_mrr_at_1_max value: 16.933775694516058 - type: nauc_mrr_at_1_std value: 5.894558768949606 - type: nauc_mrr_at_20_diff1 value: 57.31592299977897 - type: nauc_mrr_at_20_max value: 16.031475898617764 - type: nauc_mrr_at_20_std value: 9.843331976335788 - type: nauc_mrr_at_3_diff1 value: 57.6124650775418 - type: nauc_mrr_at_3_max value: 16.36290710838045 - type: nauc_mrr_at_3_std value: 8.780577988221042 - type: nauc_mrr_at_5_diff1 value: 57.403485675292984 - type: nauc_mrr_at_5_max value: 16.161063703023103 - type: nauc_mrr_at_5_std value: 9.20219673432289 - type: nauc_ndcg_at_1000_diff1 value: 49.07891077830242 - type: nauc_ndcg_at_1000_max value: 14.08537399855222 - type: nauc_ndcg_at_1000_std value: 17.569960709164604 - type: nauc_ndcg_at_100_diff1 value: 49.14669859772792 - type: nauc_ndcg_at_100_max value: 13.638325073892574 - type: nauc_ndcg_at_100_std value: 16.723458541804803 - type: nauc_ndcg_at_10_diff1 value: 50.1392784710198 - type: nauc_ndcg_at_10_max value: 14.185608590705648 - type: nauc_ndcg_at_10_std value: 13.288189091203812 - type: nauc_ndcg_at_1_diff1 value: 60.89518556432222 - type: nauc_ndcg_at_1_max value: 16.869499979966342 - type: nauc_ndcg_at_1_std value: 5.873762883474129 - type: nauc_ndcg_at_20_diff1 value: 49.797268454104085 - type: nauc_ndcg_at_20_max value: 13.806890902979502 - type: nauc_ndcg_at_20_std value: 14.563147882771915 - type: nauc_ndcg_at_3_diff1 value: 52.47918114895865 - type: nauc_ndcg_at_3_max value: 15.238760898280686 - type: nauc_ndcg_at_3_std value: 10.514287406793875 - 
type: nauc_ndcg_at_5_diff1 value: 50.99120023315429 - type: nauc_ndcg_at_5_max value: 14.745429496324105 - type: nauc_ndcg_at_5_std value: 11.695862264070552 - type: nauc_precision_at_1000_diff1 value: 22.60878662068159 - type: nauc_precision_at_1000_max value: 8.207557591294677 - type: nauc_precision_at_1000_std value: 35.83506280458338 - type: nauc_precision_at_100_diff1 value: 29.330460503143463 - type: nauc_precision_at_100_max value: 7.081726910359633 - type: nauc_precision_at_100_std value: 29.048691055349412 - type: nauc_precision_at_10_diff1 value: 39.486573373792034 - type: nauc_precision_at_10_max value: 10.785943661786202 - type: nauc_precision_at_10_std value: 18.552704575254396 - type: nauc_precision_at_1_diff1 value: 60.89518556432222 - type: nauc_precision_at_1_max value: 16.869499979966342 - type: nauc_precision_at_1_std value: 5.873762883474129 - type: nauc_precision_at_20_diff1 value: 36.26651772775864 - type: nauc_precision_at_20_max value: 8.997813417199513 - type: nauc_precision_at_20_std value: 21.686796650707645 - type: nauc_precision_at_3_diff1 value: 47.335106889322965 - type: nauc_precision_at_3_max value: 14.069889708331901 - type: nauc_precision_at_3_std value: 12.964427322213885 - type: nauc_precision_at_5_diff1 value: 42.926481319467364 - type: nauc_precision_at_5_max value: 12.611254884223259 - type: nauc_precision_at_5_std value: 14.993681046665309 - type: nauc_recall_at_1000_diff1 value: 22.608786620681652 - type: nauc_recall_at_1000_max value: 8.207557591294764 - type: nauc_recall_at_1000_std value: 35.83506280458342 - type: nauc_recall_at_100_diff1 value: 29.330460503143378 - type: nauc_recall_at_100_max value: 7.081726910359586 - type: nauc_recall_at_100_std value: 29.04869105534933 - type: nauc_recall_at_10_diff1 value: 39.486573373792055 - type: nauc_recall_at_10_max value: 10.7859436617862 - type: nauc_recall_at_10_std value: 18.55270457525437 - type: nauc_recall_at_1_diff1 value: 60.89518556432222 - type: nauc_recall_at_1_max value: 16.869499979966342 - type: nauc_recall_at_1_std value: 5.873762883474129 - type: nauc_recall_at_20_diff1 value: 36.266517727758604 - type: nauc_recall_at_20_max value: 8.997813417199502 - type: nauc_recall_at_20_std value: 21.68679665070765 - type: nauc_recall_at_3_diff1 value: 47.33510688932295 - type: nauc_recall_at_3_max value: 14.069889708331843 - type: nauc_recall_at_3_std value: 12.964427322213865 - type: nauc_recall_at_5_diff1 value: 42.926481319467385 - type: nauc_recall_at_5_max value: 12.611254884223289 - type: nauc_recall_at_5_std value: 14.993681046665271 - type: ndcg_at_1 value: 38.109 - type: ndcg_at_10 value: 33.601 - type: ndcg_at_100 value: 37.509 - type: ndcg_at_1000 value: 39.778999999999996 - type: ndcg_at_20 value: 35.081 - type: ndcg_at_3 value: 29.865000000000002 - type: ndcg_at_5 value: 31.772 - type: precision_at_1 value: 38.109 - type: precision_at_10 value: 7.3069999999999995 - type: precision_at_100 value: 1.043 - type: precision_at_1000 value: 0.135 - type: precision_at_20 value: 4.132000000000001 - type: precision_at_3 value: 18.758 - type: precision_at_5 value: 12.762 - type: recall_at_1 value: 19.055 - type: recall_at_10 value: 36.536 - type: recall_at_100 value: 52.14 - type: recall_at_1000 value: 67.292 - type: recall_at_20 value: 41.317 - type: recall_at_3 value: 28.136 - type: recall_at_5 value: 31.904 - task: type: Classification dataset: name: MTEB ImdbClassification (default) type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - 
type: accuracy value: 66.998 - type: ap value: 61.60195055467346 - type: ap_weighted value: 61.60195055467346 - type: f1 value: 66.64801043272058 - type: f1_weighted value: 66.64801043272058 - type: main_score value: 66.998 - task: type: Retrieval dataset: name: MTEB MSMARCO (default) type: mteb/msmarco config: default split: test revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: main_score value: 32.273 - type: map_at_1 value: 1.26 - type: map_at_10 value: 4.999 - type: map_at_100 value: 15.226 - type: map_at_1000 value: 19.525000000000002 - type: map_at_20 value: 7.811999999999999 - type: map_at_3 value: 2.2399999999999998 - type: map_at_5 value: 2.979 - type: mrr_at_1 value: 55.81395348837209 - type: mrr_at_10 value: 66.2984496124031 - type: mrr_at_100 value: 66.6375968992248 - type: mrr_at_1000 value: 66.65521494009866 - type: mrr_at_20 value: 66.4922480620155 - type: mrr_at_3 value: 63.565891472868216 - type: mrr_at_5 value: 65.7751937984496 - type: nauc_map_at_1000_diff1 value: -7.2384335338206025 - type: nauc_map_at_1000_max value: 27.86853077487615 - type: nauc_map_at_1000_std value: 49.70595401843686 - type: nauc_map_at_100_diff1 value: -7.72485953952569 - type: nauc_map_at_100_max value: 19.20475909334588 - type: nauc_map_at_100_std value: 44.76591945108152 - type: nauc_map_at_10_diff1 value: -10.826583560194884 - type: nauc_map_at_10_max value: 2.650169169054273 - type: nauc_map_at_10_std value: 20.28538688095811 - type: nauc_map_at_1_diff1 value: -9.773620541845721 - type: nauc_map_at_1_max value: -20.826181521207314 - type: nauc_map_at_1_std value: -4.296956852843888 - type: nauc_map_at_20_diff1 value: -8.87167940389222 - type: nauc_map_at_20_max value: 2.10698700232561 - type: nauc_map_at_20_std value: 27.745198156102624 - type: nauc_map_at_3_diff1 value: -7.538911646351858 - type: nauc_map_at_3_max value: -14.478056522087797 - type: nauc_map_at_3_std value: 8.250414681220754 - type: nauc_map_at_5_diff1 value: -13.487161747832765 - type: nauc_map_at_5_max value: -8.599591544293332 - type: nauc_map_at_5_std value: 11.302372902416588 - type: nauc_mrr_at_1000_diff1 value: -2.6094212196500024 - type: nauc_mrr_at_1000_max value: 0.5699375390842714 - type: nauc_mrr_at_1000_std value: 11.718583899813463 - type: nauc_mrr_at_100_diff1 value: -2.541354648659204 - type: nauc_mrr_at_100_max value: 0.6358950634973857 - type: nauc_mrr_at_100_std value: 11.698858347680059 - type: nauc_mrr_at_10_diff1 value: -2.389722705210953 - type: nauc_mrr_at_10_max value: 1.2913990554540207 - type: nauc_mrr_at_10_std value: 11.723807899071335 - type: nauc_mrr_at_1_diff1 value: -14.649463318849538 - type: nauc_mrr_at_1_max value: 4.896933275281175 - type: nauc_mrr_at_1_std value: 12.335386931120064 - type: nauc_mrr_at_20_diff1 value: -2.8062407786548187 - type: nauc_mrr_at_20_max value: 0.6676774553193409 - type: nauc_mrr_at_20_std value: 11.92870604036784 - type: nauc_mrr_at_3_diff1 value: 1.901943928764881 - type: nauc_mrr_at_3_max value: -1.8167841832954255 - type: nauc_mrr_at_3_std value: 10.706350197135121 - type: nauc_mrr_at_5_diff1 value: -2.3869587133856207 - type: nauc_mrr_at_5_max value: 0.8324715604145814 - type: nauc_mrr_at_5_std value: 10.794575823199688 - type: nauc_ndcg_at_1000_diff1 value: -5.093074373086338 - type: nauc_ndcg_at_1000_max value: 20.21533175919078 - type: nauc_ndcg_at_1000_std value: 51.83170866559663 - type: nauc_ndcg_at_100_diff1 value: -3.0055499108417907 - type: nauc_ndcg_at_100_max value: 20.495514159769606 - type: nauc_ndcg_at_100_std value: 
42.28335606844607 - type: nauc_ndcg_at_10_diff1 value: -9.31362813850403 - type: nauc_ndcg_at_10_max value: 11.639829087077636 - type: nauc_ndcg_at_10_std value: 23.700161245825203 - type: nauc_ndcg_at_1_diff1 value: -15.333044223926 - type: nauc_ndcg_at_1_max value: -3.07500235529693 - type: nauc_ndcg_at_1_std value: 4.065256907415142 - type: nauc_ndcg_at_20_diff1 value: -5.905354138570243 - type: nauc_ndcg_at_20_max value: 12.085986560371456 - type: nauc_ndcg_at_20_std value: 25.28689139221832 - type: nauc_ndcg_at_3_diff1 value: -10.415519964418783 - type: nauc_ndcg_at_3_max value: 1.6178148316801775 - type: nauc_ndcg_at_3_std value: 13.723458782792381 - type: nauc_ndcg_at_5_diff1 value: -13.702228447081 - type: nauc_ndcg_at_5_max value: 4.662322121311934 - type: nauc_ndcg_at_5_std value: 12.12051616033404 - type: nauc_precision_at_1000_diff1 value: -6.359048701961788 - type: nauc_precision_at_1000_max value: 37.759780329316115 - type: nauc_precision_at_1000_std value: 32.535116564859784 - type: nauc_precision_at_100_diff1 value: -5.409853919421071 - type: nauc_precision_at_100_max value: 40.56178678248257 - type: nauc_precision_at_100_std value: 35.03605883167913 - type: nauc_precision_at_10_diff1 value: -4.54973873421614 - type: nauc_precision_at_10_max value: 31.664713405424006 - type: nauc_precision_at_10_std value: 28.0962707062409 - type: nauc_precision_at_1_diff1 value: -14.649463318849538 - type: nauc_precision_at_1_max value: 4.896933275281175 - type: nauc_precision_at_1_std value: 12.335386931120064 - type: nauc_precision_at_20_diff1 value: -6.368495736631144 - type: nauc_precision_at_20_max value: 25.11537831434175 - type: nauc_precision_at_20_std value: 27.15664396037663 - type: nauc_precision_at_3_diff1 value: -2.9130836707474783 - type: nauc_precision_at_3_max value: 15.82702993935027 - type: nauc_precision_at_3_std value: 17.54204029259862 - type: nauc_precision_at_5_diff1 value: -10.543537476043058 - type: nauc_precision_at_5_max value: 21.81864230117124 - type: nauc_precision_at_5_std value: 14.995823609759157 - type: nauc_recall_at_1000_diff1 value: 0.04484840659316002 - type: nauc_recall_at_1000_max value: 18.57758900605008 - type: nauc_recall_at_1000_std value: 56.51584043897145 - type: nauc_recall_at_100_diff1 value: -2.6465347405870925 - type: nauc_recall_at_100_max value: 10.068426683331985 - type: nauc_recall_at_100_std value: 47.9088546197608 - type: nauc_recall_at_10_diff1 value: -0.735486088869469 - type: nauc_recall_at_10_max value: -3.023335649004929 - type: nauc_recall_at_10_std value: 21.523714385342487 - type: nauc_recall_at_1_diff1 value: -9.773620541845721 - type: nauc_recall_at_1_max value: -20.826181521207314 - type: nauc_recall_at_1_std value: -4.296956852843888 - type: nauc_recall_at_20_diff1 value: 0.06451530856365675 - type: nauc_recall_at_20_max value: -8.796315209894445 - type: nauc_recall_at_20_std value: 25.022537467009226 - type: nauc_recall_at_3_diff1 value: 0.5965893705640235 - type: nauc_recall_at_3_max value: -18.544318702564468 - type: nauc_recall_at_3_std value: 6.682746927691809 - type: nauc_recall_at_5_diff1 value: -8.288707721672186 - type: nauc_recall_at_5_max value: -13.446678885535496 - type: nauc_recall_at_5_std value: 9.069991549746273 - type: ndcg_at_1 value: 37.984 - type: ndcg_at_10 value: 32.273 - type: ndcg_at_100 value: 33.036 - type: ndcg_at_1000 value: 42.653 - type: ndcg_at_20 value: 31.826 - type: ndcg_at_3 value: 33.64 - type: ndcg_at_5 value: 32.012 - type: precision_at_1 value: 55.814 - type: precision_at_10 value: 
42.791000000000004 - type: precision_at_100 value: 23.233 - type: precision_at_1000 value: 4.986 - type: precision_at_20 value: 38.836999999999996 - type: precision_at_3 value: 45.736 - type: precision_at_5 value: 43.721 - type: recall_at_1 value: 1.26 - type: recall_at_10 value: 6.729 - type: recall_at_100 value: 28.89 - type: recall_at_1000 value: 55.614 - type: recall_at_20 value: 11.544 - type: recall_at_3 value: 2.5250000000000004 - type: recall_at_5 value: 3.459 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 84.31828545371637 - type: f1 value: 83.38736418641668 - type: f1_weighted value: 84.43991713227709 - type: main_score value: 84.31828545371637 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 59.261285909712726 - type: f1 value: 43.335913061506425 - type: f1_weighted value: 63.36236251957159 - type: main_score value: 59.261285909712726 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 61.160053799596504 - type: f1 value: 59.94993764150179 - type: f1_weighted value: 61.52985711688419 - type: main_score value: 61.160053799596504 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 65.94485541358439 - type: f1 value: 65.59924532700467 - type: f1_weighted value: 66.16311668638237 - type: main_score value: 65.94485541358439 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P (default) type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: main_score value: 27.595368037128686 - type: v_measure value: 27.595368037128686 - type: v_measure_std value: 1.424950486137983 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S (default) type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: main_score value: 23.37909671745749 - type: v_measure value: 23.37909671745749 - type: v_measure_std value: 1.365434600850458 - task: type: Reranking dataset: name: MTEB MindSmallReranking (default) type: mteb/mind_small config: default split: test revision: 59042f120c80e8afa9cdbb224f67076cec0fc9a7 metrics: - type: main_score value: 28.832500600750205 - type: map value: 28.832500600750205 - type: mrr value: 29.45215573894843 - type: nAUC_map_diff1 value: 15.070236209014364 - type: nAUC_map_max value: -28.95562119862306 - type: nAUC_map_std value: -7.8946917703248385 - type: nAUC_mrr_diff1 value: 14.21025189133838 - type: nAUC_mrr_max value: -22.81583363058566 - type: nAUC_mrr_std value: -6.04422773844616 - task: type: Retrieval dataset: name: MTEB NFCorpus (default) type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: main_score value: 23.152 - type: map_at_1 value: 4.041 - type: map_at_10 value: 7.568 - type: map_at_100 value: 9.533 - type: map_at_1000 value: 10.712000000000002 - type: map_at_20 value: 8.334 - type: 
map_at_3 value: 5.872 - type: map_at_5 value: 6.679 - type: mrr_at_1 value: 30.959752321981426 - type: mrr_at_10 value: 40.11794191360757 - type: mrr_at_100 value: 40.96415295410306 - type: mrr_at_1000 value: 41.01344555816548 - type: mrr_at_20 value: 40.61172778764487 - type: mrr_at_3 value: 37.358101135190914 - type: mrr_at_5 value: 38.95252837977296 - type: nauc_map_at_1000_diff1 value: 33.54119008935543 - type: nauc_map_at_1000_max value: 11.521923728847444 - type: nauc_map_at_1000_std value: 5.7915311031695005 - type: nauc_map_at_100_diff1 value: 35.34097867168893 - type: nauc_map_at_100_max value: 10.293000640541555 - type: nauc_map_at_100_std value: 1.6114123867702002 - type: nauc_map_at_10_diff1 value: 38.2230641089626 - type: nauc_map_at_10_max value: 5.823159065588302 - type: nauc_map_at_10_std value: -4.0377551934635125 - type: nauc_map_at_1_diff1 value: 43.866715304866595 - type: nauc_map_at_1_max value: 0.5010754384304841 - type: nauc_map_at_1_std value: -9.927174446247756 - type: nauc_map_at_20_diff1 value: 36.815049468511454 - type: nauc_map_at_20_max value: 8.252195357694598 - type: nauc_map_at_20_std value: -2.086668040926134 - type: nauc_map_at_3_diff1 value: 42.40019878591813 - type: nauc_map_at_3_max value: 4.410646237192043 - type: nauc_map_at_3_std value: -7.439403782153504 - type: nauc_map_at_5_diff1 value: 40.02451834303521 - type: nauc_map_at_5_max value: 4.135859554033173 - type: nauc_map_at_5_std value: -6.456961843430078 - type: nauc_mrr_at_1000_diff1 value: 36.7884723808441 - type: nauc_mrr_at_1000_max value: 20.848427790316478 - type: nauc_mrr_at_1000_std value: 19.562761101041676 - type: nauc_mrr_at_100_diff1 value: 36.75109367086827 - type: nauc_mrr_at_100_max value: 20.871949905852215 - type: nauc_mrr_at_100_std value: 19.541351397172342 - type: nauc_mrr_at_10_diff1 value: 36.840624198794956 - type: nauc_mrr_at_10_max value: 20.61066223363542 - type: nauc_mrr_at_10_std value: 19.438339856525495 - type: nauc_mrr_at_1_diff1 value: 40.12320713918803 - type: nauc_mrr_at_1_max value: 16.666642505956347 - type: nauc_mrr_at_1_std value: 14.562805568383885 - type: nauc_mrr_at_20_diff1 value: 36.64580049384978 - type: nauc_mrr_at_20_max value: 20.8197921402343 - type: nauc_mrr_at_20_std value: 19.734791294250666 - type: nauc_mrr_at_3_diff1 value: 36.55294783551202 - type: nauc_mrr_at_3_max value: 18.31132758585394 - type: nauc_mrr_at_3_std value: 18.218759713045383 - type: nauc_mrr_at_5_diff1 value: 36.479761476413756 - type: nauc_mrr_at_5_max value: 19.3318833719133 - type: nauc_mrr_at_5_std value: 18.704945221576054 - type: nauc_ndcg_at_1000_diff1 value: 30.42463887019783 - type: nauc_ndcg_at_1000_max value: 25.571139022199485 - type: nauc_ndcg_at_1000_std value: 19.145905780063103 - type: nauc_ndcg_at_100_diff1 value: 29.54893059977665 - type: nauc_ndcg_at_100_max value: 17.931888215362786 - type: nauc_ndcg_at_100_std value: 14.721254007566573 - type: nauc_ndcg_at_10_diff1 value: 28.80709651674213 - type: nauc_ndcg_at_10_max value: 13.60382339701216 - type: nauc_ndcg_at_10_std value: 22.343929058733426 - type: nauc_ndcg_at_1_diff1 value: 40.2604547168291 - type: nauc_ndcg_at_1_max value: 13.523690473148378 - type: nauc_ndcg_at_1_std value: 15.731723260682553 - type: nauc_ndcg_at_20_diff1 value: 27.052259594555288 - type: nauc_ndcg_at_20_max value: 14.72949156111374 - type: nauc_ndcg_at_20_std value: 20.65264608081379 - type: nauc_ndcg_at_3_diff1 value: 31.86880514374516 - type: nauc_ndcg_at_3_max value: 11.146091717211744 - type: nauc_ndcg_at_3_std value: 
19.396513614203307 - type: nauc_ndcg_at_5_diff1 value: 28.832688177959675 - type: nauc_ndcg_at_5_max value: 12.716745963611547 - type: nauc_ndcg_at_5_std value: 20.097816900179126 - type: nauc_precision_at_1000_diff1 value: -5.95583251269089 - type: nauc_precision_at_1000_max value: 7.864853642254841 - type: nauc_precision_at_1000_std value: 42.43587460739192 - type: nauc_precision_at_100_diff1 value: -0.8101434464468288 - type: nauc_precision_at_100_max value: 12.482960665388665 - type: nauc_precision_at_100_std value: 40.29983942211701 - type: nauc_precision_at_10_diff1 value: 14.516673067770029 - type: nauc_precision_at_10_max value: 17.00131648608557 - type: nauc_precision_at_10_std value: 33.816435534051095 - type: nauc_precision_at_1_diff1 value: 40.12320713918803 - type: nauc_precision_at_1_max value: 16.666642505956347 - type: nauc_precision_at_1_std value: 14.562805568383885 - type: nauc_precision_at_20_diff1 value: 7.550704205767464 - type: nauc_precision_at_20_max value: 16.6653194708243 - type: nauc_precision_at_20_std value: 36.01533911600929 - type: nauc_precision_at_3_diff1 value: 25.705703136131085 - type: nauc_precision_at_3_max value: 14.330289120785821 - type: nauc_precision_at_3_std value: 23.553863921052418 - type: nauc_precision_at_5_diff1 value: 18.417359763504866 - type: nauc_precision_at_5_max value: 16.720785167958933 - type: nauc_precision_at_5_std value: 26.478694310948626 - type: nauc_recall_at_1000_diff1 value: 18.182749094845686 - type: nauc_recall_at_1000_max value: 18.65705566700086 - type: nauc_recall_at_1000_std value: 15.976652123107685 - type: nauc_recall_at_100_diff1 value: 20.848414719124168 - type: nauc_recall_at_100_max value: 9.722630796539269 - type: nauc_recall_at_100_std value: 0.6085664546618689 - type: nauc_recall_at_10_diff1 value: 26.315549356381844 - type: nauc_recall_at_10_max value: 5.287792137906281 - type: nauc_recall_at_10_std value: -4.559402898630484 - type: nauc_recall_at_1_diff1 value: 43.866715304866595 - type: nauc_recall_at_1_max value: 0.5010754384304841 - type: nauc_recall_at_1_std value: -9.927174446247756 - type: nauc_recall_at_20_diff1 value: 21.760679130522295 - type: nauc_recall_at_20_max value: 10.435867401402477 - type: nauc_recall_at_20_std value: -2.870896499573999 - type: nauc_recall_at_3_diff1 value: 36.72536047988738 - type: nauc_recall_at_3_max value: 4.727132198726495 - type: nauc_recall_at_3_std value: -7.001349236625052 - type: nauc_recall_at_5_diff1 value: 29.990201626305417 - type: nauc_recall_at_5_max value: 1.9555151957998211 - type: nauc_recall_at_5_std value: -7.3844386270164435 - type: ndcg_at_1 value: 29.412 - type: ndcg_at_10 value: 23.152 - type: ndcg_at_100 value: 22.144 - type: ndcg_at_1000 value: 31.35 - type: ndcg_at_20 value: 21.926000000000002 - type: ndcg_at_3 value: 26.108999999999998 - type: ndcg_at_5 value: 25.008000000000003 - type: precision_at_1 value: 30.959999999999997 - type: precision_at_10 value: 17.058999999999997 - type: precision_at_100 value: 5.985 - type: precision_at_1000 value: 1.867 - type: precision_at_20 value: 13.019 - type: precision_at_3 value: 24.252000000000002 - type: precision_at_5 value: 21.486 - type: recall_at_1 value: 4.041 - type: recall_at_10 value: 11.052 - type: recall_at_100 value: 24.703 - type: recall_at_1000 value: 56.974000000000004 - type: recall_at_20 value: 14.393 - type: recall_at_3 value: 6.739000000000001 - type: recall_at_5 value: 8.527999999999999 - task: type: Retrieval dataset: name: MTEB NQ (default) type: mteb/nq config: default split: test 
revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: main_score value: 19.243 - type: map_at_1 value: 8.476 - type: map_at_10 value: 14.923 - type: map_at_100 value: 16.166 - type: map_at_1000 value: 16.262999999999998 - type: map_at_20 value: 15.633 - type: map_at_3 value: 12.458 - type: map_at_5 value: 13.782 - type: mrr_at_1 value: 9.878331402085747 - type: mrr_at_10 value: 16.654136548400757 - type: mrr_at_100 value: 17.80367931211397 - type: mrr_at_1000 value: 17.884114934492434 - type: mrr_at_20 value: 17.326085402172655 - type: mrr_at_3 value: 14.14156044804941 - type: mrr_at_5 value: 15.52336809578985 - type: nauc_map_at_1000_diff1 value: 18.957886469855463 - type: nauc_map_at_1000_max value: 9.162492690006172 - type: nauc_map_at_1000_std value: 4.697683384857226 - type: nauc_map_at_100_diff1 value: 18.97104499890075 - type: nauc_map_at_100_max value: 9.161552789663508 - type: nauc_map_at_100_std value: 4.640104656084954 - type: nauc_map_at_10_diff1 value: 18.824214120526587 - type: nauc_map_at_10_max value: 8.416234530426614 - type: nauc_map_at_10_std value: 3.1622854981256263 - type: nauc_map_at_1_diff1 value: 20.817556457171833 - type: nauc_map_at_1_max value: 5.72281479959865 - type: nauc_map_at_1_std value: -1.1538921076884687 - type: nauc_map_at_20_diff1 value: 18.89809228556843 - type: nauc_map_at_20_max value: 8.818504246900403 - type: nauc_map_at_20_std value: 4.046315916162477 - type: nauc_map_at_3_diff1 value: 19.443548158566454 - type: nauc_map_at_3_max value: 8.040171381977375 - type: nauc_map_at_3_std value: 1.3664909428229102 - type: nauc_map_at_5_diff1 value: 18.948047131870503 - type: nauc_map_at_5_max value: 8.030083239833372 - type: nauc_map_at_5_std value: 2.0932465891795187 - type: nauc_mrr_at_1000_diff1 value: 17.999043488714815 - type: nauc_mrr_at_1000_max value: 8.975961343791447 - type: nauc_mrr_at_1000_std value: 5.41214531019527 - type: nauc_mrr_at_100_diff1 value: 17.997286292696472 - type: nauc_mrr_at_100_max value: 8.98176390048409 - type: nauc_mrr_at_100_std value: 5.380554404936453 - type: nauc_mrr_at_10_diff1 value: 18.017924894121382 - type: nauc_mrr_at_10_max value: 8.436187270178298 - type: nauc_mrr_at_10_std value: 4.3429546626180775 - type: nauc_mrr_at_1_diff1 value: 19.42324725920124 - type: nauc_mrr_at_1_max value: 5.813177253135981 - type: nauc_mrr_at_1_std value: 0.623588505136798 - type: nauc_mrr_at_20_diff1 value: 18.004638104594935 - type: nauc_mrr_at_20_max value: 8.829594116095835 - type: nauc_mrr_at_20_std value: 4.996153623181649 - type: nauc_mrr_at_3_diff1 value: 18.34188673011434 - type: nauc_mrr_at_3_max value: 8.04955848263908 - type: nauc_mrr_at_3_std value: 2.7463770148884996 - type: nauc_mrr_at_5_diff1 value: 18.15710576876441 - type: nauc_mrr_at_5_max value: 7.906975864218543 - type: nauc_mrr_at_5_std value: 3.434143063102149 - type: nauc_ndcg_at_1000_diff1 value: 17.90599463931129 - type: nauc_ndcg_at_1000_max value: 12.220713351710225 - type: nauc_ndcg_at_1000_std value: 12.16870242222485 - type: nauc_ndcg_at_100_diff1 value: 18.066720901418616 - type: nauc_ndcg_at_100_max value: 12.265976972493421 - type: nauc_ndcg_at_100_std value: 11.041165269563532 - type: nauc_ndcg_at_10_diff1 value: 17.964556255464483 - type: nauc_ndcg_at_10_max value: 9.221841616925076 - type: nauc_ndcg_at_10_std value: 5.327940012848466 - type: nauc_ndcg_at_1_diff1 value: 19.42324725920124 - type: nauc_ndcg_at_1_max value: 5.813177253135981 - type: nauc_ndcg_at_1_std value: 0.623588505136798 - type: nauc_ndcg_at_20_diff1 value: 
18.002564129737443 - type: nauc_ndcg_at_20_max value: 10.419960104065321 - type: nauc_ndcg_at_20_std value: 7.703677617979156 - type: nauc_ndcg_at_3_diff1 value: 18.811972831627603 - type: nauc_ndcg_at_3_max value: 8.322490159345614 - type: nauc_ndcg_at_3_std value: 2.0978218207768586 - type: nauc_ndcg_at_5_diff1 value: 18.219388109405433 - type: nauc_ndcg_at_5_max value: 8.275826301191405 - type: nauc_ndcg_at_5_std value: 3.259197527022326 - type: nauc_precision_at_1000_diff1 value: 6.144404814037684 - type: nauc_precision_at_1000_max value: 17.53740102855067 - type: nauc_precision_at_1000_std value: 33.72365391365075 - type: nauc_precision_at_100_diff1 value: 12.77875393086178 - type: nauc_precision_at_100_max value: 19.621306331527453 - type: nauc_precision_at_100_std value: 27.846320441754568 - type: nauc_precision_at_10_diff1 value: 15.894192193507767 - type: nauc_precision_at_10_max value: 11.184225030108559 - type: nauc_precision_at_10_std value: 11.081411404427586 - type: nauc_precision_at_1_diff1 value: 19.42324725920124 - type: nauc_precision_at_1_max value: 5.813177253135981 - type: nauc_precision_at_1_std value: 0.623588505136798 - type: nauc_precision_at_20_diff1 value: 15.407091479970921 - type: nauc_precision_at_20_max value: 14.51534442723551 - type: nauc_precision_at_20_std value: 17.550861545103665 - type: nauc_precision_at_3_diff1 value: 17.561643392271126 - type: nauc_precision_at_3_max value: 9.386296407523073 - type: nauc_precision_at_3_std value: 4.469734273377907 - type: nauc_precision_at_5_diff1 value: 16.621760108716643 - type: nauc_precision_at_5_max value: 9.225486119103975 - type: nauc_precision_at_5_std value: 6.60309644424765 - type: nauc_recall_at_1000_diff1 value: 13.73758620539813 - type: nauc_recall_at_1000_max value: 25.924626181473847 - type: nauc_recall_at_1000_std value: 48.77621329371596 - type: nauc_recall_at_100_diff1 value: 15.611755205405203 - type: nauc_recall_at_100_max value: 20.8701203082053 - type: nauc_recall_at_100_std value: 28.22869520119306 - type: nauc_recall_at_10_diff1 value: 15.924879565266556 - type: nauc_recall_at_10_max value: 10.392879304441442 - type: nauc_recall_at_10_std value: 8.591273770489796 - type: nauc_recall_at_1_diff1 value: 20.817556457171833 - type: nauc_recall_at_1_max value: 5.72281479959865 - type: nauc_recall_at_1_std value: -1.1538921076884687 - type: nauc_recall_at_20_diff1 value: 15.878910884769823 - type: nauc_recall_at_20_max value: 13.20353825812694 - type: nauc_recall_at_20_std value: 14.293562488536033 - type: nauc_recall_at_3_diff1 value: 17.6964548934301 - type: nauc_recall_at_3_max value: 8.661017445399189 - type: nauc_recall_at_3_std value: 2.563072661506666 - type: nauc_recall_at_5_diff1 value: 16.368469306993973 - type: nauc_recall_at_5_max value: 8.40711587060727 - type: nauc_recall_at_5_std value: 4.526515647566521 - type: ndcg_at_1 value: 9.878 - type: ndcg_at_10 value: 19.243 - type: ndcg_at_100 value: 25.456 - type: ndcg_at_1000 value: 28.083999999999996 - type: ndcg_at_20 value: 21.727 - type: ndcg_at_3 value: 14.163999999999998 - type: ndcg_at_5 value: 16.535 - type: precision_at_1 value: 9.878 - type: precision_at_10 value: 3.6380000000000003 - type: precision_at_100 value: 0.716 - type: precision_at_1000 value: 0.097 - type: precision_at_20 value: 2.396 - type: precision_at_3 value: 6.769 - type: precision_at_5 value: 5.353 - type: recall_at_1 value: 8.476 - type: recall_at_10 value: 31.067 - type: recall_at_100 value: 59.711999999999996 - type: recall_at_1000 value: 79.867 - type: 
recall_at_20 value: 40.422999999999995 - type: recall_at_3 value: 17.485 - type: recall_at_5 value: 23.042 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval (default) type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: main_score value: 77.86 - type: map_at_1 value: 60.541999999999994 - type: map_at_10 value: 73.068 - type: map_at_100 value: 73.856 - type: map_at_1000 value: 73.89099999999999 - type: map_at_20 value: 73.566 - type: map_at_3 value: 70.119 - type: map_at_5 value: 71.873 - type: mrr_at_1 value: 69.77 - type: mrr_at_10 value: 77.34153571428534 - type: mrr_at_100 value: 77.64058625254714 - type: mrr_at_1000 value: 77.6484160920852 - type: mrr_at_20 value: 77.54938063278261 - type: mrr_at_3 value: 75.83499999999958 - type: mrr_at_5 value: 76.77799999999937 - type: nauc_map_at_1000_diff1 value: 70.42051328825711 - type: nauc_map_at_1000_max value: 35.52950073375353 - type: nauc_map_at_1000_std value: -4.2525875732163 - type: nauc_map_at_100_diff1 value: 70.4215631591108 - type: nauc_map_at_100_max value: 35.51981251464836 - type: nauc_map_at_100_std value: -4.271548224950028 - type: nauc_map_at_10_diff1 value: 70.39744464397162 - type: nauc_map_at_10_max value: 35.17410961183995 - type: nauc_map_at_10_std value: -4.997465025620844 - type: nauc_map_at_1_diff1 value: 72.65287424531431 - type: nauc_map_at_1_max value: 29.85669712853355 - type: nauc_map_at_1_std value: -7.995984805529352 - type: nauc_map_at_20_diff1 value: 70.39621088889086 - type: nauc_map_at_20_max value: 35.385338530071856 - type: nauc_map_at_20_std value: -4.540304253182359 - type: nauc_map_at_3_diff1 value: 70.34799577031751 - type: nauc_map_at_3_max value: 33.735165752612154 - type: nauc_map_at_3_std value: -6.288314248613237 - type: nauc_map_at_5_diff1 value: 70.40682478137778 - type: nauc_map_at_5_max value: 34.71020611027348 - type: nauc_map_at_5_std value: -5.6532569989020915 - type: nauc_mrr_at_1000_diff1 value: 71.33300708743174 - type: nauc_mrr_at_1000_max value: 37.58838215284017 - type: nauc_mrr_at_1000_std value: -3.004971199900334 - type: nauc_mrr_at_100_diff1 value: 71.33202739636636 - type: nauc_mrr_at_100_max value: 37.59027333220707 - type: nauc_mrr_at_100_std value: -2.991291730650559 - type: nauc_mrr_at_10_diff1 value: 71.24040296506607 - type: nauc_mrr_at_10_max value: 37.620967352710174 - type: nauc_mrr_at_10_std value: -2.9995148185723646 - type: nauc_mrr_at_1_diff1 value: 72.97756073040875 - type: nauc_mrr_at_1_max value: 36.856024800382805 - type: nauc_mrr_at_1_std value: -5.295770163711124 - type: nauc_mrr_at_20_diff1 value: 71.29886034081495 - type: nauc_mrr_at_20_max value: 37.599580987297266 - type: nauc_mrr_at_20_std value: -2.973667224309623 - type: nauc_mrr_at_3_diff1 value: 71.24266456768551 - type: nauc_mrr_at_3_max value: 37.43275390419413 - type: nauc_mrr_at_3_std value: -3.803618565583205 - type: nauc_mrr_at_5_diff1 value: 71.22352727409451 - type: nauc_mrr_at_5_max value: 37.667564673453725 - type: nauc_mrr_at_5_std value: -3.176984609998285 - type: nauc_ndcg_at_1000_diff1 value: 70.29481477894221 - type: nauc_ndcg_at_1000_max value: 36.78709637968392 - type: nauc_ndcg_at_1000_std value: -1.8965664514629315 - type: nauc_ndcg_at_100_diff1 value: 70.30815982948721 - type: nauc_ndcg_at_100_max value: 36.75533935417366 - type: nauc_ndcg_at_100_std value: -1.6034476028659186 - type: nauc_ndcg_at_10_diff1 value: 69.83927176813567 - type: nauc_ndcg_at_10_max value: 36.14819225785969 - type: nauc_ndcg_at_10_std 
value: -3.0672167929844147 - type: nauc_ndcg_at_1_diff1 value: 72.95468588233986 - type: nauc_ndcg_at_1_max value: 36.95102879374528 - type: nauc_ndcg_at_1_std value: -5.230626449450384 - type: nauc_ndcg_at_20_diff1 value: 70.05211197167847 - type: nauc_ndcg_at_20_max value: 36.39117263415345 - type: nauc_ndcg_at_20_std value: -2.315672757758104 - type: nauc_ndcg_at_3_diff1 value: 69.54718031993843 - type: nauc_ndcg_at_3_max value: 35.35135808159563 - type: nauc_ndcg_at_3_std value: -4.447694597960837 - type: nauc_ndcg_at_5_diff1 value: 69.74297554323091 - type: nauc_ndcg_at_5_max value: 35.87559038131577 - type: nauc_ndcg_at_5_std value: -3.808666991968395 - type: nauc_precision_at_1000_diff1 value: -32.74229162550065 - type: nauc_precision_at_1000_max value: -3.694030619202584 - type: nauc_precision_at_1000_std value: 15.375543044164285 - type: nauc_precision_at_100_diff1 value: -28.90591593532601 - type: nauc_precision_at_100_max value: -0.5117915038170152 - type: nauc_precision_at_100_std value: 15.933222162614957 - type: nauc_precision_at_10_diff1 value: -12.897163879462346 - type: nauc_precision_at_10_max value: 8.911596011476787 - type: nauc_precision_at_10_std value: 11.71430900771452 - type: nauc_precision_at_1_diff1 value: 72.95468588233986 - type: nauc_precision_at_1_max value: 36.95102879374528 - type: nauc_precision_at_1_std value: -5.230626449450384 - type: nauc_precision_at_20_diff1 value: -20.977098757786987 - type: nauc_precision_at_20_max value: 4.6209746297728955 - type: nauc_precision_at_20_std value: 14.520775663368807 - type: nauc_precision_at_3_diff1 value: 14.177875480077756 - type: nauc_precision_at_3_max value: 19.64729584119952 - type: nauc_precision_at_3_std value: 4.677131862919459 - type: nauc_precision_at_5_diff1 value: 0.19581157619052483 - type: nauc_precision_at_5_max value: 14.783150950776703 - type: nauc_precision_at_5_std value: 8.354179376016507 - type: nauc_recall_at_1000_diff1 value: 58.35770817458498 - type: nauc_recall_at_1000_max value: 44.098312711969356 - type: nauc_recall_at_1000_std value: 54.840949153567244 - type: nauc_recall_at_100_diff1 value: 62.790344157192465 - type: nauc_recall_at_100_max value: 37.5264227017106 - type: nauc_recall_at_100_std value: 28.602851594494 - type: nauc_recall_at_10_diff1 value: 62.86776129395384 - type: nauc_recall_at_10_max value: 33.714290382332294 - type: nauc_recall_at_10_std value: 1.6098340254779206 - type: nauc_recall_at_1_diff1 value: 72.65287424531431 - type: nauc_recall_at_1_max value: 29.85669712853355 - type: nauc_recall_at_1_std value: -7.995984805529352 - type: nauc_recall_at_20_diff1 value: 61.83952894148527 - type: nauc_recall_at_20_max value: 33.88950042549727 - type: nauc_recall_at_20_std value: 8.225200296858647 - type: nauc_recall_at_3_diff1 value: 65.43175155104451 - type: nauc_recall_at_3_max value: 31.66111655852123 - type: nauc_recall_at_3_std value: -5.35771405542385 - type: nauc_recall_at_5_diff1 value: 64.43399712070679 - type: nauc_recall_at_5_max value: 33.39756587296581 - type: nauc_recall_at_5_std value: -2.6896475751986797 - type: ndcg_at_1 value: 69.78 - type: ndcg_at_10 value: 77.86 - type: ndcg_at_100 value: 80.16499999999999 - type: ndcg_at_1000 value: 80.632 - type: ndcg_at_20 value: 78.999 - type: ndcg_at_3 value: 74.164 - type: ndcg_at_5 value: 75.992 - type: precision_at_1 value: 69.78 - type: precision_at_10 value: 11.823 - type: precision_at_100 value: 1.426 - type: precision_at_1000 value: 0.153 - type: precision_at_20 value: 6.390999999999999 - type: precision_at_3 
value: 32.24 - type: precision_at_5 value: 21.328 - type: recall_at_1 value: 60.541999999999994 - type: recall_at_10 value: 87.31400000000001 - type: recall_at_100 value: 96.107 - type: recall_at_1000 value: 98.914 - type: recall_at_20 value: 91.184 - type: recall_at_3 value: 76.708 - type: recall_at_5 value: 81.777 - task: type: Clustering dataset: name: MTEB RedditClustering (default) type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: main_score value: 31.372616849342727 - type: v_measure value: 31.372616849342727 - type: v_measure_std value: 4.218475883332416 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P (default) type: mteb/reddit-clustering-p2p config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: main_score value: 42.56167386383679 - type: v_measure value: 42.56167386383679 - type: v_measure_std value: 10.687512676760736 - task: type: Retrieval dataset: name: MTEB SCIDOCS (default) type: mteb/scidocs config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: main_score value: 11.145 - type: map_at_1 value: 2.483 - type: map_at_10 value: 6.1240000000000006 - type: map_at_100 value: 7.373 - type: map_at_1000 value: 7.6240000000000006 - type: map_at_20 value: 6.702 - type: map_at_3 value: 4.386 - type: map_at_5 value: 5.263 - type: mrr_at_1 value: 12.2 - type: mrr_at_10 value: 20.032777777777785 - type: mrr_at_100 value: 21.15625759013742 - type: mrr_at_1000 value: 21.262436849379473 - type: mrr_at_20 value: 20.65437313202795 - type: mrr_at_3 value: 17.20000000000001 - type: mrr_at_5 value: 18.905000000000012 - type: nauc_map_at_1000_diff1 value: 14.04179602177988 - type: nauc_map_at_1000_max value: 13.468822726683898 - type: nauc_map_at_1000_std value: 14.503925484288697 - type: nauc_map_at_100_diff1 value: 13.908535424545734 - type: nauc_map_at_100_max value: 13.162814624955224 - type: nauc_map_at_100_std value: 14.006563832100305 - type: nauc_map_at_10_diff1 value: 15.201590550896787 - type: nauc_map_at_10_max value: 11.381876530403124 - type: nauc_map_at_10_std value: 10.455475922863256 - type: nauc_map_at_1_diff1 value: 19.866068777666875 - type: nauc_map_at_1_max value: 11.770661936677659 - type: nauc_map_at_1_std value: 6.992466368743762 - type: nauc_map_at_20_diff1 value: 14.503428211315777 - type: nauc_map_at_20_max value: 12.621320375392958 - type: nauc_map_at_20_std value: 12.289076337416786 - type: nauc_map_at_3_diff1 value: 18.65066577348665 - type: nauc_map_at_3_max value: 10.924786636490749 - type: nauc_map_at_3_std value: 7.63906799090755 - type: nauc_map_at_5_diff1 value: 16.19410336729788 - type: nauc_map_at_5_max value: 10.331376246769484 - type: nauc_map_at_5_std value: 8.580454671132498 - type: nauc_mrr_at_1000_diff1 value: 16.483814461625617 - type: nauc_mrr_at_1000_max value: 11.013515468591404 - type: nauc_mrr_at_1000_std value: 10.08446979638758 - type: nauc_mrr_at_100_diff1 value: 16.453692879490145 - type: nauc_mrr_at_100_max value: 10.985293302748216 - type: nauc_mrr_at_100_std value: 10.118895973989607 - type: nauc_mrr_at_10_diff1 value: 16.523414526034056 - type: nauc_mrr_at_10_max value: 10.69835720772289 - type: nauc_mrr_at_10_std value: 9.958323674539256 - type: nauc_mrr_at_1_diff1 value: 18.916140092839388 - type: nauc_mrr_at_1_max value: 11.28382524462848 - type: nauc_mrr_at_1_std value: 7.4575969496971775 - type: nauc_mrr_at_20_diff1 value: 16.515625751883075 - type: 
nauc_mrr_at_20_max value: 10.91058938020743 - type: nauc_mrr_at_20_std value: 10.089717651490684 - type: nauc_mrr_at_3_diff1 value: 18.411357551531673 - type: nauc_mrr_at_3_max value: 10.150789848617546 - type: nauc_mrr_at_3_std value: 8.28539472469452 - type: nauc_mrr_at_5_diff1 value: 17.076713001566414 - type: nauc_mrr_at_5_max value: 10.05110647296913 - type: nauc_mrr_at_5_std value: 9.650240066197977 - type: nauc_ndcg_at_1000_diff1 value: 12.46764100509925 - type: nauc_ndcg_at_1000_max value: 16.87436450117945 - type: nauc_ndcg_at_1000_std value: 21.75055602465494 - type: nauc_ndcg_at_100_diff1 value: 11.243822565671014 - type: nauc_ndcg_at_100_max value: 14.672906707981689 - type: nauc_ndcg_at_100_std value: 19.445159161356347 - type: nauc_ndcg_at_10_diff1 value: 13.957173044270302 - type: nauc_ndcg_at_10_max value: 11.533365924682878 - type: nauc_ndcg_at_10_std value: 11.95008133703523 - type: nauc_ndcg_at_1_diff1 value: 18.916140092839388 - type: nauc_ndcg_at_1_max value: 11.28382524462848 - type: nauc_ndcg_at_1_std value: 7.4575969496971775 - type: nauc_ndcg_at_20_diff1 value: 13.080870749498825 - type: nauc_ndcg_at_20_max value: 13.529404104442364 - type: nauc_ndcg_at_20_std value: 14.698639769858543 - type: nauc_ndcg_at_3_diff1 value: 18.07665765209192 - type: nauc_ndcg_at_3_max value: 10.36217623223806 - type: nauc_ndcg_at_3_std value: 8.543586014725516 - type: nauc_ndcg_at_5_diff1 value: 15.753684501523171 - type: nauc_ndcg_at_5_max value: 9.756813674219412 - type: nauc_ndcg_at_5_std value: 9.81629695469914 - type: nauc_precision_at_1000_diff1 value: 6.49181271146623 - type: nauc_precision_at_1000_max value: 20.038684056693548 - type: nauc_precision_at_1000_std value: 31.72411429728146 - type: nauc_precision_at_100_diff1 value: 3.9695256002728554 - type: nauc_precision_at_100_max value: 15.760993964001843 - type: nauc_precision_at_100_std value: 27.819395859684438 - type: nauc_precision_at_10_diff1 value: 10.034023456463208 - type: nauc_precision_at_10_max value: 11.9637202656781 - type: nauc_precision_at_10_std value: 14.548441016814499 - type: nauc_precision_at_1_diff1 value: 18.916140092839388 - type: nauc_precision_at_1_max value: 11.28382524462848 - type: nauc_precision_at_1_std value: 7.4575969496971775 - type: nauc_precision_at_20_diff1 value: 8.352003454018947 - type: nauc_precision_at_20_max value: 15.467696310306941 - type: nauc_precision_at_20_std value: 19.25895187079644 - type: nauc_precision_at_3_diff1 value: 17.691306880096178 - type: nauc_precision_at_3_max value: 10.139076766993572 - type: nauc_precision_at_3_std value: 8.90413050013151 - type: nauc_precision_at_5_diff1 value: 13.143973705786955 - type: nauc_precision_at_5_max value: 8.749700144288802 - type: nauc_precision_at_5_std value: 11.136372587280404 - type: nauc_recall_at_1000_diff1 value: 6.304363095725838 - type: nauc_recall_at_1000_max value: 21.169336566844237 - type: nauc_recall_at_1000_std value: 32.09330745051374 - type: nauc_recall_at_100_diff1 value: 4.165661039426188 - type: nauc_recall_at_100_max value: 16.139180608853625 - type: nauc_recall_at_100_std value: 27.757831244843974 - type: nauc_recall_at_10_diff1 value: 10.495690478197298 - type: nauc_recall_at_10_max value: 12.167459093833328 - type: nauc_recall_at_10_std value: 14.254322684356577 - type: nauc_recall_at_1_diff1 value: 19.866068777666875 - type: nauc_recall_at_1_max value: 11.770661936677659 - type: nauc_recall_at_1_std value: 6.992466368743762 - type: nauc_recall_at_20_diff1 value: 8.776171330802653 - type: 
nauc_recall_at_20_max value: 15.704340191560787 - type: nauc_recall_at_20_std value: 18.923540881805714 - type: nauc_recall_at_3_diff1 value: 18.255717163312028 - type: nauc_recall_at_3_max value: 10.28617567778892 - type: nauc_recall_at_3_std value: 8.4013196603258 - type: nauc_recall_at_5_diff1 value: 13.539959991282688 - type: nauc_recall_at_5_max value: 8.920008079822104 - type: nauc_recall_at_5_std value: 10.908337371904086 - type: ndcg_at_1 value: 12.2 - type: ndcg_at_10 value: 11.145 - type: ndcg_at_100 value: 17.16 - type: ndcg_at_1000 value: 22.429 - type: ndcg_at_20 value: 13.017000000000001 - type: ndcg_at_3 value: 10.204 - type: ndcg_at_5 value: 9.182 - type: precision_at_1 value: 12.2 - type: precision_at_10 value: 5.88 - type: precision_at_100 value: 1.477 - type: precision_at_1000 value: 0.27499999999999997 - type: precision_at_20 value: 4.03 - type: precision_at_3 value: 9.6 - type: precision_at_5 value: 8.24 - type: recall_at_1 value: 2.483 - type: recall_at_10 value: 11.927 - type: recall_at_100 value: 29.947000000000003 - type: recall_at_1000 value: 55.797 - type: recall_at_20 value: 16.322 - type: recall_at_3 value: 5.848 - type: recall_at_5 value: 8.362 - task: type: STS dataset: name: MTEB SICK-R (default) type: mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: cosine_pearson value: 75.44317740316959 - type: cosine_spearman value: 64.66033328975722 - type: euclidean_pearson value: 65.32225778168542 - type: euclidean_spearman value: 58.37263214991483 - type: main_score value: 64.66033328975722 - type: manhattan_pearson value: 65.50832595100484 - type: manhattan_spearman value: 58.70461764721123 - type: pearson value: 75.44317740316959 - type: spearman value: 64.66033328975722 - task: type: STS dataset: name: MTEB STS12 (default) type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cosine_pearson value: 75.27179637130625 - type: cosine_spearman value: 64.63642414925371 - type: euclidean_pearson value: 57.60541573965394 - type: euclidean_spearman value: 54.16675402216673 - type: main_score value: 64.63642414925371 - type: manhattan_pearson value: 57.61916313400251 - type: manhattan_spearman value: 54.187861798376346 - type: pearson value: 75.27179637130625 - type: spearman value: 64.63642414925371 - task: type: STS dataset: name: MTEB STS13 (default) type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cosine_pearson value: 76.65670145290832 - type: cosine_spearman value: 77.9790218678965 - type: euclidean_pearson value: 55.69048686852492 - type: euclidean_spearman value: 56.915278453300886 - type: main_score value: 77.9790218678965 - type: manhattan_pearson value: 56.04078388415448 - type: manhattan_spearman value: 57.24479867581495 - type: pearson value: 76.65670145290832 - type: spearman value: 77.9790218678965 - task: type: STS dataset: name: MTEB STS14 (default) type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cosine_pearson value: 72.6084144444495 - type: cosine_spearman value: 70.08415685571369 - type: euclidean_pearson value: 55.23603920676092 - type: euclidean_spearman value: 54.7951569454598 - type: main_score value: 70.08415685571369 - type: manhattan_pearson value: 55.467477859550954 - type: manhattan_spearman value: 54.97322607753517 - type: pearson value: 72.6084144444495 - type: spearman value: 
70.08415685571369 - task: type: STS dataset: name: MTEB STS15 (default) type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cosine_pearson value: 77.27930178635249 - type: cosine_spearman value: 78.77945492051583 - type: euclidean_pearson value: 56.209330819002254 - type: euclidean_spearman value: 58.59820677825991 - type: main_score value: 78.77945492051583 - type: manhattan_pearson value: 56.5027867921535 - type: manhattan_spearman value: 58.688012882636556 - type: pearson value: 77.27930178635249 - type: spearman value: 78.77945492051583 - task: type: STS dataset: name: MTEB STS16 (default) type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cosine_pearson value: 67.59697831579804 - type: cosine_spearman value: 70.14433798829924 - type: euclidean_pearson value: 47.052560327076165 - type: euclidean_spearman value: 49.043366162737 - type: main_score value: 70.14433798829924 - type: manhattan_pearson value: 47.609083434026 - type: manhattan_spearman value: 49.4861745311838 - type: pearson value: 67.59697831579804 - type: spearman value: 70.14433798829924 - task: type: STS dataset: name: MTEB STS17 (en-tr) type: mteb/sts17-crosslingual-sts config: en-tr split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 6.816344817341537 - type: cosine_spearman value: 3.881927995948511 - type: euclidean_pearson value: -5.709905452714134 - type: euclidean_spearman value: -7.917676805793398 - type: main_score value: 3.881927995948511 - type: manhattan_pearson value: -5.915405149261368 - type: manhattan_spearman value: -6.999675819608648 - type: pearson value: 6.816344817341537 - type: spearman value: 3.881927995948511 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 80.3422878690775 - type: cosine_spearman value: 82.82381067886578 - type: euclidean_pearson value: 64.17718233417268 - type: euclidean_spearman value: 66.43456400831298 - type: main_score value: 82.82381067886578 - type: manhattan_pearson value: 64.18727485692851 - type: manhattan_spearman value: 66.66001258551782 - type: pearson value: 80.3422878690775 - type: spearman value: 82.82381067886578 - task: type: STS dataset: name: MTEB STS17 (it-en) type: mteb/sts17-crosslingual-sts config: it-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 8.532514092953193 - type: cosine_spearman value: 2.925603710568467 - type: euclidean_pearson value: -15.310770996955645 - type: euclidean_spearman value: -15.153258000229735 - type: main_score value: 2.925603710568467 - type: manhattan_pearson value: -15.475725980253795 - type: manhattan_spearman value: -16.680696135577048 - type: pearson value: 8.532514092953193 - type: spearman value: 2.925603710568467 - task: type: STS dataset: name: MTEB STS17 (fr-en) type: mteb/sts17-crosslingual-sts config: fr-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 21.045522855096426 - type: cosine_spearman value: 18.414978055057233 - type: euclidean_pearson value: -6.730015584391847 - type: euclidean_spearman value: -8.874563174643498 - type: main_score value: 18.414978055057233 - type: manhattan_pearson value: -8.373709018568837 - type: manhattan_spearman value: -10.869965671268195 - type: 
pearson value: 21.045522855096426 - type: spearman value: 18.414978055057233 - task: type: STS dataset: name: MTEB STS17 (en-de) type: mteb/sts17-crosslingual-sts config: en-de split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 20.53965530318208 - type: cosine_spearman value: 19.779624251024742 - type: euclidean_pearson value: -8.422957022168687 - type: euclidean_spearman value: -8.441117623652552 - type: main_score value: 19.779624251024742 - type: manhattan_pearson value: -9.022000031615297 - type: manhattan_spearman value: -10.01944986236205 - type: pearson value: 20.53965530318208 - type: spearman value: 19.779624251024742 - task: type: STS dataset: name: MTEB STS17 (nl-en) type: mteb/sts17-crosslingual-sts config: nl-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 16.253197751087466 - type: cosine_spearman value: 10.281262353298493 - type: euclidean_pearson value: -8.327034080178862 - type: euclidean_spearman value: -14.082944243419524 - type: main_score value: 10.281262353298493 - type: manhattan_pearson value: -8.317424880938788 - type: manhattan_spearman value: -13.283091899944917 - type: pearson value: 16.253197751087466 - type: spearman value: 10.281262353298493 - task: type: STS dataset: name: MTEB STS17 (es-en) type: mteb/sts17-crosslingual-sts config: es-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 16.022314822926344 - type: cosine_spearman value: 15.481253181142337 - type: euclidean_pearson value: -7.565040371732333 - type: euclidean_spearman value: -13.24259258219904 - type: main_score value: 15.481253181142337 - type: manhattan_pearson value: -8.469782300955151 - type: manhattan_spearman value: -14.992140842771388 - type: pearson value: 16.022314822926344 - type: spearman value: 15.481253181142337 - task: type: STS dataset: name: MTEB STS17 (en-ar) type: mteb/sts17-crosslingual-sts config: en-ar split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 2.6572077286972204 - type: cosine_spearman value: 11.841223302913573 - type: euclidean_pearson value: 12.35431245166608 - type: euclidean_spearman value: 7.392490905400255 - type: main_score value: 11.841223302913573 - type: manhattan_pearson value: 12.216768710760196 - type: manhattan_spearman value: 7.503351553256482 - type: pearson value: 2.6572077286972204 - type: spearman value: 11.841223302913573 - task: type: STS dataset: name: MTEB STS22 (pl-en) type: mteb/sts22-crosslingual-sts config: pl-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 38.2886612129808 - type: cosine_spearman value: 36.3923475100124 - type: euclidean_pearson value: 37.198162712953945 - type: euclidean_spearman value: 26.567559733905416 - type: main_score value: 36.3923475100124 - type: manhattan_pearson value: 36.744350682243976 - type: manhattan_spearman value: 23.764942648250294 - type: pearson value: 38.2886612129808 - type: spearman value: 36.3923475100124 - task: type: STS dataset: name: MTEB STS22 (zh-en) type: mteb/sts22-crosslingual-sts config: zh-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 2.899096111714493 - type: cosine_spearman value: 3.6484939276246147 - type: euclidean_pearson value: -3.9577702896129683 - type: euclidean_spearman value: 0.5248086125754212 - type: main_score value: 3.6484939276246147 - type: 
manhattan_pearson value: -4.224953170165652 - type: manhattan_spearman value: 0.8064642714775411 - type: pearson value: 2.899096111714493 - type: spearman value: 3.6484939276246147 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 48.83428631897875 - type: cosine_spearman value: 59.208656752528064 - type: euclidean_pearson value: 50.318050030105866 - type: euclidean_spearman value: 59.248536122711904 - type: main_score value: 59.208656752528064 - type: manhattan_pearson value: 49.48047897165944 - type: manhattan_spearman value: 59.05695237288997 - type: pearson value: 48.83428631897875 - type: spearman value: 59.208656752528064 - task: type: STS dataset: name: MTEB STS22 (es-en) type: mteb/sts22-crosslingual-sts config: es-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 18.384789734482013 - type: cosine_spearman value: 20.680582502429395 - type: euclidean_pearson value: 15.257874781731998 - type: euclidean_spearman value: 20.121386973148013 - type: main_score value: 20.680582502429395 - type: manhattan_pearson value: 21.41821286518122 - type: manhattan_spearman value: 27.06116036653386 - type: pearson value: 18.384789734482013 - type: spearman value: 20.680582502429395 - task: type: STS dataset: name: MTEB STS22 (de-en) type: mteb/sts22-crosslingual-sts config: de-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 31.971938782568998 - type: cosine_spearman value: 31.7331263475721 - type: euclidean_pearson value: 28.778244848424606 - type: euclidean_spearman value: 30.339181910659924 - type: main_score value: 31.7331263475721 - type: manhattan_pearson value: 27.763784017642745 - type: manhattan_spearman value: 34.60355364902863 - type: pearson value: 31.971938782568998 - type: spearman value: 31.7331263475721 - task: type: STS dataset: name: MTEB STSBenchmark (default) type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cosine_pearson value: 70.44848272475741 - type: cosine_spearman value: 68.90568918705054 - type: euclidean_pearson value: 55.15791539468034 - type: euclidean_spearman value: 54.524734170607026 - type: main_score value: 68.90568918705054 - type: manhattan_pearson value: 55.57205796134256 - type: manhattan_spearman value: 54.873833418202324 - type: pearson value: 70.44848272475741 - type: spearman value: 68.90568918705054 - task: type: Reranking dataset: name: MTEB SciDocsRR (default) type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: main_score value: 71.51230402119855 - type: map value: 71.51230402119855 - type: mrr value: 90.07260169024875 - type: nAUC_map_diff1 value: 9.457889555724364 - type: nAUC_map_max value: 54.32226718709489 - type: nAUC_map_std value: 64.3833035531696 - type: nAUC_mrr_diff1 value: 46.733117682332065 - type: nAUC_mrr_max value: 73.34626532031795 - type: nAUC_mrr_std value: 66.21738431904454 - task: type: Retrieval dataset: name: MTEB SciFact (default) type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: main_score value: 44.634 - type: map_at_1 value: 31.056 - type: map_at_10 value: 39.797 - type: map_at_100 value: 40.786 - type: map_at_1000 value: 40.849999999999994 - type: map_at_20 value: 
40.39 - type: map_at_3 value: 37.537 - type: map_at_5 value: 38.443 - type: mrr_at_1 value: 32.666666666666664 - type: mrr_at_10 value: 41.20542328042328 - type: mrr_at_100 value: 42.02789454802248 - type: mrr_at_1000 value: 42.08644496488576 - type: mrr_at_20 value: 41.66418048182754 - type: mrr_at_3 value: 39.277777777777764 - type: mrr_at_5 value: 39.99444444444444 - type: nauc_map_at_1000_diff1 value: 48.342044133365135 - type: nauc_map_at_1000_max value: 31.7826592935102 - type: nauc_map_at_1000_std value: 4.00630138549911 - type: nauc_map_at_100_diff1 value: 48.31242764868427 - type: nauc_map_at_100_max value: 31.78012740426817 - type: nauc_map_at_100_std value: 4.060444642374662 - type: nauc_map_at_10_diff1 value: 48.50670999217726 - type: nauc_map_at_10_max value: 31.64653583263081 - type: nauc_map_at_10_std value: 3.7046444258333997 - type: nauc_map_at_1_diff1 value: 51.5249564463508 - type: nauc_map_at_1_max value: 28.73405802025607 - type: nauc_map_at_1_std value: -1.356155719324098 - type: nauc_map_at_20_diff1 value: 48.456709981407556 - type: nauc_map_at_20_max value: 31.741525375093172 - type: nauc_map_at_20_std value: 3.646386443000312 - type: nauc_map_at_3_diff1 value: 48.78563822360489 - type: nauc_map_at_3_max value: 31.058620920212448 - type: nauc_map_at_3_std value: 1.8326719277579946 - type: nauc_map_at_5_diff1 value: 48.458976779652104 - type: nauc_map_at_5_max value: 31.083028206356534 - type: nauc_map_at_5_std value: 3.003794552425189 - type: nauc_mrr_at_1000_diff1 value: 48.45609849024956 - type: nauc_mrr_at_1000_max value: 33.43988504966969 - type: nauc_mrr_at_1000_std value: 6.171555010767457 - type: nauc_mrr_at_100_diff1 value: 48.42274487649728 - type: nauc_mrr_at_100_max value: 33.43866490885405 - type: nauc_mrr_at_100_std value: 6.218254767081506 - type: nauc_mrr_at_10_diff1 value: 48.521379897756056 - type: nauc_mrr_at_10_max value: 33.75224042509809 - type: nauc_mrr_at_10_std value: 6.352815348713422 - type: nauc_mrr_at_1_diff1 value: 52.443480348141506 - type: nauc_mrr_at_1_max value: 31.16915445000578 - type: nauc_mrr_at_1_std value: 1.0921650382724417 - type: nauc_mrr_at_20_diff1 value: 48.50227992314695 - type: nauc_mrr_at_20_max value: 33.538352513692566 - type: nauc_mrr_at_20_std value: 5.888417051206362 - type: nauc_mrr_at_3_diff1 value: 48.69864177007364 - type: nauc_mrr_at_3_max value: 33.6846609645469 - type: nauc_mrr_at_3_std value: 4.737077772688193 - type: nauc_mrr_at_5_diff1 value: 48.762159728901295 - type: nauc_mrr_at_5_max value: 33.470448387039426 - type: nauc_mrr_at_5_std value: 5.88722077710814 - type: nauc_ndcg_at_1000_diff1 value: 47.013205594218704 - type: nauc_ndcg_at_1000_max value: 33.784237785724024 - type: nauc_ndcg_at_1000_std value: 7.895748496610112 - type: nauc_ndcg_at_100_diff1 value: 46.28673967820463 - type: nauc_ndcg_at_100_max value: 33.65163465988455 - type: nauc_ndcg_at_100_std value: 9.496219412572335 - type: nauc_ndcg_at_10_diff1 value: 46.92009617313736 - type: nauc_ndcg_at_10_max value: 33.93070825037539 - type: nauc_ndcg_at_10_std value: 6.905054855789704 - type: nauc_ndcg_at_1_diff1 value: 52.443480348141506 - type: nauc_ndcg_at_1_max value: 31.16915445000578 - type: nauc_ndcg_at_1_std value: 1.0921650382724417 - type: nauc_ndcg_at_20_diff1 value: 46.80080169568735 - type: nauc_ndcg_at_20_max value: 33.85986080942156 - type: nauc_ndcg_at_20_std value: 6.030321172261957 - type: nauc_ndcg_at_3_diff1 value: 47.37837330317871 - type: nauc_ndcg_at_3_max value: 32.70364322442463 - type: nauc_ndcg_at_3_std value: 
3.5796359299979734 - type: nauc_ndcg_at_5_diff1 value: 47.10858556467903 - type: nauc_ndcg_at_5_max value: 32.5432022640858 - type: nauc_ndcg_at_5_std value: 5.587372921117886 - type: nauc_precision_at_1000_diff1 value: -4.00258727241286 - type: nauc_precision_at_1000_max value: 33.62365433015907 - type: nauc_precision_at_1000_std value: 27.912442602898498 - type: nauc_precision_at_100_diff1 value: 12.742459620152669 - type: nauc_precision_at_100_max value: 38.534530486483895 - type: nauc_precision_at_100_std value: 38.335218929783586 - type: nauc_precision_at_10_diff1 value: 34.00766046475659 - type: nauc_precision_at_10_max value: 42.736601309849924 - type: nauc_precision_at_10_std value: 19.11941288765331 - type: nauc_precision_at_1_diff1 value: 52.443480348141506 - type: nauc_precision_at_1_max value: 31.16915445000578 - type: nauc_precision_at_1_std value: 1.0921650382724417 - type: nauc_precision_at_20_diff1 value: 27.71446616326318 - type: nauc_precision_at_20_max value: 40.76177840979056 - type: nauc_precision_at_20_std value: 16.820454969752006 - type: nauc_precision_at_3_diff1 value: 43.24269855398618 - type: nauc_precision_at_3_max value: 38.62040878020923 - type: nauc_precision_at_3_std value: 9.502376433837679 - type: nauc_precision_at_5_diff1 value: 37.91908025291434 - type: nauc_precision_at_5_max value: 38.025934168347106 - type: nauc_precision_at_5_std value: 13.649985136861408 - type: nauc_recall_at_1000_diff1 value: 36.18220825225795 - type: nauc_recall_at_1000_max value: 58.085116998453465 - type: nauc_recall_at_1000_std value: 71.24753209171706 - type: nauc_recall_at_100_diff1 value: 34.15584735503604 - type: nauc_recall_at_100_max value: 37.6512924522105 - type: nauc_recall_at_100_std value: 46.323437983877746 - type: nauc_recall_at_10_diff1 value: 40.95203469532717 - type: nauc_recall_at_10_max value: 38.227625869068206 - type: nauc_recall_at_10_std value: 14.047310749226211 - type: nauc_recall_at_1_diff1 value: 51.5249564463508 - type: nauc_recall_at_1_max value: 28.73405802025607 - type: nauc_recall_at_1_std value: -1.356155719324098 - type: nauc_recall_at_20_diff1 value: 40.36640963259781 - type: nauc_recall_at_20_max value: 38.003316708318394 - type: nauc_recall_at_20_std value: 10.141759227688368 - type: nauc_recall_at_3_diff1 value: 43.45581442601486 - type: nauc_recall_at_3_max value: 34.015330740461444 - type: nauc_recall_at_3_std value: 5.800825635858678 - type: nauc_recall_at_5_diff1 value: 42.4514713019334 - type: nauc_recall_at_5_max value: 33.81098452352482 - type: nauc_recall_at_5_std value: 10.553580332520063 - type: ndcg_at_1 value: 32.667 - type: ndcg_at_10 value: 44.634 - type: ndcg_at_100 value: 49.455 - type: ndcg_at_1000 value: 51.292 - type: ndcg_at_20 value: 46.56 - type: ndcg_at_3 value: 40.006 - type: ndcg_at_5 value: 41.502 - type: precision_at_1 value: 32.667 - type: precision_at_10 value: 6.433 - type: precision_at_100 value: 0.9129999999999999 - type: precision_at_1000 value: 0.108 - type: precision_at_20 value: 3.6830000000000003 - type: precision_at_3 value: 16.222 - type: precision_at_5 value: 10.667 - type: recall_at_1 value: 31.056 - type: recall_at_10 value: 58.4 - type: recall_at_100 value: 81.15599999999999 - type: recall_at_1000 value: 95.633 - type: recall_at_20 value: 65.606 - type: recall_at_3 value: 45.306000000000004 - type: recall_at_5 value: 49.028 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions (default) type: mteb/sprintduplicatequestions-pairclassification config: default split: test 
revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cosine_accuracy value: 99.65445544554456 - type: cosine_accuracy_threshold value: 80.49482107162476 - type: cosine_ap value: 87.93890477556427 - type: cosine_f1 value: 81.80420297283445 - type: cosine_f1_threshold value: 79.41848039627075 - type: cosine_precision value: 83.91167192429022 - type: cosine_recall value: 79.80000000000001 - type: dot_accuracy value: 99.04752475247524 - type: dot_accuracy_threshold value: 105860.55908203125 - type: dot_ap value: 27.092515912441513 - type: dot_f1 value: 33.942093541202674 - type: dot_f1_threshold value: 73063.5009765625 - type: dot_precision value: 30.602409638554217 - type: dot_recall value: 38.1 - type: euclidean_accuracy value: 99.38118811881188 - type: euclidean_accuracy_threshold value: 1505.2160263061523 - type: euclidean_ap value: 62.30186807590662 - type: euclidean_f1 value: 62.507221259387634 - type: euclidean_f1_threshold value: 1578.0624389648438 - type: euclidean_precision value: 74.00820793433653 - type: euclidean_recall value: 54.1 - type: main_score value: 87.93890477556427 - type: manhattan_accuracy value: 99.38118811881188 - type: manhattan_accuracy_threshold value: 18967.7490234375 - type: manhattan_ap value: 62.32090024036487 - type: manhattan_f1 value: 62.29130685089235 - type: manhattan_f1_threshold value: 19725.3662109375 - type: manhattan_precision value: 73.40569877883311 - type: manhattan_recall value: 54.1 - type: max_accuracy value: 99.65445544554456 - type: max_ap value: 87.93890477556427 - type: max_f1 value: 81.80420297283445 - type: max_precision value: 83.91167192429022 - type: max_recall value: 79.80000000000001 - type: similarity_accuracy value: 99.65445544554456 - type: similarity_accuracy_threshold value: 80.49482107162476 - type: similarity_ap value: 87.93890477556427 - type: similarity_f1 value: 81.80420297283445 - type: similarity_f1_threshold value: 79.41848039627075 - type: similarity_precision value: 83.91167192429022 - type: similarity_recall value: 79.80000000000001 - task: type: Clustering dataset: name: MTEB StackExchangeClustering (default) type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: main_score value: 39.351977907677735 - type: v_measure value: 39.351977907677735 - type: v_measure_std value: 4.851580948174954 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P (default) type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: main_score value: 28.718039425336826 - type: v_measure value: 28.718039425336826 - type: v_measure_std value: 1.5093426797012535 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions (default) type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: main_score value: 40.74663316880145 - type: map value: 40.74663316880145 - type: mrr value: 41.04591078855785 - type: nAUC_map_diff1 value: 29.118826541226202 - type: nAUC_map_max value: 14.062845178703915 - type: nAUC_map_std value: -4.409892124802246 - type: nAUC_mrr_diff1 value: 28.22298206074212 - type: nAUC_mrr_max value: 14.631847809852314 - type: nAUC_mrr_std value: -3.6963659779241236 - task: type: Summarization dataset: name: MTEB SummEval (default) type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: 
cosine_pearson value: 30.385829441542235 - type: cosine_spearman value: 31.44709156272413 - type: dot_pearson value: 16.8215086744564 - type: dot_spearman value: 19.63108392674418 - type: main_score value: 31.44709156272413 - type: pearson value: 30.385829441542235 - type: spearman value: 31.44709156272413 - task: type: Retrieval dataset: name: MTEB TRECCOVID (default) type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: main_score value: 50.293 - type: map_at_1 value: 0.161 - type: map_at_10 value: 1.065 - type: map_at_100 value: 5.1819999999999995 - type: map_at_1000 value: 12.165 - type: map_at_20 value: 1.8419999999999999 - type: map_at_3 value: 0.428 - type: map_at_5 value: 0.639 - type: mrr_at_1 value: 68.0 - type: mrr_at_10 value: 77.83333333333333 - type: mrr_at_100 value: 78.05833333333334 - type: mrr_at_1000 value: 78.05833333333334 - type: mrr_at_20 value: 78.05833333333334 - type: mrr_at_3 value: 76.33333333333334 - type: mrr_at_5 value: 77.63333333333334 - type: nauc_map_at_1000_diff1 value: -20.730615524173658 - type: nauc_map_at_1000_max value: 39.839166514300764 - type: nauc_map_at_1000_std value: 58.90233287533496 - type: nauc_map_at_100_diff1 value: -19.5516027613563 - type: nauc_map_at_100_max value: 20.59184199252621 - type: nauc_map_at_100_std value: 47.71226850012564 - type: nauc_map_at_10_diff1 value: -6.646519709068492 - type: nauc_map_at_10_max value: 5.985482445627173 - type: nauc_map_at_10_std value: 23.95348041285318 - type: nauc_map_at_1_diff1 value: 4.168508506022353 - type: nauc_map_at_1_max value: -13.919817882224258 - type: nauc_map_at_1_std value: 6.874058840078575 - type: nauc_map_at_20_diff1 value: -11.782478697292618 - type: nauc_map_at_20_max value: 4.899042508743441 - type: nauc_map_at_20_std value: 30.581099865283782 - type: nauc_map_at_3_diff1 value: -6.40346598043105 - type: nauc_map_at_3_max value: -5.693242415097199 - type: nauc_map_at_3_std value: 12.446665858993656 - type: nauc_map_at_5_diff1 value: -9.611896908622962 - type: nauc_map_at_5_max value: 2.1092593900870904 - type: nauc_map_at_5_std value: 17.067486050785096 - type: nauc_mrr_at_1000_diff1 value: -4.623321883762855 - type: nauc_mrr_at_1000_max value: 12.396808389858892 - type: nauc_mrr_at_1000_std value: 19.4089140622997 - type: nauc_mrr_at_100_diff1 value: -4.623321883762855 - type: nauc_mrr_at_100_max value: 12.396808389858892 - type: nauc_mrr_at_100_std value: 19.4089140622997 - type: nauc_mrr_at_10_diff1 value: -4.647195096446223 - type: nauc_mrr_at_10_max value: 11.952010167473812 - type: nauc_mrr_at_10_std value: 19.233980598143418 - type: nauc_mrr_at_1_diff1 value: 3.5035588418214934 - type: nauc_mrr_at_1_max value: 7.68433418561253 - type: nauc_mrr_at_1_std value: 27.081749706309154 - type: nauc_mrr_at_20_diff1 value: -4.623321883762855 - type: nauc_mrr_at_20_max value: 12.396808389858892 - type: nauc_mrr_at_20_std value: 19.4089140622997 - type: nauc_mrr_at_3_diff1 value: -8.79007316324313 - type: nauc_mrr_at_3_max value: 16.737683188929008 - type: nauc_mrr_at_3_std value: 20.698383219632017 - type: nauc_mrr_at_5_diff1 value: -6.38001261114355 - type: nauc_mrr_at_5_max value: 12.852936867850659 - type: nauc_mrr_at_5_std value: 19.604197982217094 - type: nauc_ndcg_at_1000_diff1 value: -21.248774862042268 - type: nauc_ndcg_at_1000_max value: 37.112470599317845 - type: nauc_ndcg_at_1000_std value: 51.33184264725945 - type: nauc_ndcg_at_100_diff1 value: -21.502469395614007 - type: nauc_ndcg_at_100_max value: 
27.036619615428126 - type: nauc_ndcg_at_100_std value: 44.231578927541634 - type: nauc_ndcg_at_10_diff1 value: -14.03544852632917 - type: nauc_ndcg_at_10_max value: 23.239909164511957 - type: nauc_ndcg_at_10_std value: 33.99420048710792 - type: nauc_ndcg_at_1_diff1 value: -2.3076073755106807 - type: nauc_ndcg_at_1_max value: 4.093124497231777 - type: nauc_ndcg_at_1_std value: 15.907190965157136 - type: nauc_ndcg_at_20_diff1 value: -17.80684642201865 - type: nauc_ndcg_at_20_max value: 22.356390424376404 - type: nauc_ndcg_at_20_std value: 36.58074650432794 - type: nauc_ndcg_at_3_diff1 value: -14.03425485397747 - type: nauc_ndcg_at_3_max value: 22.900831825285497 - type: nauc_ndcg_at_3_std value: 27.595172162485166 - type: nauc_ndcg_at_5_diff1 value: -15.9847552107415 - type: nauc_ndcg_at_5_max value: 23.610018767111146 - type: nauc_ndcg_at_5_std value: 31.76023082670396 - type: nauc_precision_at_1000_diff1 value: -18.48606922966335 - type: nauc_precision_at_1000_max value: 40.09384944686907 - type: nauc_precision_at_1000_std value: 48.495329491382236 - type: nauc_precision_at_100_diff1 value: -20.913247230868738 - type: nauc_precision_at_100_max value: 30.275117729529665 - type: nauc_precision_at_100_std value: 48.03556929860873 - type: nauc_precision_at_10_diff1 value: -10.864615585413775 - type: nauc_precision_at_10_max value: 25.99575088281568 - type: nauc_precision_at_10_std value: 40.69762382986124 - type: nauc_precision_at_1_diff1 value: 3.5035588418214934 - type: nauc_precision_at_1_max value: 7.68433418561253 - type: nauc_precision_at_1_std value: 27.081749706309154 - type: nauc_precision_at_20_diff1 value: -18.231210614806834 - type: nauc_precision_at_20_max value: 24.49814133520953 - type: nauc_precision_at_20_std value: 42.08404347300964 - type: nauc_precision_at_3_diff1 value: -13.464379703587404 - type: nauc_precision_at_3_max value: 25.641765547809243 - type: nauc_precision_at_3_std value: 38.4713052310818 - type: nauc_precision_at_5_diff1 value: -13.230437128979991 - type: nauc_precision_at_5_max value: 27.40564849793124 - type: nauc_precision_at_5_std value: 40.16046051448101 - type: nauc_recall_at_1000_diff1 value: -19.18062584482319 - type: nauc_recall_at_1000_max value: 42.54485632066801 - type: nauc_recall_at_1000_std value: 51.96242599629826 - type: nauc_recall_at_100_diff1 value: -16.015292729450607 - type: nauc_recall_at_100_max value: 15.503701664732361 - type: nauc_recall_at_100_std value: 40.715412297495895 - type: nauc_recall_at_10_diff1 value: -3.543521347350292 - type: nauc_recall_at_10_max value: 2.800389319981027 - type: nauc_recall_at_10_std value: 17.827330080949704 - type: nauc_recall_at_1_diff1 value: 4.168508506022353 - type: nauc_recall_at_1_max value: -13.919817882224258 - type: nauc_recall_at_1_std value: 6.874058840078575 - type: nauc_recall_at_20_diff1 value: -11.824453402321485 - type: nauc_recall_at_20_max value: 0.1600646646737227 - type: nauc_recall_at_20_std value: 22.770804511027276 - type: nauc_recall_at_3_diff1 value: -12.153358693322797 - type: nauc_recall_at_3_max value: -0.8091436535543653 - type: nauc_recall_at_3_std value: 8.9194053611711 - type: nauc_recall_at_5_diff1 value: -11.666886982290547 - type: nauc_recall_at_5_max value: 6.265898355667695 - type: nauc_recall_at_5_std value: 12.278654991544476 - type: ndcg_at_1 value: 63.0 - type: ndcg_at_10 value: 50.293 - type: ndcg_at_100 value: 35.410000000000004 - type: ndcg_at_1000 value: 31.432 - type: ndcg_at_20 value: 47.281 - type: ndcg_at_3 value: 56.285 - type: ndcg_at_5 value: 53.931 
- type: precision_at_1 value: 68.0 - type: precision_at_10 value: 53.2 - type: precision_at_100 value: 37.1 - type: precision_at_1000 value: 15.02 - type: precision_at_20 value: 50.4 - type: precision_at_3 value: 61.333000000000006 - type: precision_at_5 value: 58.4 - type: recall_at_1 value: 0.161 - type: recall_at_10 value: 1.322 - type: recall_at_100 value: 8.129999999999999 - type: recall_at_1000 value: 30.206 - type: recall_at_20 value: 2.4410000000000003 - type: recall_at_3 value: 0.47000000000000003 - type: recall_at_5 value: 0.741 - task: type: Retrieval dataset: name: MTEB Touche2020 (default) type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: main_score value: 15.790000000000001 - type: map_at_1 value: 1.277 - type: map_at_10 value: 5.24 - type: map_at_100 value: 9.093 - type: map_at_1000 value: 10.529 - type: map_at_20 value: 6.658 - type: map_at_3 value: 2.157 - type: map_at_5 value: 3.343 - type: mrr_at_1 value: 16.3265306122449 - type: mrr_at_10 value: 31.72902494331065 - type: mrr_at_100 value: 32.884769060167756 - type: mrr_at_1000 value: 32.884769060167756 - type: mrr_at_20 value: 32.43515354859892 - type: mrr_at_3 value: 25.170068027210885 - type: mrr_at_5 value: 29.455782312925173 - type: nauc_map_at_1000_diff1 value: -3.1586324258036242 - type: nauc_map_at_1000_max value: -39.772263097287876 - type: nauc_map_at_1000_std value: -28.342876126008754 - type: nauc_map_at_100_diff1 value: -3.665739300943786 - type: nauc_map_at_100_max value: -40.456706346113755 - type: nauc_map_at_100_std value: -31.83791676517853 - type: nauc_map_at_10_diff1 value: -10.54672586401425 - type: nauc_map_at_10_max value: -44.54144890865597 - type: nauc_map_at_10_std value: -31.333904561832384 - type: nauc_map_at_1_diff1 value: -1.818488518052858 - type: nauc_map_at_1_max value: -32.722843529731556 - type: nauc_map_at_1_std value: -21.190183458683524 - type: nauc_map_at_20_diff1 value: -7.663503040209939 - type: nauc_map_at_20_max value: -45.78706394536052 - type: nauc_map_at_20_std value: -37.180872568708374 - type: nauc_map_at_3_diff1 value: 3.1417424508761047 - type: nauc_map_at_3_max value: -35.02057696281606 - type: nauc_map_at_3_std value: -20.37396361107187 - type: nauc_map_at_5_diff1 value: -6.496367073339007 - type: nauc_map_at_5_max value: -42.29452042433092 - type: nauc_map_at_5_std value: -24.800465182129475 - type: nauc_mrr_at_1000_diff1 value: -1.5444742737338488 - type: nauc_mrr_at_1000_max value: -35.18049506603158 - type: nauc_mrr_at_1000_std value: -17.20070057367544 - type: nauc_mrr_at_100_diff1 value: -1.5444742737338488 - type: nauc_mrr_at_100_max value: -35.18049506603158 - type: nauc_mrr_at_100_std value: -17.20070057367544 - type: nauc_mrr_at_10_diff1 value: -1.4960263964114606 - type: nauc_mrr_at_10_max value: -34.873555341513196 - type: nauc_mrr_at_10_std value: -16.45999571373483 - type: nauc_mrr_at_1_diff1 value: -12.189270623334648 - type: nauc_mrr_at_1_max value: -28.579192532694353 - type: nauc_mrr_at_1_std value: -11.459855962330844 - type: nauc_mrr_at_20_diff1 value: -0.7609385123928843 - type: nauc_mrr_at_20_max value: -36.171296772870264 - type: nauc_mrr_at_20_std value: -18.301749458938232 - type: nauc_mrr_at_3_diff1 value: -0.2301847707610496 - type: nauc_mrr_at_3_max value: -30.80499065218597 - type: nauc_mrr_at_3_std value: -12.834712397437057 - type: nauc_mrr_at_5_diff1 value: -1.091903500739519 - type: nauc_mrr_at_5_max value: -35.21876224937198 - type: nauc_mrr_at_5_std value: 
-17.333123783071695 - type: nauc_ndcg_at_1000_diff1 value: 12.341092315492014 - type: nauc_ndcg_at_1000_max value: -28.424531285639727 - type: nauc_ndcg_at_1000_std value: -9.684075691376377 - type: nauc_ndcg_at_100_diff1 value: 8.032059858306981 - type: nauc_ndcg_at_100_max value: -39.255306271493794 - type: nauc_ndcg_at_100_std value: -27.422782475792946 - type: nauc_ndcg_at_10_diff1 value: 1.9001557608396897 - type: nauc_ndcg_at_10_max value: -38.83941665073113 - type: nauc_ndcg_at_10_std value: -28.76852256482092 - type: nauc_ndcg_at_1_diff1 value: -4.204891374640269 - type: nauc_ndcg_at_1_max value: -26.231692867275363 - type: nauc_ndcg_at_1_std value: -9.148523397771116 - type: nauc_ndcg_at_20_diff1 value: 2.518885050551834 - type: nauc_ndcg_at_20_max value: -43.189561788855066 - type: nauc_ndcg_at_20_std value: -39.682465095289366 - type: nauc_ndcg_at_3_diff1 value: 8.562730018960336 - type: nauc_ndcg_at_3_max value: -30.96991992817989 - type: nauc_ndcg_at_3_std value: -15.69208953358737 - type: nauc_ndcg_at_5_diff1 value: 3.5840568515154994 - type: nauc_ndcg_at_5_max value: -36.53566191704277 - type: nauc_ndcg_at_5_std value: -20.55546310613085 - type: nauc_precision_at_1000_diff1 value: 10.175495027635408 - type: nauc_precision_at_1000_max value: 44.31983167314647 - type: nauc_precision_at_1000_std value: 47.40763634184565 - type: nauc_precision_at_100_diff1 value: 9.792026002798021 - type: nauc_precision_at_100_max value: -10.304602707011593 - type: nauc_precision_at_100_std value: 0.63567352854242 - type: nauc_precision_at_10_diff1 value: -1.442177091120521 - type: nauc_precision_at_10_max value: -35.92859526255585 - type: nauc_precision_at_10_std value: -26.896073645887427 - type: nauc_precision_at_1_diff1 value: -12.189270623334648 - type: nauc_precision_at_1_max value: -28.579192532694353 - type: nauc_precision_at_1_std value: -11.459855962330844 - type: nauc_precision_at_20_diff1 value: 2.2669891060284955 - type: nauc_precision_at_20_max value: -36.92227467517464 - type: nauc_precision_at_20_std value: -45.42095329154831 - type: nauc_precision_at_3_diff1 value: 10.90702129082723 - type: nauc_precision_at_3_max value: -33.745641123222846 - type: nauc_precision_at_3_std value: -16.27280451843888 - type: nauc_precision_at_5_diff1 value: 0.6068634276790119 - type: nauc_precision_at_5_max value: -39.046167694767696 - type: nauc_precision_at_5_std value: -22.166228729900332 - type: nauc_recall_at_1000_diff1 value: 7.096875956365895 - type: nauc_recall_at_1000_max value: -12.075390522906268 - type: nauc_recall_at_1000_std value: 27.949986052890573 - type: nauc_recall_at_100_diff1 value: 2.9637437003660403 - type: nauc_recall_at_100_max value: -37.470315822402604 - type: nauc_recall_at_100_std value: -20.07639190396403 - type: nauc_recall_at_10_diff1 value: -10.55130289262311 - type: nauc_recall_at_10_max value: -47.33072741498118 - type: nauc_recall_at_10_std value: -37.47543950737137 - type: nauc_recall_at_1_diff1 value: -1.818488518052858 - type: nauc_recall_at_1_max value: -32.722843529731556 - type: nauc_recall_at_1_std value: -21.190183458683524 - type: nauc_recall_at_20_diff1 value: -3.3497197311334665 - type: nauc_recall_at_20_max value: -46.86976432359865 - type: nauc_recall_at_20_std value: -46.35186722318313 - type: nauc_recall_at_3_diff1 value: 10.548810696046742 - type: nauc_recall_at_3_max value: -36.36954645321451 - type: nauc_recall_at_3_std value: -20.082840698599032 - type: nauc_recall_at_5_diff1 value: -7.380160291481995 - type: nauc_recall_at_5_max value: 
-47.34539862970469 - type: nauc_recall_at_5_std value: -31.4779670684682 - type: ndcg_at_1 value: 12.245000000000001 - type: ndcg_at_10 value: 15.790000000000001 - type: ndcg_at_100 value: 26.016000000000002 - type: ndcg_at_1000 value: 38.249 - type: ndcg_at_20 value: 16.947000000000003 - type: ndcg_at_3 value: 13.027 - type: ndcg_at_5 value: 14.968 - type: precision_at_1 value: 16.326999999999998 - type: precision_at_10 value: 16.326999999999998 - type: precision_at_100 value: 6.204 - type: precision_at_1000 value: 1.402 - type: precision_at_20 value: 12.959000000000001 - type: precision_at_3 value: 15.645999999999999 - type: precision_at_5 value: 17.551 - type: recall_at_1 value: 1.277 - type: recall_at_10 value: 11.657 - type: recall_at_100 value: 37.804 - type: recall_at_1000 value: 74.81 - type: recall_at_20 value: 17.813000000000002 - type: recall_at_3 value: 2.96 - type: recall_at_5 value: 6.196 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification (default) type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 67.6025390625 - type: ap value: 12.228064322266615 - type: ap_weighted value: 12.228064322266615 - type: f1 value: 51.545356210775054 - type: f1_weighted value: 74.8674960323055 - type: main_score value: 67.6025390625 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification (default) type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 50.019807583474815 - type: f1 value: 50.18981751190431 - type: f1_weighted value: 49.516664117140984 - type: main_score value: 50.019807583474815 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering (default) type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: main_score value: 25.377920343280692 - type: v_measure value: 25.377920343280692 - type: v_measure_std value: 1.8170084203745749 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 (default) type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cosine_accuracy value: 81.86803361745247 - type: cosine_accuracy_threshold value: 78.45279574394226 - type: cosine_ap value: 58.60727455967692 - type: cosine_f1 value: 56.49663137632338 - type: cosine_f1_threshold value: 71.48932218551636 - type: cosine_precision value: 51.92392746572313 - type: cosine_recall value: 61.952506596306065 - type: dot_accuracy value: 77.82678667222984 - type: dot_accuracy_threshold value: 108382.0556640625 - type: dot_ap value: 39.48633290656697 - type: dot_f1 value: 45.00564789414233 - type: dot_f1_threshold value: 52613.238525390625 - type: dot_precision value: 32.41515574151557 - type: dot_recall value: 73.58839050131925 - type: euclidean_accuracy value: 79.50765929546402 - type: euclidean_accuracy_threshold value: 1738.2392883300781 - type: euclidean_ap value: 46.636574638537574 - type: euclidean_f1 value: 46.01173657900456 - type: euclidean_f1_threshold value: 2300.4941940307617 - type: euclidean_precision value: 39.11677753141168 - type: euclidean_recall value: 55.85751978891821 - type: main_score value: 58.60727455967692 - type: manhattan_accuracy value: 79.63283066102403 - type: manhattan_accuracy_threshold value: 22057.51953125 - type: manhattan_ap value: 
47.091319468141194 - type: manhattan_f1 value: 46.32838283828383 - type: manhattan_f1_threshold value: 29230.82275390625 - type: manhattan_precision value: 38.02912292583813 - type: manhattan_recall value: 59.26121372031662 - type: max_accuracy value: 81.86803361745247 - type: max_ap value: 58.60727455967692 - type: max_f1 value: 56.49663137632338 - type: max_precision value: 51.92392746572313 - type: max_recall value: 73.58839050131925 - type: similarity_accuracy value: 81.86803361745247 - type: similarity_accuracy_threshold value: 78.45279574394226 - type: similarity_ap value: 58.60727455967692 - type: similarity_f1 value: 56.49663137632338 - type: similarity_f1_threshold value: 71.48932218551636 - type: similarity_precision value: 51.92392746572313 - type: similarity_recall value: 61.952506596306065 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus (default) type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cosine_accuracy value: 86.24209259906081 - type: cosine_accuracy_threshold value: 72.93155193328857 - type: cosine_ap value: 79.47942287203573 - type: cosine_f1 value: 71.80506478209658 - type: cosine_f1_threshold value: 67.74765849113464 - type: cosine_precision value: 68.78702397743301 - type: cosine_recall value: 75.10009239297814 - type: dot_accuracy value: 81.52869949935965 - type: dot_accuracy_threshold value: 37410.955810546875 - type: dot_ap value: 66.02217146026899 - type: dot_f1 value: 62.43364213594255 - type: dot_f1_threshold value: 30416.412353515625 - type: dot_precision value: 56.82435419693046 - type: dot_recall value: 69.27163535571297 - type: euclidean_accuracy value: 82.72596732254434 - type: euclidean_accuracy_threshold value: 1420.4879760742188 - type: euclidean_ap value: 68.52026211185712 - type: euclidean_f1 value: 60.637769715485966 - type: euclidean_f1_threshold value: 1657.3232650756836 - type: euclidean_precision value: 60.15761157838902 - type: euclidean_recall value: 61.12565445026178 - type: main_score value: 79.47942287203573 - type: manhattan_accuracy value: 82.68133659331703 - type: manhattan_accuracy_threshold value: 17628.411865234375 - type: manhattan_ap value: 68.57038227508352 - type: manhattan_f1 value: 60.69790481781823 - type: manhattan_f1_threshold value: 21103.260803222656 - type: manhattan_precision value: 57.981072555205046 - type: manhattan_recall value: 63.68186017862642 - type: max_accuracy value: 86.24209259906081 - type: max_ap value: 79.47942287203573 - type: max_f1 value: 71.80506478209658 - type: max_precision value: 68.78702397743301 - type: max_recall value: 75.10009239297814 - type: similarity_accuracy value: 86.24209259906081 - type: similarity_accuracy_threshold value: 72.93155193328857 - type: similarity_ap value: 79.47942287203573 - type: similarity_f1 value: 71.80506478209658 - type: similarity_f1_threshold value: 67.74765849113464 - type: similarity_precision value: 68.78702397743301 - type: similarity_recall value: 75.10009239297814 --- # minishlab/M2V_base_glove Model Card This [Model2Vec](https://github.com/MinishLab/model2vec) model is a distilled version of the [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) Sentence Transformer. It uses static embeddings, allowing text embeddings to be computed orders of magnitude faster on both GPU and CPU. It is designed for applications where computational resources are limited or where real-time performance is critical. 
## Installation Install model2vec using pip: ``` pip install model2vec ``` ## Usage Load this model using the `from_pretrained` method: ```python from model2vec import StaticModel # Load a pretrained Model2Vec model model = StaticModel.from_pretrained("minishlab/M2V_base_glove") # Compute text embeddings embeddings = model.encode(["Example sentence"]) ``` Alternatively, you can distill your own model using the `distill` method: ```python from model2vec.distill import distill # Choose a Sentence Transformer model model_name = "BAAI/bge-base-en-v1.5" # Distill the model m2v_model = distill(model_name=model_name, pca_dims=256) # Save the model m2v_model.save_pretrained("m2v_model") ``` ## How it works Model2vec creates a small, fast, and powerful model that outperforms other static embedding models by a large margin on all tasks we could find, while being much faster to create than traditional static embedding models such as GloVe. Best of all, you don't need any data to distill a model using Model2Vec. It works by passing a vocabulary through a sentence transformer model, then reducing the dimensionality of the resulting embeddings using PCA, and finally weighting the embeddings using zipf weighting. During inference, we simply take the mean of all token embeddings occurring in a sentence. ## Additional Resources - [All Model2Vec models on the hub](https://huggingface.co/models?library=model2vec) - [Model2Vec Repo](https://github.com/MinishLab/model2vec) - [Model2Vec Results](https://github.com/MinishLab/model2vec?tab=readme-ov-file#results) - [Model2Vec Tutorials](https://github.com/MinishLab/model2vec/tree/main/tutorials) ## Library Authors Model2Vec was developed by the [Minish Lab](https://github.com/MinishLab) team consisting of [Stephan Tulkens](https://github.com/stephantul) and [Thomas van Dongen](https://github.com/Pringled). ## Citation Please cite the [Model2Vec repository](https://github.com/MinishLab/model2vec) if you use this model in your work. ``` @software{minishlab2024model2vec, authors = {Stephan Tulkens, Thomas van Dongen}, title = {Model2Vec: Turn any Sentence Transformer into a Small Fast Model}, year = {2024}, url = {https://github.com/MinishLab/model2vec}, } ```
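Read alongside the "How it works" section above, the following is a minimal, illustrative sketch of the distillation recipe (teacher encoding of a vocabulary, PCA reduction, Zipf-style weighting, mean pooling at inference). The toy vocabulary, the tiny PCA dimension, and the exact weighting formula are assumptions for illustration only; the actual implementation lives in the `model2vec` package shown above.

```python
import numpy as np
from sklearn.decomposition import PCA
from sentence_transformers import SentenceTransformer

# 1) Encode every token of a (toy) vocabulary with the teacher Sentence Transformer.
teacher = SentenceTransformer("BAAI/bge-base-en-v1.5")
vocab = ["example", "sentence", "embedding", "static"]  # placeholder vocabulary
token_vectors = teacher.encode(vocab)  # shape: (len(vocab), 768)

# 2) Reduce dimensionality with PCA (tiny n_components so the toy example runs).
reduced = PCA(n_components=2).fit_transform(token_vectors)

# 3) Apply a Zipf-style, rank-based weight per token (illustrative formula).
weights = np.log1p(np.arange(1, len(vocab) + 1))
static_vectors = reduced * weights[:, None]

# 4) Inference: a sentence embedding is the mean of its tokens' static vectors.
index = {tok: i for i, tok in enumerate(vocab)}
tokens = ["example", "sentence"]
sentence_embedding = static_vectors[[index[t] for t in tokens]].mean(axis=0)
print(sentence_embedding.shape)
```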
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
louisbrulenaudet/lemone-embed-pro
louisbrulenaudet
sentence-similarity
[ "sentence-transformers", "safetensors", "new", "sentence-similarity", "feature-extraction", "generated_from_trainer", "dataset_size:303863", "loss:CachedGISTEmbedLoss", "legal", "taxation", "fiscalité", "tax", "custom_code", "fr", "dataset:louisbrulenaudet/code-impots", "dataset:louisbrulenaudet/code-impots-annexe-iv", "dataset:louisbrulenaudet/code-impots-annexe-iii", "dataset:louisbrulenaudet/code-impots-annexe-i", "dataset:louisbrulenaudet/code-impots-annexe-ii", "dataset:louisbrulenaudet/livre-procedures-fiscales", "dataset:louisbrulenaudet/bofip", "arxiv:1908.10084", "base_model:Alibaba-NLP/gte-multilingual-base", "base_model:finetune:Alibaba-NLP/gte-multilingual-base", "license:apache-2.0", "model-index", "co2_eq_emissions", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
2024-09-29T23:29:08
2024-10-02T22:56:30
128
2
--- base_model: Alibaba-NLP/gte-multilingual-base datasets: - louisbrulenaudet/code-impots - louisbrulenaudet/code-impots-annexe-iv - louisbrulenaudet/code-impots-annexe-iii - louisbrulenaudet/code-impots-annexe-i - louisbrulenaudet/code-impots-annexe-ii - louisbrulenaudet/livre-procedures-fiscales - louisbrulenaudet/bofip language: - fr library_name: sentence-transformers license: apache-2.0 metrics: - cosine_accuracy@1 - cosine_accuracy@3 - cosine_accuracy@5 - cosine_accuracy@10 - cosine_precision@1 - cosine_precision@3 - cosine_precision@5 - cosine_precision@10 - cosine_recall@1 - cosine_recall@3 - cosine_recall@5 - cosine_recall@10 - cosine_ndcg@10 - cosine_mrr@10 - cosine_map@100 - dot_accuracy@1 - dot_accuracy@3 - dot_accuracy@5 - dot_accuracy@10 - dot_precision@1 - dot_precision@3 - dot_precision@5 - dot_precision@10 - dot_recall@1 - dot_recall@3 - dot_recall@5 - dot_recall@10 - dot_ndcg@10 - dot_mrr@10 - dot_map@100 pipeline_tag: sentence-similarity tags: - sentence-transformers - sentence-similarity - feature-extraction - generated_from_trainer - dataset_size:303863 - loss:CachedGISTEmbedLoss - legal - taxation - fiscalité - tax widget: - source_sentence: Élucider la signification de 'navire de plaisance' d'après l'article 217 undecies du Code général des impôts et détailler les différents types d'investissements concernés. sentences: - Selon l'article 217 undecies du Code général des impôts, pour bénéficier de la déduction fiscale, les investissements doivent être réalisés sous forme de souscriptions au capital de sociétés qui gèrent des concessions de service public local. Ces investissements doivent être spécifiquement orientés vers des activités productives assignées à ces concessions pour une durée minimale de cinq ans. En outre, ces concessions doivent opérer exclusivement dans des secteurs éligibles situés dans les départements ou collectivités d'outre-mer, contribuant ainsi au développement économique des territoires ultramarins. - Dans le contexte de l'article 217 undecies du Code général des impôts, un 'navire de plaisance' désigne une embarcation spécifiquement utilisée pour des activités de loisir, excluant ainsi toute utilisation professionnelle telle que la pêche ou le transport. Les investissements pertinents pouvant bénéficier de cet agrément incluent non seulement l'achat ou la construction de ces navires, mais aussi leur utilisation dans des activités de tourisme comme la location sous différentes formes, les voyages organisés et la pêche de loisir, ainsi que les investissements dans les infrastructures et équipements nécessaires à ces activités touristiques. - L'article R. 257 B-1 du Livre des Procédures Fiscales organise les modalités pratiques relatives à l'information du contribuable quant à la mise en œuvre d'une compensation fiscale de recouvrement. Cette disposition confère au contribuable le droit d'être informé en amont de la réalisation de la compensation. Ce dispositif implique que le comptable public est tenu de communiquer avec le contribuable, afin de l'éclairer sur le processus et les conséquences de cette opération. L'information préalable joue un rôle crucial, car elle accorde au redevable l'opportunité de comprendre les ajustements à venir sur ses comptes vis-à-vis de l'administration fiscale. - source_sentence: Énumérer en détail les informations requises par l'article 50-00 G, Annexe IV du Code général des impôts concernant la déclaration récapitulative mensuelle que doit établir l'entrepositaire agréé. 
sentences: - 'Pour se conformer aux dispositions imposées par l''article 50-00 G, Annexe IV du Code général des impôts, l''entrepositaire agréé est tenu de rédiger une déclaration récapitulative mensuelle distincte pour chaque entrepôt fiscal suspensif des droits d''accises qu''il gère. Une telle déclaration doit comprendre : les noms ou la dénomination de l''entreprise, l''adresse du siège social ou du principal établissement, le numéro d''identification de l''entrepôt fiscal, l''adresse de l''entrepôt fiscal, le lieu de tenue de la comptabilité matières, l''année et le mois concernés par la déclaration, la date et le lieu d''établissement de la déclaration ainsi que la signature et le cachet de l''entreprise. Elle doit également indiquer la raison sociale de la caution ou, le cas échéant, la mention ''Dispense''. Au besoin, elle peut comporter des mentions relatives aux comptes d''âge ou de vieillissement, les références aux contrats d''achat qui exigent un visa de l''établissement mentionné dans l''article L. 621-1 du Code rural et de la pêche maritime, les numéros d''enregistrement des contrats d''achat et les numéros des déclarations de transactions soumises aux interprofessions, ainsi que l''avis de blocage, l''engagement de garantie ou la mainlevée de warrant agricole ou de l''engagement de garantie, selon l''applicabilité à chaque cas particulier.' - L'intégration de Mayotte dans le champ d'application du Code général des impôts, rendant ainsi les entreprises mahoraises éligibles au crédit d'impôt pour investissements productifs outre-mer, a été actée par le législateur au travers de la loi n° 2010-1487 du 7 décembre 2010. Cette loi a élevé Mayotte au statut de département, étendant à ce titre l'ensemble des dispositions du CGI. L'ordonnance n° 2013-837 du 19 septembre 2013 est venue quant à elle expliciter les adaptations nécessaires au code des douanes et au CGI pour Mayotte. Conséquence directe de ces textes, les entreprises exerçant à Mayotte peuvent prétendre au crédit d'impôt en vigueur dès le 1er janvier 2014, conformément à l'article 244 quater W du CGI. - Le relevé des frais généraux prévu à l'article 54 quater du Code général des impôts doit comporter les renseignements propres à l'exercice pour lequel il est fourni et ceux qui se rapportent à l'exercice précédent. - source_sentence: Quels sont les éléments que doit contenir la demande déposée auprès de la direction générale des finances publiques pour que les sociétés, compagnies ou entreprises françaises puissent bénéficier du régime fiscal prévu pour l'émission de séries spéciales d'obligations à l'étranger ? sentences: - Pour le premier exercice comptable de l'entreprise d'une durée de quatorze mois, le plafond standard d'exonération de 61 000 € est ajusté au prorata de la durée, donnant un nouveau plafond d'exonération de 71 166 € (61 000 € x 14/12). - Pour être admises à bénéficier du régime fiscal prévu au 1 de l'article 131 ter du Code général des impôts, les sociétés, compagnies ou entreprises françaises qui se proposent d'émettre à l'étranger des séries spéciales d'obligations, doivent déposer au préalable une demande spéciale à la direction générale des finances publiques. Cette demande indique la date et les conditions de l'émission ainsi que le nombre, le montant et les numéros des titres à émettre. 
- Pour atténuer certaines contraintes fiscales, les sociétés étrangères exerçant une activité sur le territoire français ont la possibilité de restreindre le montant de la retenue à la source, qu'elles sont tenues de verser en vertu de l'article 115 quinquies du Code général des impôts, à une somme équivalente à l'impôt définitivement dû. Cette réduction prend en considération les prévisions de distributions de dividendes et le lieu de résidence fiscale des actionnaires. Pour bénéficier de ce dispositif, lesdites sociétés doivent expressément formuler une demande en référence à la directive pertinente et la joindre à la déclaration n° 2777-D-SD. Cela implique un suivi rigoureux de l'impact des distributions réelles et des domiciliations des bénéficiaires afin d'éviter les insuffisances de versement, sous peine de régularisation ultérieure accompagnée de l'intérêt de retard selon les articles 1727 et 1729 du même code. - source_sentence: Expliquez comment est organisé le recouvrement de l'impôt sur la fortune immobilière en référence aux modalités décrites dans l'article 1658 du Code général des impôts. sentences: - 'Dans le contexte de la déclaration des revenus fonciers, la société doit émettre une attestation annuelle qui doit être remise à chaque associé au plus tard le deuxième jour ouvré après le 1er mai, selon les modalités fixées par le décret n° 2009-316 du 20 mars 2009. Cette attestation revêt une importance cruciale puisqu''elle permet aux associés de renseigner correctement leur déclaration de revenus fonciers via l''imprimé n° 2044 spécial. Elle doit recenser des informations précises : l''identité et l''adresse de l''associé, la détention des parts au cours de l''année, le respect des conditions de loyer, le montant de l''amortissement ainsi que le revenu net foncier qui découle des parts de l''associé, tant dans le régime de droit commun qu''en incluant la déduction liée à l''amortissement.' - Le recouvrement de l'impôt sur la fortune immobilière s'orchestre conformément aux dispositions disposées dans l'article 1658 du Code général des impôts. Cela implique que les techniques, les procédures, ainsi que les moyens d'exécution prévus pour le recouvrement de cet impôt sont alignés sur ceux établis pour l'impôt sur le revenu. - L'article 981 du Code général des impôts établit que les normes régissant les droits d'enregistrement, sauf spécification contraire, sont adaptées à la gestion de l'impôt sur la fortune immobilière. Cela signifie que les méthodes de contrôle, telles que les audits et inspections, ainsi que les procédures de règlement des contentieux sont extensibles à l'impôt sur la fortune immobilière. Cette approche garantit une uniformité des pratiques administratives fiscales, facilitant ainsi une application homogène et cohérente des lois fiscales relatives à la fortune immobilière. - source_sentence: Exposer les modalités de dérogation au secret fiscal autorisant le juge à demander des documents fiscaux nécessaires pour résoudre un litige, en vertu de l'article L. 143 du Livre des Procédures Fiscales. sentences: - Selon les dispositions du Bulletin officiel des finances publiques-instructions administratives, spécifiquement le BOI-DJC-SECR-10-20-50, le procureur de la République détient le droit, dans le contexte de toute investigation judiciaire, qu'elle relève d'une enquête de flagrance, préliminaire ou autre, de solliciter des renseignements ou documents essentiels à l'enquête auprès de l'administration fiscale. 
Cette sollicitation peut être adressée directement ou via un officier de police judiciaire agissant sur une réquisition du procureur. Conformément à l'article L.141 A du Livre des procédures fiscales, le secret fiscal ne constitue pas un frein légal à la transmission des informations ou documents exigés par le procureur. - L'article 199 novovicies du Code général des impôts dispose de modalités de réduction d'impôt spécifiques pour les transactions d'acquisition et de construction durant les années 2023 et 2024. En 2023, les bénéfices de cette réduction s'établissent à 4,5 % pour la première phase triennale et à 2,5 % pour la seconde. Pour les opérations effectuées en 2024, les réductions offertes sont de 3 % pendant la première période triennale et de 2 % pour la suivante. Ces pourcentages se rapportent aux acquisitions non mentionnées au 5° du B du I ainsi qu'aux constructions référencées au 1° du B du I, avec nécessité que le permis de construire ait été délivré durant l'année correspondante. - Conformément aux dispositions de l'article L. 143 du Livre des Procédures Fiscales, le secret fiscal peut être levé dans le cadre d'un litige par décision du juge. Cette mesure vise à autoriser la présentation de documents fiscaux, jugés utiles par le magistrat pour trancher une affaire. La levée de ce secret est toutefois soumise à une interprétation stricte, de sorte que seuls les documents réellement susceptibles d'éclairer le juge sur l'étendue du préjudice des individus impliqués peuvent être divulgués. Les renseignements qui n'ont de pertinence que pour des questions périphériques de la procédure ou qui se rapportent uniquement à l'application d'un jugement déjà prononcé sont exclus de cette possibilité de communication. co2_eq_emissions: emissions: 2036.3553910202609 energy_consumed: 5.516569338938681 source: codecarbon training_type: fine-tuning on_cloud: false cpu_model: AMD EPYC 9V84 96-Core Processor ram_total_size: 314.68053817749023 hours_used: 9.954 hardware_used: 1 x NVIDIA H100 NVL model-index: - name: SentenceTransformer based on Alibaba-NLP/gte-multilingual-base results: - task: type: information-retrieval name: Information Retrieval dataset: name: Lemone type: Lemone metrics: - type: cosine_accuracy@1 value: 0.9736673089274245 name: Cosine Accuracy@1 - type: cosine_accuracy@3 value: 0.9916506101477199 name: Cosine Accuracy@3 - type: cosine_accuracy@5 value: 0.993577392421323 name: Cosine Accuracy@5 - type: cosine_accuracy@10 value: 0.9967886962106616 name: Cosine Accuracy@10 - type: cosine_precision@1 value: 0.9736673089274245 name: Cosine Precision@1 - type: cosine_precision@3 value: 0.33055020338257335 name: Cosine Precision@3 - type: cosine_precision@5 value: 0.1987154784842646 name: Cosine Precision@5 - type: cosine_precision@10 value: 0.09967886962106615 name: Cosine Precision@10 - type: cosine_recall@1 value: 0.9736673089274245 name: Cosine Recall@1 - type: cosine_recall@3 value: 0.9916506101477199 name: Cosine Recall@3 - type: cosine_recall@5 value: 0.993577392421323 name: Cosine Recall@5 - type: cosine_recall@10 value: 0.9967886962106616 name: Cosine Recall@10 - type: cosine_ndcg@10 value: 0.9865226900324854 name: Cosine Ndcg@10 - type: cosine_mrr@10 value: 0.9830947793375538 name: Cosine Mrr@10 - type: cosine_map@100 value: 0.9832069316895906 name: Cosine Map@100 - type: dot_accuracy@1 value: 0.9736673089274245 name: Dot Accuracy@1 - type: dot_accuracy@3 value: 0.9916506101477199 name: Dot Accuracy@3 - type: dot_accuracy@5 value: 0.993577392421323 name: Dot Accuracy@5 
- type: dot_accuracy@10 value: 0.9967886962106616 name: Dot Accuracy@10 - type: dot_precision@1 value: 0.9736673089274245 name: Dot Precision@1 - type: dot_precision@3 value: 0.33055020338257335 name: Dot Precision@3 - type: dot_precision@5 value: 0.1987154784842646 name: Dot Precision@5 - type: dot_precision@10 value: 0.09967886962106615 name: Dot Precision@10 - type: dot_recall@1 value: 0.9736673089274245 name: Dot Recall@1 - type: dot_recall@3 value: 0.9916506101477199 name: Dot Recall@3 - type: dot_recall@5 value: 0.993577392421323 name: Dot Recall@5 - type: dot_recall@10 value: 0.9967886962106616 name: Dot Recall@10 - type: dot_ndcg@10 value: 0.9865226900324854 name: Dot Ndcg@10 - type: dot_mrr@10 value: 0.9830947793375538 name: Dot Mrr@10 - type: dot_map@100 value: 0.9832069316895906 name: Dot Map@100 --- <img src="assets/thumbnail.webp"> # Lemone-Embed: A Series of Fine-Tuned Embedding Models for French Taxation <div class="not-prose bg-gradient-to-r from-gray-50-to-white text-gray-900 border" style="border-radius: 8px; padding: 0.5rem 1rem;"> <p>This series is made up of 7 models, 3 basic models of different sizes trained on 1 epoch, 3 models trained on 2 epochs making up the Boost series and a Pro model with a non-Roberta architecture.</p> </div> This sentence transformers model, specifically designed for French taxation, has been fine-tuned on a dataset comprising 43 million tokens, integrating a blend of semi-synthetic and fully synthetic data generated by GPT-4 Turbo and Llama 3.1 70B, which have been further refined through evol-instruction tuning and manual curation. The model is tailored to meet the specific demands of information retrieval across large-scale tax-related corpora, supporting the implementation of production-ready Retrieval-Augmented Generation (RAG) applications. Its primary purpose is to enhance the efficiency and accuracy of legal processes in the taxation domain, with an emphasis on delivering consistent performance in real-world settings, while also contributing to advancements in legal natural language processing research. This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [Alibaba-NLP/gte-multilingual-base](https://huggingface.co/Alibaba-NLP/gte-multilingual-base). It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more. 
## Model Details ### Model Description - **Model Type:** Sentence Transformer - **Base model:** [Alibaba-NLP/gte-multilingual-base](https://huggingface.co/Alibaba-NLP/gte-multilingual-base) <!-- at revision 7fc06782350c1a83f88b15dd4b38ef853d3b8503 --> - **Maximum Sequence Length:** 8192 tokens - **Output Dimensionality:** 768 tokens - **Similarity Function:** Cosine Similarity - **Developed by:** Louis Brulé Naudet - **Funded by:** Microsoft for Startups - **Shared by:** Louis Brulé Naudet - **Model type:** Sentence Transformers - **Language(s) (NLP):** FR - **License:** Apache 2 - **Finetuned from model:** [Alibaba-NLP/gte-multilingual-base](https://huggingface.co/Alibaba-NLP/gte-multilingual-base) ### Model Sources - **Documentation:** [Sentence Transformers Documentation](https://sbert.net) - **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers) - **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers) ### Full Model Architecture ``` SentenceTransformer( (0): Transformer({'max_seq_length': 8192, 'do_lower_case': False}) with Transformer model: NewModel (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True}) (2): Normalize() ) ``` ## Usage ### Direct Usage (Sentence Transformers) First install the Sentence Transformers library: ```bash pip install -U sentence-transformers ``` Then you can load this model and run inference. ```python from sentence_transformers import SentenceTransformer # Download from the 🤗 Hub model = SentenceTransformer("louisbrulenaudet/lemone-gte-embed-max") # Run inference sentences = [ "Exposer les modalités de dérogation au secret fiscal autorisant le juge à demander des documents fiscaux nécessaires pour résoudre un litige, en vertu de l'article L. 143 du Livre des Procédures Fiscales.", "Conformément aux dispositions de l'article L. 143 du Livre des Procédures Fiscales, le secret fiscal peut être levé dans le cadre d'un litige par décision du juge. Cette mesure vise à autoriser la présentation de documents fiscaux, jugés utiles par le magistrat pour trancher une affaire. La levée de ce secret est toutefois soumise à une interprétation stricte, de sorte que seuls les documents réellement susceptibles d'éclairer le juge sur l'étendue du préjudice des individus impliqués peuvent être divulgués. Les renseignements qui n'ont de pertinence que pour des questions périphériques de la procédure ou qui se rapportent uniquement à l'application d'un jugement déjà prononcé sont exclus de cette possibilité de communication.", "Selon les dispositions du Bulletin officiel des finances publiques-instructions administratives, spécifiquement le BOI-DJC-SECR-10-20-50, le procureur de la République détient le droit, dans le contexte de toute investigation judiciaire, qu'elle relève d'une enquête de flagrance, préliminaire ou autre, de solliciter des renseignements ou documents essentiels à l'enquête auprès de l'administration fiscale. Cette sollicitation peut être adressée directement ou via un officier de police judiciaire agissant sur une réquisition du procureur. 
Conformément à l'article L.141 A du Livre des procédures fiscales, le secret fiscal ne constitue pas un frein légal à la transmission des informations ou documents exigés par le procureur.", ] embeddings = model.encode(sentences) print(embeddings.shape) # [3, 768] # Get the similarity scores for the embeddings similarities = model.similarity(embeddings, embeddings) print(similarities.shape) # [3, 3] ``` <!-- ### Direct Usage (Transformers) <details><summary>Click to see the direct usage in Transformers</summary> </details> --> <!-- ### Downstream Usage (Sentence Transformers) You can finetune this model on your own dataset. <details><summary>Click to expand</summary> </details> --> <!-- ### Out-of-Scope Use *List how the model may foreseeably be misused and address what users ought not to do with the model.* --> ## Evaluation ### Metrics #### Information Retrieval * Dataset: `Lemone` * Evaluated with [<code>InformationRetrievalEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.InformationRetrievalEvaluator) | Metric | Value | |:--------------------|:-----------| | cosine_accuracy@1 | 0.9737 | | cosine_accuracy@3 | 0.9917 | | cosine_accuracy@5 | 0.9936 | | cosine_accuracy@10 | 0.9968 | | cosine_precision@1 | 0.9737 | | cosine_precision@3 | 0.3306 | | cosine_precision@5 | 0.1987 | | cosine_precision@10 | 0.0997 | | cosine_recall@1 | 0.9737 | | cosine_recall@3 | 0.9917 | | cosine_recall@5 | 0.9936 | | cosine_recall@10 | 0.9968 | | cosine_ndcg@10 | 0.9865 | | cosine_mrr@10 | 0.9831 | | **cosine_map@100** | **0.9832** | | dot_accuracy@1 | 0.9737 | | dot_accuracy@3 | 0.9917 | | dot_accuracy@5 | 0.9936 | | dot_accuracy@10 | 0.9968 | | dot_precision@1 | 0.9737 | | dot_precision@3 | 0.3306 | | dot_precision@5 | 0.1987 | | dot_precision@10 | 0.0997 | | dot_recall@1 | 0.9737 | | dot_recall@3 | 0.9917 | | dot_recall@5 | 0.9936 | | dot_recall@10 | 0.9968 | | dot_ndcg@10 | 0.9865 | | dot_mrr@10 | 0.9831 | | dot_map@100 | 0.9832 | <!-- ## Bias, Risks and Limitations *What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.* --> <!-- ### Recommendations *What are recommendations with respect to the foreseeable issues? 
For example, filtering explicit content.* --> ## Training Details ### Training Dataset * Size: 303,863 training samples * Columns: <code>query</code>, <code>positive</code>, and <code>negative</code> * Approximate statistics based on the first 1000 samples: | | query | positive | negative | |:--------|:------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------| | type | string | string | string | | details | <ul><li>min: 27 tokens</li><li>mean: 51.44 tokens</li><li>max: 137 tokens</li></ul> | <ul><li>min: 39 tokens</li><li>mean: 197.8 tokens</li><li>max: 1607 tokens</li></ul> | <ul><li>min: 48 tokens</li><li>mean: 224.41 tokens</li><li>max: 2735 tokens</li></ul> | * Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters: ```json {'guide': SentenceTransformer( (0): Transformer({'max_seq_length': 8192, 'do_lower_case': False}) with Transformer model: NewModel (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True}) (2): Normalize() ), 'temperature': 0.01} ``` ### Training Hyperparameters #### Non-Default Hyperparameters - `eval_strategy`: steps - `per_device_train_batch_size`: 128 - `learning_rate`: 2e-05 - `num_train_epochs`: 1 - `warmup_ratio`: 0.1 - `fp16`: True - `batch_sampler`: no_duplicates #### All Hyperparameters <details><summary>Click to expand</summary> - `overwrite_output_dir`: False - `do_predict`: False - `eval_strategy`: steps - `prediction_loss_only`: True - `per_device_train_batch_size`: 128 - `per_device_eval_batch_size`: 8 - `per_gpu_train_batch_size`: None - `per_gpu_eval_batch_size`: None - `gradient_accumulation_steps`: 1 - `eval_accumulation_steps`: None - `torch_empty_cache_steps`: None - `learning_rate`: 2e-05 - `weight_decay`: 0.0 - `adam_beta1`: 0.9 - `adam_beta2`: 0.999 - `adam_epsilon`: 1e-08 - `max_grad_norm`: 1.0 - `num_train_epochs`: 1 - `max_steps`: -1 - `lr_scheduler_type`: linear - `lr_scheduler_kwargs`: {} - `warmup_ratio`: 0.1 - `warmup_steps`: 0 - `log_level`: passive - `log_level_replica`: warning - `log_on_each_node`: True - `logging_nan_inf_filter`: True - `save_safetensors`: True - `save_on_each_node`: False - `save_only_model`: False - `restore_callback_states_from_checkpoint`: False - `no_cuda`: False - `use_cpu`: False - `use_mps_device`: False - `seed`: 42 - `data_seed`: None - `jit_mode_eval`: False - `use_ipex`: False - `bf16`: False - `fp16`: True - `fp16_opt_level`: O1 - `half_precision_backend`: auto - `bf16_full_eval`: False - `fp16_full_eval`: False - `tf32`: None - `local_rank`: 0 - `ddp_backend`: None - `tpu_num_cores`: None - `tpu_metrics_debug`: False - `debug`: [] - `dataloader_drop_last`: False - `dataloader_num_workers`: 0 - `dataloader_prefetch_factor`: None - `past_index`: -1 - `disable_tqdm`: False - `remove_unused_columns`: True - `label_names`: None - `load_best_model_at_end`: False - `ignore_data_skip`: False - `fsdp`: [] - `fsdp_min_num_params`: 0 - `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False} - `fsdp_transformer_layer_cls_to_wrap`: None - 
`accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None} - `deepspeed`: None - `label_smoothing_factor`: 0.0 - `optim`: adamw_torch - `optim_args`: None - `adafactor`: False - `group_by_length`: False - `length_column_name`: length - `ddp_find_unused_parameters`: None - `ddp_bucket_cap_mb`: None - `ddp_broadcast_buffers`: False - `dataloader_pin_memory`: True - `dataloader_persistent_workers`: False - `skip_memory_metrics`: True - `use_legacy_prediction_loop`: False - `push_to_hub`: False - `resume_from_checkpoint`: None - `hub_model_id`: None - `hub_strategy`: every_save - `hub_private_repo`: False - `hub_always_push`: False - `gradient_checkpointing`: False - `gradient_checkpointing_kwargs`: None - `include_inputs_for_metrics`: False - `eval_do_concat_batches`: True - `fp16_backend`: auto - `push_to_hub_model_id`: None - `push_to_hub_organization`: None - `mp_parameters`: - `auto_find_batch_size`: False - `full_determinism`: False - `torchdynamo`: None - `ray_scope`: last - `ddp_timeout`: 1800 - `torch_compile`: False - `torch_compile_backend`: None - `torch_compile_mode`: None - `dispatch_batches`: None - `split_batches`: None - `include_tokens_per_second`: False - `include_num_input_tokens_seen`: False - `neftune_noise_alpha`: None - `optim_target_modules`: None - `batch_eval_metrics`: False - `eval_on_start`: False - `eval_use_gather_object`: False - `batch_sampler`: no_duplicates - `multi_dataset_batch_sampler`: proportional </details> ### Environmental Impact Carbon emissions were measured using [CodeCarbon](https://github.com/mlco2/codecarbon). - **Energy Consumed**: 5.517 kWh - **Carbon Emitted**: 2.036 kg of CO2 - **Hours Used**: 9.954 hours ### Training Hardware - **On Cloud**: No - **GPU Model**: 1 x NVIDIA H100 NVL - **CPU Model**: AMD EPYC 9V84 96-Core Processor - **RAM Size**: 314.68 GB ### Framework Versions - Python: 3.10.12 - Sentence Transformers: 3.1.1 - Transformers: 4.44.2 - PyTorch: 2.3.0+cu121 - Accelerate: 0.33.0 - Datasets: 2.21.0 - Tokenizers: 0.19.1 ## Citation ### BibTeX #### Sentence Transformers ```bibtex @inproceedings{reimers-2019-sentence-bert, title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks", author = "Reimers, Nils and Gurevych, Iryna", booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing", month = "11", year = "2019", publisher = "Association for Computational Linguistics", url = "https://arxiv.org/abs/1908.10084", } ``` If you use this code in your research, please use the following BibTeX entry. ```BibTeX @misc{louisbrulenaudet2024, author = {Louis Brulé Naudet}, title = {Lemone-Embed: A Series of Fine-Tuned Embedding Models for French Taxation}, year = {2024} howpublished = {\url{https://huggingface.co/datasets/louisbrulenaudet/lemone-embed-pro}}, } ``` ## Feedback If you have any feedback, please reach out at [[email protected]](mailto:[email protected]).
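As a companion to the hyperparameter listing above, here is a condensed, hedged sketch of what the training setup roughly looks like with `CachedGISTEmbedLoss` and a guide model. The three-example in-memory dataset and the output path are placeholders; the real run used the full 303,863 query/positive/negative triplets and the exact hyperparameters and hardware reported in this card.

```python
from datasets import Dataset
from sentence_transformers import (
    SentenceTransformer,
    SentenceTransformerTrainer,
    SentenceTransformerTrainingArguments,
)
from sentence_transformers.losses import CachedGISTEmbedLoss
from sentence_transformers.training_args import BatchSamplers

model = SentenceTransformer("Alibaba-NLP/gte-multilingual-base", trust_remote_code=True)
guide = SentenceTransformer("Alibaba-NLP/gte-multilingual-base", trust_remote_code=True)

# Columns mirror the training dataset described above: query / positive / negative.
train_dataset = Dataset.from_dict({
    "query": ["Définir la notion de navire de plaisance."],
    "positive": ["Un navire de plaisance désigne une embarcation utilisée pour le loisir."],
    "negative": ["Le secret fiscal peut être levé par décision du juge."],
})

loss = CachedGISTEmbedLoss(model, guide, temperature=0.01)

args = SentenceTransformerTrainingArguments(
    output_dir="lemone-embed-sketch",           # placeholder output directory
    num_train_epochs=1,
    per_device_train_batch_size=128,
    learning_rate=2e-5,
    warmup_ratio=0.1,
    fp16=True,                                  # as reported above; requires a GPU
    batch_sampler=BatchSamplers.NO_DUPLICATES,
)

trainer = SentenceTransformerTrainer(model=model, args=args, train_dataset=train_dataset, loss=loss)
trainer.train()
```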
[ "TEXT_CLASSIFICATION" ]
[ "CAS" ]
StivenLancheros/roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_Augmented_ES
StivenLancheros
token-classification
[ "transformers", "pytorch", "tensorboard", "roberta", "token-classification", "generated_from_trainer", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2022-03-21T20:16:37
2022-03-21T22:25:59
127
0
--- license: apache-2.0 metrics: - precision - recall - f1 - accuracy tags: - generated_from_trainer model-index: - name: roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_Augmented_ES results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_Augmented_ES This model is a fine-tuned version of [PlanTL-GOB-ES/roberta-base-biomedical-clinical-es](https://huggingface.co/PlanTL-GOB-ES/roberta-base-biomedical-clinical-es) on the CRAFT dataset. It achieves the following results on the evaluation set: - Loss: 0.2224 - Precision: 0.8298 - Recall: 0.8306 - F1: 0.8302 - Accuracy: 0.9659 ## Model description This model performs Named Entity Recognition for 6 entity tags: Sequence, Cell, Protein, Gene, Taxon, and Chemical from the CRAFT(Colorado Richly Annotated Full Text) Corpus in English. Entity tags have been normalized and replaced from the original three letter code to a full name e.g. B-Protein, I-Chemical. This model is trained on augmented data created using Entity Replacement. 20% of the entities were replaced using a list of entities for each entity tag obtained from the official ontologies for each entity class. Three datasets (original, augmented, MT translated CRAFT) were concatenated. ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 3e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 ### Training results | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy | |:-------------:|:-----:|:-----:|:---------------:|:---------:|:------:|:------:|:--------:| | 0.0624 | 1.0 | 4078 | 0.1844 | 0.8002 | 0.7923 | 0.7963 | 0.9607 | | 0.0284 | 2.0 | 8156 | 0.1937 | 0.8394 | 0.7988 | 0.8186 | 0.9637 | | 0.0118 | 3.0 | 12234 | 0.2007 | 0.8285 | 0.8232 | 0.8258 | 0.9649 | | 0.0043 | 4.0 | 16312 | 0.2224 | 0.8298 | 0.8306 | 0.8302 | 0.9659 | ### Framework versions - Transformers 4.17.0 - Pytorch 1.10.0+cu111 - Datasets 2.0.0 - Tokenizers 0.11.6
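A short inference sketch for the NER model described above, assuming the standard `transformers` token-classification pipeline; the example sentence is illustrative only and the predicted entity spans depend on the model.

```python
from transformers import pipeline

ner = pipeline(
    "token-classification",
    model="StivenLancheros/roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_Augmented_ES",
    aggregation_strategy="simple",  # merge sub-word pieces into whole entity spans
)

print(ner("El gen BRCA1 codifica una proteína supresora de tumores en células humanas."))
```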
[ "NAMED_ENTITY_RECOGNITION" ]
[ "CRAFT" ]
StivenLancheros/roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_AugmentedTransfer_EN
StivenLancheros
token-classification
[ "transformers", "pytorch", "tensorboard", "roberta", "token-classification", "generated_from_trainer", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2022-03-21T21:04:02
2022-03-21T22:10:39
127
0
--- license: apache-2.0 metrics: - precision - recall - f1 - accuracy tags: - generated_from_trainer model-index: - name: roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_AugmentedTransfer_EN results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_AugmentedTransfer_EN This model is a fine-tuned version of [StivenLancheros/roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_Augmented_EN](https://huggingface.co/StivenLancheros/roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_Augmented_EN) on the CRAFT dataset. It achieves the following results on the evaluation set: - Loss: 0.2308 - Precision: 0.8366 - Recall: 0.8513 - F1: 0.8439 - Accuracy: 0.9681 ## Model description This model performs Named Entity Recognition for 6 entity tags: Sequence, Cell, Protein, Gene, Taxon, and Chemical from the CRAFT(Colorado Richly Annotated Full Text) Corpus in Spanish and English. Entity tags have been normalized and replaced from the original three letter code to a full name e.g. B-Protein, I-Chemical. This model is trained on augmented data created using Entity Replacement. 20% of the entities were replaced using a list of entities for each entity tag obtained from the official ontologies for each entity class. Both datasets (original, augmented) were concatenated. To improve F1 score the transfer learning was completed in two steps. Using [StivenLancheros/roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_Augmented_EN](https://huggingface.co/StivenLancheros/roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_Augmented_EN as a base model, I finetuned once more on the original CRAFT dataset in English. Biobert --> Augmented CRAFT --> CRAFT ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 3e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 ### Training results | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:---------:|:------:|:------:|:--------:| | 0.0129 | 1.0 | 1360 | 0.2119 | 0.8404 | 0.8364 | 0.8384 | 0.9666 | | 0.0072 | 2.0 | 2720 | 0.2132 | 0.8173 | 0.8583 | 0.8373 | 0.9662 | | 0.0042 | 3.0 | 4080 | 0.2180 | 0.8410 | 0.8515 | 0.8462 | 0.9686 | | 0.0019 | 4.0 | 5440 | 0.2308 | 0.8366 | 0.8513 | 0.8439 | 0.9681 | ### Framework versions - Transformers 4.17.0 - Pytorch 1.10.0+cu111 - Datasets 2.0.0 - Tokenizers 0.11.6
[ "NAMED_ENTITY_RECOGNITION" ]
[ "CRAFT" ]
RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf
RichardErkhov
null
[ "gguf", "arxiv:2309.06085", "arxiv:2101.09635", "endpoints_compatible", "region:us" ]
2024-08-03T06:10:18
2024-08-03T08:16:05
127
0
--- {} --- Quantization made by Richard Erkhov. [Github](https://github.com/RichardErkhov) [Discord](https://discord.gg/pvy7H8DZMG) [Request more models](https://github.com/RichardErkhov/quant_request) llama3-8b-cpt-sea-lionv2-base - GGUF - Model creator: https://huggingface.co/aisingapore/ - Original model: https://huggingface.co/aisingapore/llama3-8b-cpt-sea-lionv2-base/ | Name | Quant method | Size | | ---- | ---- | ---- | | [llama3-8b-cpt-sea-lionv2-base.Q2_K.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.Q2_K.gguf) | Q2_K | 2.96GB | | [llama3-8b-cpt-sea-lionv2-base.IQ3_XS.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.IQ3_XS.gguf) | IQ3_XS | 3.28GB | | [llama3-8b-cpt-sea-lionv2-base.IQ3_S.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.IQ3_S.gguf) | IQ3_S | 3.43GB | | [llama3-8b-cpt-sea-lionv2-base.Q3_K_S.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.Q3_K_S.gguf) | Q3_K_S | 3.41GB | | [llama3-8b-cpt-sea-lionv2-base.IQ3_M.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.IQ3_M.gguf) | IQ3_M | 3.52GB | | [llama3-8b-cpt-sea-lionv2-base.Q3_K.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.Q3_K.gguf) | Q3_K | 3.74GB | | [llama3-8b-cpt-sea-lionv2-base.Q3_K_M.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.Q3_K_M.gguf) | Q3_K_M | 3.74GB | | [llama3-8b-cpt-sea-lionv2-base.Q3_K_L.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.Q3_K_L.gguf) | Q3_K_L | 4.03GB | | [llama3-8b-cpt-sea-lionv2-base.IQ4_XS.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.IQ4_XS.gguf) | IQ4_XS | 4.18GB | | [llama3-8b-cpt-sea-lionv2-base.Q4_0.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.Q4_0.gguf) | Q4_0 | 3.03GB | | [llama3-8b-cpt-sea-lionv2-base.IQ4_NL.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.IQ4_NL.gguf) | IQ4_NL | 4.38GB | | [llama3-8b-cpt-sea-lionv2-base.Q4_K_S.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.Q4_K_S.gguf) | Q4_K_S | 1.52GB | | [llama3-8b-cpt-sea-lionv2-base.Q4_K.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.Q4_K.gguf) | Q4_K | 0.36GB | | [llama3-8b-cpt-sea-lionv2-base.Q4_K_M.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.Q4_K_M.gguf) | Q4_K_M | 0.16GB | | [llama3-8b-cpt-sea-lionv2-base.Q4_1.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.Q4_1.gguf) | Q4_1 | 0.01GB | | 
[llama3-8b-cpt-sea-lionv2-base.Q5_0.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.Q5_0.gguf) | Q5_0 | 0.17GB | | [llama3-8b-cpt-sea-lionv2-base.Q5_K_S.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.Q5_K_S.gguf) | Q5_K_S | 1.65GB | | [llama3-8b-cpt-sea-lionv2-base.Q5_K.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.Q5_K.gguf) | Q5_K | 5.34GB | | [llama3-8b-cpt-sea-lionv2-base.Q5_K_M.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.Q5_K_M.gguf) | Q5_K_M | 5.34GB | | [llama3-8b-cpt-sea-lionv2-base.Q5_1.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.Q5_1.gguf) | Q5_1 | 5.65GB | | [llama3-8b-cpt-sea-lionv2-base.Q6_K.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.Q6_K.gguf) | Q6_K | 6.14GB | | [llama3-8b-cpt-sea-lionv2-base.Q8_0.gguf](https://huggingface.co/RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf/blob/main/llama3-8b-cpt-sea-lionv2-base.Q8_0.gguf) | Q8_0 | 7.95GB | Original model description: --- language: - en - id - ta - th - vi license: llama3 --- # Llama3 8B CPT SEA-LIONv2 SEA-LION is a collection of Large Language Models (LLMs) which has been pretrained and instruct-tuned for the Southeast Asia (SEA) region. This is the card for the Llama3 8B CPT SEA-LIONv2 base model which has undergone continued pre-training from the [Meta-Llama-3-8B-Instruct](https://huggingface.co/meta-llama/Meta-Llama-3-8B-Instruct) model. SEA-LION stands for <i>Southeast Asian Languages In One Network</i>. ## Model Details ### Model Description The continued pre-training data for Llama3 8B CPT SEA-LIONv2 base model encompasses approximately 48B tokens. - **Developed by:** Products Pillar, AI Singapore - **Funded by:** Singapore NRF - **Model type:** Decoder - **Languages:** English, Indonesian, Thai, Vietnamese, Tamil - **License:** [Llama3 Community License](https://huggingface.co/meta-llama/Meta-Llama-3-8B/blob/main/LICENSE) For tokenization, the model employs the default tokenizer used in Meta-Llama-3-8B-Instruct. ### Benchmark Performance We evaluated Llama3 8B CPT SEA-LIONv2 base model on general language capabilities. #### General Language Capabilities For the evaluation of general language capabilities in SEA languages, we employed the [BHASA evaluation benchmark](https://arxiv.org/abs/2309.06085v2) across a variety of tasks. These tasks include Question Answering (QA), Sentiment Analysis (Sentiment), Toxicity Detection (Toxicity), Translation in both directions (Eng>Lang & Lang>Eng), Abstractive Summarization (Summ), Causal Reasoning (Causal) and Natural Language Inference (NLI). The evaluation was done **five-shot** with native prompts and only a sample of 100-1000 instances for each dataset was used as per the setting described in the paper. **BHASA** To be released soon We also evaluated the model on English capabilities using tasks from the Open LLM Leaderboard. 
**English** | Model | ARC | BBH | HellaSwag | MMLU | GSM8k | Average | | ----------------------------------------- |:-----:|:-----:|:---------:|:-----:|:-----:|:-------:| | Qwen/Qwen2-7B | 61.86 | 53.10 | 80.63 | 70.45 | 78.09 | 68.83 | | aisingapore/llama3-8b-cpt-sea-lionv2-base | 58.87 | 47.70 | 81.14 | 63.11 | 50.49 | 60.26 | | meta-llama/Meta-Llama-3-8B | 57.85 | 46.09 | 81.89 | 65.10 | 45.34 | 59.25 | | mistralai/Mistral-7B-v0.3 | 59.56 | 44.89 | 82.97 | 62.36 | 33.36 | 56.63 | | Sail/Sailor-7B | 50.34 | 35.65 | 76.11 | 52.80 | 33.81 | 49.74 | ## Training Details ### Data Llama3 8B CPT SEA-LIONv2 base model was continued pre-trained on 48B tokens of the following data: | Data Source | Unique Tokens (B) | Multiplier | Total Tokens (B) | Percentage (%) | |---------------------------|:-----------------:|:----------:|:----------------:|:--------------:| | Dolma RefinedWeb - English| 7.650 | 1 | 7.650 | 15.90 | | Dolma C4 - English | 1.160 | 1 | 1 | 9.21 | | Dolma Reddit - English | 1.339 | 1 | 14.7 | 2.42 | | Dolma Semantic Scholar | 0.959 | 1 | 2.9 | 2.79 | | Dolma arXiv | 0.469 | 1 | 5.3 | 1.99 | | Dolma StarCoder | 4.422 | 1 | 4.9 | 0.98 | | SEA-LION Pile - Indonesian| 3.4 | 1 | 6.8 | 14.17 | | Wiki* - Indonesian | 0.3 | 4 | 1.2 | 2.50 | | SEA-LION Pile - Tamil | 5.6 | 1 | 5.6 | 11.67 | | Wiki* + News - Tamil | 0.6 | 4 | 2.4 | 5.00 | | SEA-LION Pile - Thai | 2.28 | 1 | 2.28 | 4.75 | | WangChanBERTa - Thai | 5 | 1 | 5 | 10.42 | | Wiki* - Thai | 0.18 | 4 | 0.72 | 1.50 | | SEA-LION Pile - Vietnamese| 6.76 | 1 | 6.76 | 14.08 | | Wiki* - Vietnamese | 0.31 | 4 | 1.24 | 2.58 | Note: - All token counts are counted using Llama3 tokenizer - wiki* sources includes Wikipedia, Wiki Books, Wiki Source and Wiki Voyage - Tamil news is sourced with permission from [Seithi](https://seithi.mediacorp.sg/) ### Infrastructure Llama3 8B CPT SEA-LIONv2 was trained using [MosaicML Composer](https://github.com/mosaicml/composer) on the following hardware: | Training Details | Llama3 8B CPT SEA-LIONv2 | |----------------------|:--------------------:| | AWS EC2 p5d.24xlarge | 8 instances | | Nvidia H100 80GB GPU | 64 | | Training Duration | 2 days | ### Configuration | HyperParameter | Llama3 8B CPT SEA-LIONv2 | |-------------------|:--------------------:| | Precision | bfloat16 | | Optimizer | decoupled_adamw | | Scheduler | weight_stable_decay | | Learning Rate | 1.0e-5 | | Global Batch Size | 512 | | Micro Batch Size | 2 | ## The Team Choa Esther<br> Cheng Nicholas<br> Huang Yuli<br> Lau Wayne<br> Lee Chwan Ren<br> Leong Wai Yi<br> Leong Wei Qi<br> Li Yier<br> Liu Bing Jie Darius<br> Lovenia Holy<br> Montalan Jann Railey<br> Ng Boon Cheong Raymond<br> Ngui Jian Gang<br> Nguyen Thanh Ngan<br> Ong Brandon<br> Ong Tat-Wee David<br> Ong Zhi Hao<br> Rengarajan Hamsawardhini<br> Siow Bryan<br> Susanto Yosephine<br> Tai Ngee Chia<br> Tan Choon Meng<br> Teo Eng Sipp Leslie<br> Teo Wei Yi<br> Tjhi William<br> Teng Walter<br> Yeo Yeow Tong<br> Yong Xianbin<br> ## Acknowledgements AI Singapore is a national programme supported by the National Research Foundation, Singapore and hosted by the National University of Singapore. Any opinions, findings and conclusions or recommendations expressed in this material are those of the author(s) and do not reflect the views of National Research Foundation, Singapore. 
## Contact For more info, please contact us using this [SEA-LION Inquiry Form](https://forms.gle/sLCUVb95wmGf43hi6) [Link to SEA-LION's GitHub repository](https://github.com/aisingapore/sealion) ## Disclaimer This is the repository for the base model. The model has _not_ been aligned for safety. Developers and users should perform their own safety fine-tuning and related security measures. In no event shall the authors be held liable for any claim, damages, or other liability arising from the use of the released weights and codes. ## References ```bibtex @misc{lowphansirikul2021wangchanberta, title={WangchanBERTa: Pretraining transformer-based Thai Language Models}, author={Lalita Lowphansirikul and Charin Polpanumas and Nawat Jantrakulchai and Sarana Nutanong}, year={2021}, eprint={2101.09635}, archivePrefix={arXiv}, primaryClass={cs.CL} } ```
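A hedged usage sketch for the GGUF files listed in the quantization table above, assuming the third-party `huggingface_hub` and `llama-cpp-python` packages; the chosen quant file and prompt are illustrative, not a recommendation, and since this is a base (non-instruct) model, plain text completion without a chat template is used.

```python
from huggingface_hub import hf_hub_download
from llama_cpp import Llama

# Download one of the quantized files from this repository (illustrative choice).
gguf_path = hf_hub_download(
    repo_id="RichardErkhov/aisingapore_-_llama3-8b-cpt-sea-lionv2-base-gguf",
    filename="llama3-8b-cpt-sea-lionv2-base.Q4_K_M.gguf",
)

llm = Llama(model_path=gguf_path, n_ctx=2048)

# Base model: plain text completion.
out = llm("Singapore is", max_tokens=32)
print(out["choices"][0]["text"])
```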
[ "QUESTION_ANSWERING", "TRANSLATION", "SUMMARIZATION" ]
[ "CHIA" ]
Tejasw1/votum-case-law-v1
Tejasw1
sentence-similarity
[ "sentence-transformers", "safetensors", "new", "sentence-similarity", "feature-extraction", "generated_from_trainer", "dataset_size:132576", "loss:MatryoshkaLoss", "loss:MultipleNegativesRankingLoss", "custom_code", "en", "arxiv:1908.10084", "arxiv:2205.13147", "arxiv:1705.00652", "base_model:Alibaba-NLP/gte-base-en-v1.5", "base_model:finetune:Alibaba-NLP/gte-base-en-v1.5", "license:apache-2.0", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
2024-12-08T11:51:01
2024-12-08T11:51:14
126
0
--- base_model: Alibaba-NLP/gte-base-en-v1.5 language: - en library_name: sentence-transformers license: apache-2.0 metrics: - cosine_accuracy@1 - cosine_accuracy@3 - cosine_accuracy@5 - cosine_accuracy@10 - cosine_precision@1 - cosine_precision@3 - cosine_precision@5 - cosine_precision@10 - cosine_recall@1 - cosine_recall@3 - cosine_recall@5 - cosine_recall@10 - cosine_ndcg@10 - cosine_mrr@10 - cosine_map@100 pipeline_tag: sentence-similarity tags: - sentence-transformers - sentence-similarity - feature-extraction - generated_from_trainer - dataset_size:132576 - loss:MatryoshkaLoss - loss:MultipleNegativesRankingLoss widget: - source_sentence: In what circumstances can the permission to pay turnover tax under Section 7 of the KGST Act be challenged or rectified? sentences: - '**1. Key Legal Issues and Holdings:** * **Amalgamation of LLPs:** The case revolves around the proposed Scheme of Amalgamation of two Limited Liability Partnerships (LLPs), Alps Trade Com LLP (Transferee) and Lubstor Trade Com LLP (Transferor), under Section 60-62 of the Limited Liability Partnership Act, 2008. * **Approval of Scheme:** The main legal issue is the Tribunal''s approval of the proposed Scheme of Amalgamation, which involves the transfer of assets, liabilities, and rights of the Transferor LLP to the Transferee LLP. * **Compliance with LLP Act:** The court considered the compliance of the LLPs with the provisions of the Limited Liability Partnership Act, 2008, including the requirement for consent from partners, creditors, and other stakeholders. **2. Significant Facts of the Case:** * The Transferee LLP, Alps Trade Com LLP, has 4 partners, and the Transferor LLP, Lubstor Trade Com LLP, has 3 partners. * The Transferor LLP has NIL creditors, and the Transferee LLP has one major creditor, Yaduka Agrotech Private Limited, which has given its no objection to the proposed merger. * The Scheme of Amalgamation has been approved by the partners and creditors of both LLPs. * The Tribunal has dispensed with the requirement of holding separate meetings of partners and creditors of both LLPs. **3. Court''s Ruling:** * The Tribunal has approved the Scheme of Amalgamation under Section 60-62 of the Limited Liability Partnership Act, 2008. * The Tribunal has dispensed with the requirement of holding separate meetings of partners and creditors of both LLPs. * The LLPs are required to serve notice to the Registrar of Companies, West Bengal, the Official Liquidator, and the Income-Tax Assessing Officer within 7 days from the date of the order. **4. Citations:** * **Limited Liability Partnership Act, 2008** (Sections 60-62)' - '**1. Key Legal Issues and Holdings:** * **Alternate Method of Taxation:** The case revolves around the applicability of the alternate method of taxation under Section 7 of the Kerala General Sales Tax Act, 1963. * **Section 7 of KGST Act:** The main legal issue is the interpretation of Section 7 of the KGST Act, which provides for payment of tax at a compounded rate. * **Assessment Year:** The court considered the issue of whether the amended provisions of the Kerala Finance Act, 2001, which came into effect from 23-7-2001, were applicable for Assessment Year 2001-2002. **2. Significant Facts of the Case:** * The appellant, M/s Varkisons Engineers, is a partnership firm with a crushing unit at Kadiyiruppu, Kolenchery, Ernakulam District. * The appellant opted to pay turnover tax under Section 7 of the KGST Act for Assessment Year 2001-2002. 
* The assessing authority granted permission to the appellant to pay tax under Section 7 on 9-4-2001. * The Finance Act, 2001, enhanced the rate per machine from Rs 30,000 to Rs 90,000 from 23-7-2001. * The appellant challenged the notice issued under Section 43 of the KGST Act seeking to rectify the permission/order dated 9-4-2001 and seeking an enhanced rate per machine with effect from 23-7-2001. **3. Court''s Ruling:** * The Supreme Court set aside the impugned judgment dated 4-10-2007 and restored Original Petition No. 1501 of 2003 to the file of the Kerala High Court for de novo consideration. * The court held that the Surcharge Act, 1957, was not retrospective in operation and could not be regarded as law in force at the commencement of the year of Assessment 1957-1958. * The court also referred to the judgment of this Court in CIT v. Isthmian Steamship Lines, where it was held that the law to be applied is the law in force in the assessment year, unless otherwise stated or implied. * The civil appeal stands disposed of accordingly, with all contentions expressly kept open. **4. Citations:** * **State of Kerala v. Builders Assn. of India**, (1997) 2 SCC 183 * **Mycon Construction Ltd. v. State of Karnataka**, (2003) 9 SCC 583 * **Mathuram Agrawal v. State of M.P.**, (1999) 8 SCC 667 * **Karimtharuvi Tea Estate Ltd. v. State of Kerala**, AIR 1966 SC 1385 : (1966) 60 ITR 262 * **CST v. Modi Sugar Mills Ltd.**, AIR 1961 SC 1047 : (1961) 2 SCR 189 : (1961) 12 STC 182' - '**1. Key Legal Issues and Holdings:** * **Existence of Dispute:** The main legal issue is whether there was an existence of dispute prior to the issuance of the Demand Notice dated 11.04.2019. * **Section 8 of IBC:** The court considered the application of Section 8 of the Insolvency and Bankruptcy Code, 2016, which deals with the requirement of a dispute to be raised by the corporate debtor in response to a demand notice. * **Admissibility of Corporate Insolvency Resolution Process (CIRP):** The court''s ruling affected the admissibility of the CIRP against the corporate debtor. **2. Significant Facts of the Case:** * The corporate debtor, Triumph Realty Pvt. Ltd., had a pre-existing dispute with the operational creditor, Tech India Engineers Pvt. Ltd. * The operational creditor issued a demand notice dated 11.04.2019, which was received by the corporate debtor on 16.04.2019. * The corporate debtor raised disputes through e-mails dated 04.10.2018, 01.11.2018, and 04.12.2018, among others. * The corporate debtor also pointed out discrepancies in the billed and actual executed work through e-mails dated 05.11.2018 and 29.04.2019. * The parties exchanged several e-mails and letters regarding the completion of the work and deficiency in services, indicating a pre-existing dispute. **3. Court''s Ruling:** * The NCLAT (National Company Law Appellate Tribunal) allowed the appeal and set aside the Impugned Order dated 04.06.2020 passed by the learned Adjudicating Authority. * The court held that the corporate debtor had raised disputes prior to the issuance of the demand notice, making the initiation of the CIRP against the corporate debtor invalid. * The court quashed the steps taken in consequence of the Impugned Order and released the corporate debtor from the rigour of the Corporate Insolvency Resolution Process. **4. Citations:** * **Mobilox Innovations Private Limited v. Kirusa Software Private Limited** (2018) 1 SCC 353 * **Innoventive Industries Ltd. v. ICICI Bank** (2018) 1 SCC 407 * **Vinod Mittal v. 
Rays Power Exports** (Company Appeal (AT) (Insolvency) No. 851/2019 dated 18.11.2019) * **Gajendra Parihar v. Devi Industrial Engineers** (Company Appeal (AT) (Insolvency) No. 1370 of 2019 dated 18.03.2020)' - source_sentence: How does the court determine the adequacy of shareholder approval in corporate amalgamations? sentences: - '**1. Key Legal Issues and Holdings:** * **Trademark Infringement:** The primary legal issue is whether the term "Split View" can be considered a trademark or is merely descriptive of a software feature. * **Prior Use:** The court considered whether Apple Inc. or the respondents (Rohit Singh and Vyooh Low Level Computing LLP) had prior use of the term "Split View" as a trademark. * **Passing Off:** The court examined whether Apple''s use of "Split View" constitutes passing off, given the distinction between a product and a feature within an operating system. * **Descriptive Use vs. Trademark Use:** The court evaluated whether "Split View" is a descriptive term or a trademark, noting that if it is merely descriptive, it cannot be claimed as a trademark. * **Distinctiveness:** The court assessed whether the term "Split View" had acquired a secondary meaning or distinctiveness, thereby qualifying as a trademark. **2. Significant Facts of the Case:** * Rohit Singh developed a software product called "Split View" in 2005, which allowed users to simultaneously work on multiple windows on their computer screen. * Apple Inc. launched an update to their operating system (Mac OS X El Capitan and iOS 9) in December 2015, which included a feature called "Split View." * Rohit Singh claimed that Apple''s use of "Split View" infringed on his trademark and sought relief from Apple. * Apple argued that "Split View" is a descriptive term used by various software developers and not a trademark. * Apple highlighted that its use of "Split View" is integrated within its operating system and not sold as a standalone product. * Apple provided examples of other entities using "Split View" to describe multi-window functionality. * The court noted that the respondents had established prior use of the term "Split View" as a trademark for their software product. * The court recognized the distinction between a product and a feature within an operating system, which is relevant to the passing off claim. * The court found that the term "Split View" was used descriptively and not as a trademark by either party. **3. Court''s Ruling:** * The High Court vacated the ex-parte interim order granted by the learned Single Judge. * The court directed Apple to file a written statement. * The court noted that the respondents had established prior use of the term "Split View" as a trademark for their software product. * The court recognized the distinction between a product and a feature within an operating system, which is relevant to the passing off claim. * The court concluded that the term "Split View" is descriptive and cannot be claimed as a trademark by either party. **4. Citations:** * **Kavi Raj Pandit v. Durga Dutt Sharma**, AIR 1965 SC 1980 * **Carlsberg India Pvt. Ltd. v. Radico Khaitan Ltd.**, 2012 (49) PTC 54 * **Automatic Electric v. R.K. Dhawan**, 57 (1995) DLT 49 * **Laxmikant V. Patel v. Chetanbhai Shah**, (2002) 3 SCC 65 * **Cadila Healthcare Ltd. v. Gujarat Cooperative Milk Marketing Federation Ltd.**, ILR (2010) II Del 85 * **Uniply Industries Ltd. v. Karnataka Industrial Development Corporation**, (2001) 5 SCC 95, AIR 2001 SC 2083' - '**1. 
Key Legal Issues and Holdings:** * **Amalgamation of Companies:** The case revolves around the proposed amalgamation between Crown Beers India Private Limited (Transferor Company) and Anheuser Busch InBev India Limited (Transferee Company) under sections 230 to 232 of the Companies Act, 2013. * **Scheme of Amalgamation:** The main legal issue is the approval of the Scheme of Amalgamation, which includes the transfer of assets and liabilities from the Transferor Company to the Transferee Company. * **Shareholder Approval:** The court considered the requirements for shareholder approval, including the notice period, proxy voting, and quorum. **2. Significant Facts of the Case:** * The Transferor Company is engaged in the business of manufacturing, brewing, packaging, distribution, marketing, sale, export, and import of beer, liquor, and other alcoholic products. * The Scheme provides for the issuance of new equity shares by the Transferee Company to the equity shareholders of the Transferor Company. * The Scheme also provides for the transfer of assets and liabilities from the Transferor Company to the Transferee Company. * There are no secured creditors, but there are approximately 1,250 unsecured creditors. **3. Court''s Ruling:** * The Tribunal directed the Applicant Company to issue notices to the equity shareholders, unsecured creditors, and regulatory authorities. * The Tribunal also directed the Applicant Company to serve notices to the concerned Income Tax Authority and the Central Government. * The Tribunal dispensed with the meeting of the creditors and directed the Applicant Company to file an affidavit of service. **4. Citations:** * **Companies Act, 2013** * **Companies (Compromises, Arrangements and Amalgamations) Rules, 2016**' - '**1. Key Legal Issues and Holdings:** * **Amalgamation of Companies:** The case revolves around the proposed amalgamation of Fizza Plastics Private Limited (Transferor Company) with Krypton Datamatics Limited (Transferee Company) under Sections 230-232 of the Companies Act, 2013. * **Scheme of Amalgamation:** The court considered the applicability of the Scheme of Amalgamation, including its compliance with the Accounting Standards and the requirements of the Companies Act, 2013. * **Dispensation of Convening Meetings:** The court held that the requirement of convening meetings of shareholders and creditors can be dispensed with, considering the consent of shareholders and creditors. **2. Significant Facts of the Case:** * The Transferor Company and Transferee Company are incorporated under the Companies Act, 2013. * The registered offices of both companies are situated in the National Capital Territory of Delhi. * The Scheme of Amalgamation is necessitated by the consolidation of the businesses, strategic and competitive advantages, economies of scale, and better utilization of skilled manpower. * The Share Exchange Ratio has been determined in accordance with the Report on Valuation of Shares & Share Exchange Ratio dated 5th December 2017. * The Board of Directors of each company has unanimously approved the proposed Scheme of Amalgamation. **3. Court''s Ruling:** * The court allowed the application for dispensing with the requirement of convening meetings of shareholders and creditors of the applicant companies. * The court directed the applicant companies to comply with the applicable law, including forms and formats contained in the Companies (Compromises, Arrangements, Amalgamations) Rules, 2016. 
* The court also directed the applicant companies to send notices to the Central Government, Income Tax Authorities, Registrar of Companies, and other sectoral regulators or authorities as required under sub-section (5) of section 230 of the Companies Act, 2013. **4. Citations:** * Companies Act, 2013 * Companies (Compromises, Arrangements, and Amalgamations) Rules, 2016' - source_sentence: Under what circumstances can a government servant be prosecuted without obtaining prior sanction as per Section 197 CrPC? sentences: - '**1. Key Legal Issues and Holdings:** * **Share Transfer and Acquisition:** The case revolves around the alleged illegal transfer and acquisition of shares by Respondent No. 2 from Respondents 5-12, which diluted the shareholding of the Petitioner. * **Section 108 of the Company Act 1956:** The main legal issue is the application of Section 108, which deals with the transfer of shares, and whether the transfer was made without the previous sanction of the Directors. * **Articles of Association:** The court considered the provisions of the Articles of Association, particularly Article No. of the AOA, which permits member-to-member transfers. **2. Significant Facts of the Case:** * The Respondent company was incorporated on 29.5.2007 with 1,50,000 shares held equally by the three initial promoters. * The company acquired a property in Goa, and to raise finances for development, further allotment of 90,000 shares was made at a premium to third parties. * Respondent No. 2 purchased an adjoining piece of land for Rs. 1.2 crores and proposed to amalgamate it with the project. * The Petitioner alleges that Respondent No. 2 was in control of the company''s affairs and had not transferred the plot of 300 sq. meters to the company. * The Respondent company''s bank account is jointly operated, and the security advance received from the Lessee has been spent on renovations and additions. **3. Court''s Ruling:** * The court dismissed the petition on grounds of limitation and lack of merit. * The court held that the acquisition of shares by Respondent No. 2 was not illegal, as it was a member-to-member transfer permitted under the Articles of Association. * The court found that the Petitioner had knowledge of the acquisition and had not objected to it, giving rise to the inference of his consent. * The court also found that the Respondent company''s management decisions, including the leasing of the property, were not oppressive or mismanaged. **4. Citations:** * **Section 108 of the Company Act 1956** * **Articles of Association of the Company** * **Precedents under the Companies Act 2013**' - '**1. Key Legal Issues and Holdings:** * **Section 196 CrPC:** Whether the court can take cognizance of an offense committed by a police officer while acting in the discharge of his official duties without sanction. * **Section 197 CrPC:** Whether a government servant can be prosecuted without sanction. * **Protection of Public Servants:** The court balanced the need to protect public servants in the discharge of their duties while also emphasizing the protection of citizens'' rights. **2. Significant Facts of the Case:** * The petitioner, Bakhshish Singh Brar, a Deputy Superintendent of Police, was accused of causing grievous injuries and death during a raid and search. * The case was committed to the Court of Sessions by the Judicial Magistrate First Class, Kapurthala. 
* The complainant, Gurmej Kaur, alleged that the petitioner and his police party had attacked her and her sons, Ajit Singh and Manjit Singh, who were later killed. * The respondent''s case was that the police party was conducting a raid on a haveli in connection with illicit liquor and unlicensed arms. * The court noted that the two versions of the incident were in conflict. **3. Court''s Ruling:** * The court held that the trial could proceed without sanction under Section 196 CrPC. * The court observed that the question of whether the petitioner exceeded his official capacity while acting in the discharge of his duties could only be determined after some evidence had been noted by the trial court. * The court allowed the trial to proceed as expeditiously as possible and directed that the question of sanction under Section 197 CrPC may be agitated after some evidence had been noted by the learned Additional Sessions Judge. **4. Citations:** * **Pukhraj v. State of Rajasthan**, (1973) 2 SCC 701 : 1973 SCC (Cri) 944 : (1974) 1 SCR 559' - '**1. Key Legal Issues and Holdings:** * **Circumstantial Evidence:** The case revolves around the use of circumstantial evidence to establish the guilt of the accused under Section 302 of the Indian Penal Code, 1860. * **Dying Declaration:** The admissibility of the oral dying declaration made by the deceased to P.Ws.1 and 2 is a crucial issue. * **Extra-Judicial Confession:** The evidence of P.W.7 regarding the extra-judicial confession made by the accused is significant. * **Recovery of Materials:** The recovery of materials of evidence, such as blood-stained clothes and weapons, is also an issue. **2. Significant Facts of the Case:** * The deceased was cutting tapioca plants on the accused''s land, which led to a quarrel and subsequent assault by the accused. * The accused beat the deceased with a stick and inflicted cut injuries with a sickle, leaving him with 15 external injuries and fractures in the skull, right leg, and left elbow. * The deceased was tied with a nylon rope and left bleeding, and the accused fled the scene. * P.Ws.1 and 2 found the accused with blood-stained clothes and reported the incident to the police. **3. Court''s Ruling:** * The High Court upheld the conviction of the accused under Section 302 of the Indian Penal Code, 1860. * The court rejected the accused''s plea for sympathy and modification of the conviction and sentence. * The accused was sentenced to life imprisonment. **4. Citations:** * **Gentela Vijayavardhan Rao v. State of A.P.**, (1996) (6) SCC 241 * **Namdeo Daulata Dhayagude v. State of Maharashtra**, (1976) (4) SCC 441 * **Padala Veera Reddy v. State of A.P.**, AIR 1990 SC 709 * **Puran Singh v. State of Punjab**, 1995 Supp (3) SCC 665 * **Rattan Singh v. State of H.P.**, (1997) (4) SCC 161 **Additional Key Points:** * The prosecution relied on circumstantial evidence, which must satisfy the tests laid down in Padala Veera Reddy v. State of A.P. (AIR 1990 SC 709). * The accused''s motive was established through the evidence of P.Ws.1, 2, and 7, showing the accused had a grudge against the deceased for cutting the tapioca plants. * The oral dying declaration of the deceased to P.Ws.1 and 2 was corroborated by the medical evidence and other circumstances, making it reliable. * The accused''s extra-judicial confession to P.W.7 was significant, along with the recovery of blood-stained clothes and weapons. * The accused''s sentence was upheld, and he was sentenced to life imprisonment. 
**Refined Summary:** The case revolves around the murder of the deceased by the accused, who was convicted under Section 302 of the Indian Penal Code, 1860. The prosecution relied on circumstantial evidence, including the oral dying declaration of the deceased, the accused''s extra-judicial confession, and the recovery of blood-stained clothes and weapons. The court upheld the conviction and sentence, rejecting the accused''s plea for sympathy and modification. The accused was sentenced to life imprisonment.' - source_sentence: How does the court assess the significance of the recovery of firearms and cartridges from the accused at the crime scene in establishing a conspiracy to commit murder? sentences: - '**1. Key Legal Issues and Holdings:** * **Tenancy and Land Laws:** The case revolves around the interpretation of tenancy rights under the U.P. Tenancy Act, 1939, and the U.P. Zamindari Abolition and Land Reforms Act, 1950. * **Bhumidari Rights:** The main legal issue is the applicability of Section 182(2) of the U.P. Tenancy Act, 1939, which deals with the extinguishment of a female tenant''s rights upon marriage and the consequent hereditary tenancy rights of the person in possession. * **Possession and Sirdari Rights:** The court considered the question of whether Chhanoo, the respondent, had acquired sirdari rights through adverse possession or as a representative of Mst Sundariya, the original tenant. **2. Significant Facts of the Case:** * Mst Sundariya, the original tenant, died, and Chhanoo, her guardian, managed the property. * Mst Sundariya obtained bhumidari rights in the plots in question by depositing ten times the rent. * She sold the plots to the plaintiff, and Chhanoo claimed rights on the land. * The revenue entries showed that Chhanoo was the guardian of Mst Sundariya, and he continued to manage the property. * Mst Sundariya continued to be shown as a tenant in the revenue records, and Chhanoo did not take any action to correct the entries or claim adverse possession. **3. Court''s Ruling:** * The court upheld the finding of the first appellate court that Chhanoo''s possession was always as a representative or de facto guardian of Mst Sundariya. * The court held that Chhanoo did not acquire any title by way of adverse possession and was not entitled to sirdari rights. * The court allowed the appeal and set aside the order of the High Court, restoring the order of the first appellate court. **4. Citations:** * **U.P. Tenancy Act, 1939** * **U.P. Zamindari Abolition and Land Reforms Act, 1950** * **Section 182(2) of the U.P. Tenancy Act, 1939** * **Section 36 of the Tenancy Act** * **Section 134 of the U.P. Zamindari Abolition and Land Reforms Act, 1950** * **Section 137 and 137-A of the U.P. Zamindari Abolition and Land Reforms Act, 1950**' - '**1. Key Legal Issues and Holdings:** * **Murder and Attempted Murder:** The case revolves around allegations of murder and attempted murder of Dr. Satya Prakash Dubey and his wife Smt. Manorma Dubey, and injuries to Umesh Chandra Mishra and Munnu Singh. * **Section 302 and 307 IPC:** The main legal issue is the application of Section 302 (punishment for murder) and Section 307 (attempt to murder) of the Indian Penal Code, 1860. * **Arms Act:** The court also considered the application of the Arms Act, specifically Section 25, which deals with the unlawful possession of firearms. **2. Significant Facts of the Case:** * The occurrence took place on August 8, 1982, at the residence of Dr. Satya Prakash Dubey in Etawah. * Dr. 
Dubey and his wife Smt. Manorma Dubey were found dead, while Umesh Chandra Mishra and Munnu Singh were injured. * The accused, Brijendra Kumar, Ashok Dixit, and Chaman Lal, were apprehended at the scene, and firearms and cartridges were recovered from them. * The prosecution case was that the accused had conspired to murder Dr. Dubey and his wife, and had attempted to murder the injured individuals. * The defense argued that the accused were innocent and that the prosecution had failed to prove their guilt. * The investigating officer failed to record the statements of eye witnesses, including Umesh Chandra Mishra, Km. Ritu, Munnu Singh, and Bhagwat Dayal Dubey, on the night of the occurrence. * The accused persons were not interrogated on the night of the occurrence, and the investigating officer recorded their statements in the morning of 9-8-1982. * The First Information Report (FIR) was allegedly founded on the information furnished by Munnu Singh, one of the injured, but Munnu Singh was not examined as a witness to corroborate the version in the FIR. **3. Court''s Ruling:** * The High Court has acquitted the accused, Brijendra Kumar, Ashok Dixit, and Chaman Lal, due to lack of credible evidence. * The court has observed that the investigation was marred by several irregularities and that the evidence presented by the prosecution was unreliable. * The court has also noted that the investigating officer and other police personnel had conducted themselves in a manner that raised doubts about the prosecution case. **4. Citations:** * The case does not seem to be a precedent-setting case, but the court has considered the judgments of the Apex Court in other cases while delivering its verdict.' - '**1. Key Legal Issues and Holdings:** * **Occupier of a Factory:** The main legal issue is the interpretation of who can be considered the occupier of a factory, particularly in the case of a company. * **Ultimate Control:** The court holds that a company, which owns or runs a factory, cannot nominate any employee or officer, except a director, as the occupier of the factory. * **Proviso (ii) to Section 2(n) of the Factories Act, 1948:** The court upholds the validity of the proviso, which provides a deeming fiction that a director of a company shall be deemed to be the occupier in case of a company. * **Vicarious Liability:** The court affirms the principle of vicarious liability, holding that the occupier (director) is responsible for the actions of the manager and actual offenders in the factory. * **Strict Liability:** The court upholds the principle of strict liability, where the occupier is liable for the contravention of provisions under the Act, even without mens rea. * **Section 101 as an Exception:** The court holds that Section 101 of the Act provides an exception to the principle of strict liability, allowing the occupier to extricate himself from liability by establishing that the actual offender is someone else. **2. Significant Facts of the Case:** * The case revolves around the interpretation of Section 2(n) of the Factories Act, 1948, and the proviso (ii) added in 1987. * The court considers the legislative history of the amendment and the Statement of Objects and Reasons. * The court refers to various judgments, including M.C. Mehta (II) v. Union of India, to understand the context of the amendment. 
* The Chief Inspector of Factories directed the petitioners/appellants to file applications seeking renewal of the registration of licence of their respective factories, signed by a director of the company in his capacity as the occupier of the factory. **3. Court''s Ruling:** * The Supreme Court upholds the validity of proviso (ii) to Section 2(n) of the Factories Act, 1948. * The court holds that a company, which owns or runs a factory, cannot nominate any employee or officer, except a director, as the occupier of the factory. * The court affirms the directions given by the Chief Inspector of Factories to the writ petitioners and the appellants, stating that only a director of the company can file an application for renewal of the factory licence. * The court also holds that Section 101 of the Act provides an exception to the principle of strict liability, allowing the occupier to extricate himself from liability by establishing that the actual offender is someone else. **4. Citations:** * **M.C. Mehta (II) v. Union of India**, (1986) 2 SCC 325 * **John Donald Mackenzie v. Chief Inspector of Factories**, AIR 1962 SC 1351 * **Tesco Supermarkets Ltd. v. Nattrass**, 1972 AC 153 * **Lennard''s Carrying Co. Ltd. v. Asiatic Petroleum Co. Ltd.**, 1915 AC 705 * **Reserve Bank of India v. Peerless General Finance and Investment Co. Ltd.**, (1984) 4 SCC 444 * **S. Gopal Reddy v. State of A.P.**, (1995) 6 SCC 738 * **CIT v. Indo Mercantile Bank Ltd.**, (1999) 2 SCC 76 * **State of Gujarat v. Kansara Manilal Bhikalal**, (AIR at p. 1897) * **Maneklal Jinabhai Kot v. State of Gujarat**, (approved by a three-Judge Bench of this Court)' - source_sentence: What role does the liquidator play in verifying the claims and charges of secured creditors during the liquidation of a corporate debtor? sentences: - '**1. Key Legal Issues and Holdings:** * **Priority of Charges:** The main legal issue is the priority of charges on the secured assets of the corporate debtor, Reid and Taylor India Ltd. * **Insolvency and Bankruptcy Code, 2016:** The court considered the provisions of the Insolvency and Bankruptcy Code, 2016, particularly Section 52 and Regulation 37 of the Insolvency and Bankruptcy Board of India (Liquidation Process) Regulations, 2016. * **Security Interest:** The court examined the security interest held by the applicant, Finquest Financial Solutions P. Ltd., and other financial creditors, including Edelweiss Asset Reconstruction Co. Ltd. * **Entitlement to Realize Security Interest:** The court held that the applicant is entitled to realize their security interest in the manner specified under Section 52(1)(b) read with Regulation 37 of the IBBI (Liquidation Process) Regulations, 2016. **2. Significant Facts of the Case:** * The applicant, Finquest Financial Solutions P. Ltd., is a secured creditor with a first pari passu charge on the immovable fixed assets of the corporate debtor. * Edelweiss Asset Reconstruction Co. Ltd. is also a secured creditor with a claim on the same assets. * The corporate debtor, Reid and Taylor India Ltd., has been under liquidation. * Suit No. 84 of 2013 is pending in the Civil Judge (Senior Division), Nanjangud, challenging the first charge created by IDM. * The liquidator has verified the documents and found that the applicant is the sole first charge holder of the immovable property of the corporate debtor at Mysore. * The Edelweiss had not obtained an NOC from the IDM and had not ventilated their grievance or enforced their rights before any forum. **3. 
Court''s Ruling:** * The court ruled that the applicant, Finquest Financial Solutions P. Ltd., is entitled to realize their security interest in the manner specified under Section 52(1)(b) read with Regulation 37 of the IBBI (Liquidation Process) Regulations, 2016. * The court held that the applicant is the first charge holder of the immovable fixed assets of the corporate debtor. * The court dismissed the objection of Edelweiss Asset Reconstruction Co. Ltd. regarding the priority of charges. * The court directed the liquidator to hand over the symbolic possession of the fixed assets of the corporate debtor to the applicant to enable them to proceed with the sale of the assets. * The court directed the liquidator to inform the Tribunal about the manner and progress of the sale of assets from time-to-time for further directions/instructions. **4. Citations:** * **Insolvency and Bankruptcy Code, 2016** * **Regulation 37 of the Insolvency and Bankruptcy Board of India (Liquidation Process) Regulations, 2016** * **Suit No. 84 of 2013 filed with the Court of Civil Judge (Senior Division), Nanjangud, Karnataka**' - '**1. Key Legal Issues and Holdings:** * **Dowry and Cruelty:** The case revolves around allegations of dowry demands and cruelty by the husband (petitioner) towards his wife. * **Section 498-A IPC:** The main legal issue is the application of Section 498-A of the Indian Penal Code, 1860, which deals with cruelty by the husband or his relatives towards a married woman. * **Sentencing:** The court considered the appropriateness of the sentence awarded to the petitioner under Section 498-A IPC. **2. Significant Facts of the Case:** * The petitioner, Mangat Ram, was convicted under Section 498-A IPC. * He was sentenced to one year imprisonment and a fine. * He appealed the conviction and sentence, which was dismissed. * He then filed a revision petition, seeking a reduction in sentence. * The petitioner had already served over two months in prison. * The complainant (wife) had obtained an ex-parte divorce decree. **3. Court''s Ruling:** * The High Court upheld the conviction of the petitioner under Section 498-A IPC. * The court reduced the sentence to the period already undergone by the petitioner. * The court enhanced the fine to Rs. 5000/-. **4. Citations:** * **Yogendra Yadav v. State of Jharkhand**, Criminal Appeal No. 1205 of 2014 * **Lajpat Rai v. State of Haryana**, Criminal Revision No. 1380 of 1999 **Refined Summary (Updated):** **1. Key Legal Issues and Holdings:** * **Default Bail under Section 167(2) Cr.P.C.:** The court considered the applicability of default bail under Section 167(2) Cr.P.C. in cases where the investigating agency fails to file the final report within the prescribed time limit. * **Investigation and Filing of Challan:** The court held that the investigation is not considered incomplete merely because the investigating officer awaits reports of experts or fails to append certain documents to the police report. * **Role of the Court:** The court emphasized its role in determining whether to permit the prosecutor to adduce evidence of experts and to balance the interest of the accused with the interest of justice. **2. Significant Facts of the Case:** * The petitioners, Sukhwinder Kumar @ Sukha, Harpreet Singh @ Bahadur, Navjit Singh, and Rakesh Kumar @ Kesha, were accused of offenses under the Narcotic Drugs and Psychotropic Substances (NDPS) Act, 1985. * They filed revision petitions seeking default bail under Section 167(2) Cr.P.C. 
* The prosecution opposed their claims, arguing that the investigating agency had not failed to file the final report within the prescribed time limit. * The court considered the rival contentions and held that the petitioners were entitled to default bail. **3. Court''s Ruling:** * The court disposed of the revision petitions, releasing the petitioners on interim bail till the filing of the report under Section 173 Cr.P.C. * The court emphasized the importance of the investigating agency and the prosecuting agency complying with statutory provisions to avoid delay in completing investigations and filing challans. * The court noted that the respondent-State had failed to comply with statutory provisions, resulting in the accused getting benefit of default bail. **4. Citations:** * **Abdul Azeez P.V. v. National Investigation Agency**, 2015 (1) RCR (Criminal) 239 * **Mehal Singh v. State of Haryana**, 1978 PLR 480' - '**Refined Summary:** **1. Key Legal Issues and Holdings:** * **Public Purpose:** The main legal issue is the interpretation of the public purpose for which land was acquired under the Land Acquisition Act, 1894. * **Section 4 and 6:** The court considered the validity of notifications under Sections 4 and 6 of the Act. * **Land Acquisition:** The court held that the public purpose of acquiring land for planned development of the expanding town of Greater Delhi remained the same, despite the introduction of the Master Plan. **2. Significant Facts of the Case:** * The case involves the acquisition of land for the execution of the Interim General Plan for Greater Delhi. * The Master Plan for Delhi came into force on September 1, 1962, replacing the Interim General Plan. * The respondents contended that the public purpose indicated in the declaration under Section 6 ceased to be operative after the introduction of the Master Plan. * The appellants argued that the public purpose remained the same, i.e., the planned development of the expanding town of Greater Delhi. **3. Court''s Ruling:** * The Supreme Court allowed the appeal and set aside the judgment of the Delhi High Court. * The court held that the public purpose of acquiring land remained the same, despite the introduction of the Master Plan. * The court directed the parties to bear their own costs. **4. Citations:** * **Babu Singh v. 
Union of India**, (1981) 3 SCC 628' model-index: - name: GTE-base Votum Case Law results: - task: type: information-retrieval name: Information Retrieval dataset: name: dim 768 type: dim_768 metrics: - type: cosine_accuracy@1 value: 0.0824018343364861 name: Cosine Accuracy@1 - type: cosine_accuracy@3 value: 0.24835196331327028 name: Cosine Accuracy@3 - type: cosine_accuracy@5 value: 0.33935224992834623 name: Cosine Accuracy@5 - type: cosine_accuracy@10 value: 0.4760676411579249 name: Cosine Accuracy@10 - type: cosine_precision@1 value: 0.0824018343364861 name: Cosine Precision@1 - type: cosine_precision@3 value: 0.08278398777109008 name: Cosine Precision@3 - type: cosine_precision@5 value: 0.06787044998566925 name: Cosine Precision@5 - type: cosine_precision@10 value: 0.04760676411579248 name: Cosine Precision@10 - type: cosine_recall@1 value: 0.0824018343364861 name: Cosine Recall@1 - type: cosine_recall@3 value: 0.24835196331327028 name: Cosine Recall@3 - type: cosine_recall@5 value: 0.33935224992834623 name: Cosine Recall@5 - type: cosine_recall@10 value: 0.4760676411579249 name: Cosine Recall@10 - type: cosine_ndcg@10 value: 0.2582198876800978 name: Cosine Ndcg@10 - type: cosine_mrr@10 value: 0.19086027742519565 name: Cosine Mrr@10 - type: cosine_map@100 value: 0.20176101999097426 name: Cosine Map@100 - task: type: information-retrieval name: Information Retrieval dataset: name: dim 512 type: dim_512 metrics: - type: cosine_accuracy@1 value: 0.07781599312123817 name: Cosine Accuracy@1 - type: cosine_accuracy@3 value: 0.235024362281456 name: Cosine Accuracy@3 - type: cosine_accuracy@5 value: 0.32745772427629694 name: Cosine Accuracy@5 - type: cosine_accuracy@10 value: 0.4656061908856406 name: Cosine Accuracy@10 - type: cosine_precision@1 value: 0.07781599312123817 name: Cosine Precision@1 - type: cosine_precision@3 value: 0.07834145409381867 name: Cosine Precision@3 - type: cosine_precision@5 value: 0.06549154485525939 name: Cosine Precision@5 - type: cosine_precision@10 value: 0.046560619088564056 name: Cosine Precision@10 - type: cosine_recall@1 value: 0.07781599312123817 name: Cosine Recall@1 - type: cosine_recall@3 value: 0.235024362281456 name: Cosine Recall@3 - type: cosine_recall@5 value: 0.32745772427629694 name: Cosine Recall@5 - type: cosine_recall@10 value: 0.4656061908856406 name: Cosine Recall@10 - type: cosine_ndcg@10 value: 0.25020804232360305 name: Cosine Ndcg@10 - type: cosine_mrr@10 value: 0.1837239601104605 name: Cosine Mrr@10 - type: cosine_map@100 value: 0.19468382782021346 name: Cosine Map@100 --- # GTE-base Votum Case Law This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [Alibaba-NLP/gte-base-en-v1.5](https://huggingface.co/Alibaba-NLP/gte-base-en-v1.5) on the json dataset. It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more. 
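Because the training setup described below uses a Matryoshka objective with 768- and 512-dimensional heads, the embeddings can also be truncated to their first 512 dimensions and re-normalized, trading a little retrieval quality (see the dim_512 metrics below) for smaller indexes. The snippet that follows is an illustrative sketch, not part of the original card: the query and corpus strings are made up, and truncate-then-renormalize is the generic Matryoshka recipe rather than a documented API of this model.

```python
# Illustrative sketch: Matryoshka-style truncation of this model's embeddings to 512 dims.
# Assumptions: sentence-transformers installed, checkpoint id as used elsewhere in this card,
# and made-up query/corpus strings for demonstration only.
import numpy as np
from sentence_transformers import SentenceTransformer

# trust_remote_code is passed because the GTE base architecture ships custom modeling code
# (see the `custom_code` tag on this model).
model = SentenceTransformer("Tejasw1/votum-case-law-v1", trust_remote_code=True)

query = "Which cases discuss default bail under Section 167(2) CrPC?"
corpus = [
    "Summary of a ruling granting default bail under Section 167(2) Cr.P.C. ...",
    "Summary of a trademark dispute over the descriptive term 'Split View' ...",
]

embeddings = model.encode([query] + corpus)   # numpy array of shape (3, 768)
truncated = embeddings[:, :512]               # keep the first 512 Matryoshka dimensions
truncated = truncated / np.linalg.norm(truncated, axis=1, keepdims=True)  # re-normalize

scores = truncated[1:] @ truncated[0]         # cosine similarity of each summary vs. the query
print(scores)                                 # higher score = better retrieval match
```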
## Model Details ### Model Description - **Model Type:** Sentence Transformer - **Base model:** [Alibaba-NLP/gte-base-en-v1.5](https://huggingface.co/Alibaba-NLP/gte-base-en-v1.5) <!-- at revision a829fd0e060bb84554da0dfd354d0de0f7712b7f --> - **Maximum Sequence Length:** 8192 tokens - **Output Dimensionality:** 768 dimensions - **Similarity Function:** Cosine Similarity - **Training Dataset:** - json - **Language:** en - **License:** apache-2.0 ### Model Sources - **Documentation:** [Sentence Transformers Documentation](https://sbert.net) - **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers) - **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers) ### Full Model Architecture ``` SentenceTransformer( (0): Transformer({'max_seq_length': 8192, 'do_lower_case': False}) with Transformer model: NewModel (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True}) ) ``` ## Usage ### Direct Usage (Sentence Transformers) First install the Sentence Transformers library: ```bash pip install -U sentence-transformers ``` Then you can load this model and run inference. ```python from sentence_transformers import SentenceTransformer # Download from the 🤗 Hub model = SentenceTransformer("Tejasw1/votum-case-law-v1") # Run inference sentences = [ 'What role does the liquidator play in verifying the claims and charges of secured creditors during the liquidation of a corporate debtor?', "**1. Key Legal Issues and Holdings:**\n\n* **Priority of Charges:** The main legal issue is the priority of charges on the secured assets of the corporate debtor, Reid and Taylor India Ltd.\n* **Insolvency and Bankruptcy Code, 2016:** The court considered the provisions of the Insolvency and Bankruptcy Code, 2016, particularly Section 52 and Regulation 37 of the Insolvency and Bankruptcy Board of India (Liquidation Process) Regulations, 2016.\n* **Security Interest:** The court examined the security interest held by the applicant, Finquest Financial Solutions P. Ltd., and other financial creditors, including Edelweiss Asset Reconstruction Co. Ltd.\n* **Entitlement to Realize Security Interest:** The court held that the applicant is entitled to realize their security interest in the manner specified under Section 52(1)(b) read with Regulation 37 of the IBBI (Liquidation Process) Regulations, 2016.\n\n**2. Significant Facts of the Case:**\n\n* The applicant, Finquest Financial Solutions P. Ltd., is a secured creditor with a first pari passu charge on the immovable fixed assets of the corporate debtor.\n* Edelweiss Asset Reconstruction Co. Ltd. is also a secured creditor with a claim on the same assets.\n* The corporate debtor, Reid and Taylor India Ltd., has been under liquidation.\n* Suit No. 84 of 2013 is pending in the Civil Judge (Senior Division), Nanjangud, challenging the first charge created by IDM.\n* The liquidator has verified the documents and found that the applicant is the sole first charge holder of the immovable property of the corporate debtor at Mysore.\n* The Edelweiss had not obtained an NOC from the IDM and had not ventilated their grievance or enforced their rights before any forum.\n\n**3. 
Court's Ruling:**\n\n* The court ruled that the applicant, Finquest Financial Solutions P. Ltd., is entitled to realize their security interest in the manner specified under Section 52(1)(b) read with Regulation 37 of the IBBI (Liquidation Process) Regulations, 2016.\n* The court held that the applicant is the first charge holder of the immovable fixed assets of the corporate debtor.\n* The court dismissed the objection of Edelweiss Asset Reconstruction Co. Ltd. regarding the priority of charges.\n* The court directed the liquidator to hand over the symbolic possession of the fixed assets of the corporate debtor to the applicant to enable them to proceed with the sale of the assets.\n* The court directed the liquidator to inform the Tribunal about the manner and progress of the sale of assets from time-to-time for further directions/instructions.\n\n**4. Citations:**\n\n* **Insolvency and Bankruptcy Code, 2016**\n* **Regulation 37 of the Insolvency and Bankruptcy Board of India (Liquidation Process) Regulations, 2016**\n* **Suit No. 84 of 2013 filed with the Court of Civil Judge (Senior Division), Nanjangud, Karnataka**", "**1. Key Legal Issues and Holdings:**\n\n* **Dowry and Cruelty:** The case revolves around allegations of dowry demands and cruelty by the husband (petitioner) towards his wife.\n* **Section 498-A IPC:** The main legal issue is the application of Section 498-A of the Indian Penal Code, 1860, which deals with cruelty by the husband or his relatives towards a married woman.\n* **Sentencing:** The court considered the appropriateness of the sentence awarded to the petitioner under Section 498-A IPC.\n\n**2. Significant Facts of the Case:**\n\n* The petitioner, Mangat Ram, was convicted under Section 498-A IPC.\n* He was sentenced to one year imprisonment and a fine.\n* He appealed the conviction and sentence, which was dismissed.\n* He then filed a revision petition, seeking a reduction in sentence.\n* The petitioner had already served over two months in prison.\n* The complainant (wife) had obtained an ex-parte divorce decree.\n\n**3. Court's Ruling:**\n\n* The High Court upheld the conviction of the petitioner under Section 498-A IPC.\n* The court reduced the sentence to the period already undergone by the petitioner.\n* The court enhanced the fine to Rs. 5000/-.\n\n**4. Citations:**\n\n* **Yogendra Yadav v. State of Jharkhand**, Criminal Appeal No. 1205 of 2014\n* **Lajpat Rai v. State of Haryana**, Criminal Revision No. 1380 of 1999\n\n**Refined Summary (Updated):**\n\n**1. Key Legal Issues and Holdings:**\n\n* **Default Bail under Section 167(2) Cr.P.C.:** The court considered the applicability of default bail under Section 167(2) Cr.P.C. in cases where the investigating agency fails to file the final report within the prescribed time limit.\n* **Investigation and Filing of Challan:** The court held that the investigation is not considered incomplete merely because the investigating officer awaits reports of experts or fails to append certain documents to the police report.\n* **Role of the Court:** The court emphasized its role in determining whether to permit the prosecutor to adduce evidence of experts and to balance the interest of the accused with the interest of justice.\n\n**2. 
Significant Facts of the Case:**\n\n* The petitioners, Sukhwinder Kumar @ Sukha, Harpreet Singh @ Bahadur, Navjit Singh, and Rakesh Kumar @ Kesha, were accused of offenses under the Narcotic Drugs and Psychotropic Substances (NDPS) Act, 1985.\n* They filed revision petitions seeking default bail under Section 167(2) Cr.P.C.\n* The prosecution opposed their claims, arguing that the investigating agency had not failed to file the final report within the prescribed time limit.\n* The court considered the rival contentions and held that the petitioners were entitled to default bail.\n\n**3. Court's Ruling:**\n\n* The court disposed of the revision petitions, releasing the petitioners on interim bail till the filing of the report under Section 173 Cr.P.C.\n* The court emphasized the importance of the investigating agency and the prosecuting agency complying with statutory provisions to avoid delay in completing investigations and filing challans.\n* The court noted that the respondent-State had failed to comply with statutory provisions, resulting in the accused getting benefit of default bail.\n\n**4. Citations:**\n\n* **Abdul Azeez P.V. v. National Investigation Agency**, 2015 (1) RCR (Criminal) 239\n* **Mehal Singh v. State of Haryana**, 1978 PLR 480", ] embeddings = model.encode(sentences) print(embeddings.shape) # [3, 768] # Get the similarity scores for the embeddings similarities = model.similarity(embeddings, embeddings) print(similarities.shape) # [3, 3] ``` <!-- ### Direct Usage (Transformers) <details><summary>Click to see the direct usage in Transformers</summary> </details> --> <!-- ### Downstream Usage (Sentence Transformers) You can finetune this model on your own dataset. <details><summary>Click to expand</summary> </details> --> <!-- ### Out-of-Scope Use *List how the model may foreseeably be misused and address what users ought not to do with the model.* --> ## Evaluation ### Metrics #### Information Retrieval * Datasets: `dim_768` and `dim_512` * Evaluated with [<code>InformationRetrievalEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.InformationRetrievalEvaluator) | Metric | dim_768 | dim_512 | |:--------------------|:-----------|:-----------| | cosine_accuracy@1 | 0.0824 | 0.0778 | | cosine_accuracy@3 | 0.2484 | 0.235 | | cosine_accuracy@5 | 0.3394 | 0.3275 | | cosine_accuracy@10 | 0.4761 | 0.4656 | | cosine_precision@1 | 0.0824 | 0.0778 | | cosine_precision@3 | 0.0828 | 0.0783 | | cosine_precision@5 | 0.0679 | 0.0655 | | cosine_precision@10 | 0.0476 | 0.0466 | | cosine_recall@1 | 0.0824 | 0.0778 | | cosine_recall@3 | 0.2484 | 0.235 | | cosine_recall@5 | 0.3394 | 0.3275 | | cosine_recall@10 | 0.4761 | 0.4656 | | **cosine_ndcg@10** | **0.2582** | **0.2502** | | cosine_mrr@10 | 0.1909 | 0.1837 | | cosine_map@100 | 0.2018 | 0.1947 | <!-- ## Bias, Risks and Limitations *What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.* --> <!-- ### Recommendations *What are recommendations with respect to the foreseeable issues? 
For example, filtering explicit content.* --> ## Training Details ### Training Dataset #### json * Dataset: json * Size: 132,576 training samples * Columns: <code>anchor</code> and <code>positive</code> * Approximate statistics based on the first 1000 samples: | | anchor | positive | |:--------|:-----------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------| | type | string | string | | details | <ul><li>min: 2 tokens</li><li>mean: 26.94 tokens</li><li>max: 199 tokens</li></ul> | <ul><li>min: 298 tokens</li><li>mean: 543.71 tokens</li><li>max: 1266 tokens</li></ul> | * Samples: | anchor | positive | |:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | <code>What are the legal implications of a court setting aside an order related to the initiation of a Corporate Insolvency Resolution Process due to a pre-existing dispute?</code> | <code>**1. Key Legal Issues and Holdings:**<br><br>* **Existence of Dispute:** The main legal issue is whether there was an existence of dispute prior to the issuance of the Demand Notice dated 11.04.2019.<br>* **Section 8 of IBC:** The court considered the application of Section 8 of the Insolvency and Bankruptcy Code, 2016, which deals with the requirement of a dispute to be raised by the corporate debtor in response to a demand notice.<br>* **Admissibility of Corporate Insolvency Resolution Process (CIRP):** The court's ruling affected the admissibility of the CIRP against the corporate debtor.<br><br>**2. Significant Facts of the Case:**<br><br>* The corporate debtor, Triumph Realty Pvt. Ltd., had a pre-existing dispute with the operational creditor, Tech India Engineers Pvt. Ltd.<br>* The operational creditor issued a demand notice dated 11.04.2019, which was received by the corporate debtor on 16.04.2019.<br>* The corporate debtor raised disputes through e-mails dated 04.10.2018, 01.11.2018, and 04.12.2018, among o...</code> | | <code>How does the court assess whether a dispute is genuine or merely spurious, hypothetical, or illusory?</code> | <code>**1. 
Key Legal Issues and Holdings:**<br><br>* **Existence of Dispute:** The court considered whether a dispute existed between the parties before the issuance of the Demand Notice under Section 9 of the Insolvency and Bankruptcy Code, 2016.<br>* **Pre-existing Dispute:** The court relied on the principle laid down by the Hon'ble Supreme Court in "Mobilox Innovations Private Limited v. KIRUSA Software Pvt. Ltd." that a dispute must be pre-existing before the receipt of the Demand Notice.<br>* **Section 8 of the Code:** The court analyzed the provisions of Section 8 of the Code, which deals with the procedure for an operational creditor to initiate insolvency proceedings against a corporate debtor.<br>* **Nature of Dispute:** The court held that the dispute was genuine and not spurious, hypothetical, or illusory, and that the corporate debtor had raised a plausible contention that required further investigation.<br><br>**2. Significant Facts of the Case:**<br><br>* The operational creditor, Nirmal K. Dhiran, supp...</code> | | <code>What are the legal implications of dowry demands and cruelty under Indian law, particularly in the context of Section 498-A IPC?</code> | <code>**1. Key Legal Issues and Holdings:**<br><br>* **Dowry and Cruelty:** The case revolves around allegations of dowry demands and cruelty by the husband (petitioner) towards his wife.<br>* **Section 498-A IPC:** The main legal issue is the application of Section 498-A of the Indian Penal Code, 1860, which deals with cruelty by the husband or his relatives towards a married woman.<br>* **Rent Control and Eviction:** The case also involves a dispute over rent control and eviction under the Uttar Pradesh Urban Buildings (Regulation of Letting, Rent and Eviction) Act, 1972.<br><br>**2. Significant Facts of the Case:**<br><br>* The petitioner, Mangat Ram, was convicted under Section 498-A IPC.<br>* He was sentenced to one year imprisonment and a fine.<br>* He appealed the conviction and sentence, which was dismissed.<br>* He then filed a revision petition, seeking a reduction in sentence.<br>* The petitioner had already served over two months in prison.<br>* The complainant (wife) had obtained an ex-parte divorce decree.<br><br>**3. 
Cou...</code> | * Loss: [<code>MatryoshkaLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#matryoshkaloss) with these parameters: ```json { "loss": "MultipleNegativesRankingLoss", "matryoshka_dims": [ 768, 512 ], "matryoshka_weights": [ 1, 1 ], "n_dims_per_step": -1 } ``` ### Training Hyperparameters #### Non-Default Hyperparameters - `eval_strategy`: epoch - `gradient_accumulation_steps`: 8 - `learning_rate`: 2e-05 - `num_train_epochs`: 4 - `lr_scheduler_type`: cosine - `warmup_ratio`: 0.1 - `bf16`: True - `tf32`: True - `load_best_model_at_end`: True - `optim`: adamw_torch_fused - `batch_sampler`: no_duplicates #### All Hyperparameters <details><summary>Click to expand</summary> - `overwrite_output_dir`: False - `do_predict`: False - `eval_strategy`: epoch - `prediction_loss_only`: True - `per_device_train_batch_size`: 8 - `per_device_eval_batch_size`: 8 - `per_gpu_train_batch_size`: None - `per_gpu_eval_batch_size`: None - `gradient_accumulation_steps`: 8 - `eval_accumulation_steps`: None - `torch_empty_cache_steps`: None - `learning_rate`: 2e-05 - `weight_decay`: 0.0 - `adam_beta1`: 0.9 - `adam_beta2`: 0.999 - `adam_epsilon`: 1e-08 - `max_grad_norm`: 1.0 - `num_train_epochs`: 4 - `max_steps`: -1 - `lr_scheduler_type`: cosine - `lr_scheduler_kwargs`: {} - `warmup_ratio`: 0.1 - `warmup_steps`: 0 - `log_level`: passive - `log_level_replica`: warning - `log_on_each_node`: True - `logging_nan_inf_filter`: True - `save_safetensors`: True - `save_on_each_node`: False - `save_only_model`: False - `restore_callback_states_from_checkpoint`: False - `no_cuda`: False - `use_cpu`: False - `use_mps_device`: False - `seed`: 42 - `data_seed`: None - `jit_mode_eval`: False - `use_ipex`: False - `bf16`: True - `fp16`: False - `fp16_opt_level`: O1 - `half_precision_backend`: auto - `bf16_full_eval`: False - `fp16_full_eval`: False - `tf32`: True - `local_rank`: 0 - `ddp_backend`: None - `tpu_num_cores`: None - `tpu_metrics_debug`: False - `debug`: [] - `dataloader_drop_last`: False - `dataloader_num_workers`: 0 - `dataloader_prefetch_factor`: None - `past_index`: -1 - `disable_tqdm`: False - `remove_unused_columns`: True - `label_names`: None - `load_best_model_at_end`: True - `ignore_data_skip`: False - `fsdp`: [] - `fsdp_min_num_params`: 0 - `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False} - `fsdp_transformer_layer_cls_to_wrap`: None - `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None} - `deepspeed`: None - `label_smoothing_factor`: 0.0 - `optim`: adamw_torch_fused - `optim_args`: None - `adafactor`: False - `group_by_length`: False - `length_column_name`: length - `ddp_find_unused_parameters`: None - `ddp_bucket_cap_mb`: None - `ddp_broadcast_buffers`: False - `dataloader_pin_memory`: True - `dataloader_persistent_workers`: False - `skip_memory_metrics`: True - `use_legacy_prediction_loop`: False - `push_to_hub`: False - `resume_from_checkpoint`: None - `hub_model_id`: None - `hub_strategy`: every_save - `hub_private_repo`: False - `hub_always_push`: False - `gradient_checkpointing`: False - `gradient_checkpointing_kwargs`: None - `include_inputs_for_metrics`: False - `include_for_metrics`: [] - `eval_do_concat_batches`: True - `fp16_backend`: auto - `push_to_hub_model_id`: None - `push_to_hub_organization`: None - `mp_parameters`: - `auto_find_batch_size`: False - 
`full_determinism`: False - `torchdynamo`: None - `ray_scope`: last - `ddp_timeout`: 1800 - `torch_compile`: False - `torch_compile_backend`: None - `torch_compile_mode`: None - `dispatch_batches`: None - `split_batches`: None - `include_tokens_per_second`: False - `include_num_input_tokens_seen`: False - `neftune_noise_alpha`: None - `optim_target_modules`: None - `batch_eval_metrics`: False - `eval_on_start`: False - `use_liger_kernel`: False - `eval_use_gather_object`: False - `average_tokens_across_devices`: False - `prompts`: None - `batch_sampler`: no_duplicates - `multi_dataset_batch_sampler`: proportional </details> ### Training Logs <details><summary>Click to expand</summary> | Epoch | Step | Training Loss | dim_768_cosine_ndcg@10 | dim_512_cosine_ndcg@10 | |:----------:|:--------:|:-------------:|:----------------------:|:----------------------:| | 0.0048 | 10 | 0.4645 | - | - | | 0.0097 | 20 | 0.4746 | - | - | | 0.0145 | 30 | 0.4692 | - | - | | 0.0193 | 40 | 0.4603 | - | - | | 0.0241 | 50 | 0.3954 | - | - | | 0.0290 | 60 | 0.4071 | - | - | | 0.0338 | 70 | 0.4232 | - | - | | 0.0386 | 80 | 0.374 | - | - | | 0.0434 | 90 | 0.3748 | - | - | | 0.0483 | 100 | 0.3046 | - | - | | 0.0531 | 110 | 0.3648 | - | - | | 0.0579 | 120 | 0.2515 | - | - | | 0.0628 | 130 | 0.3437 | - | - | | 0.0676 | 140 | 0.298 | - | - | | 0.0724 | 150 | 0.2658 | - | - | | 0.0772 | 160 | 0.2989 | - | - | | 0.0821 | 170 | 0.2322 | - | - | | 0.0869 | 180 | 0.2816 | - | - | | 0.0917 | 190 | 0.2436 | - | - | | 0.0965 | 200 | 0.2335 | - | - | | 0.1014 | 210 | 0.2156 | - | - | | 0.1062 | 220 | 0.2305 | - | - | | 0.1110 | 230 | 0.228 | - | - | | 0.1159 | 240 | 0.2192 | - | - | | 0.1207 | 250 | 0.2337 | - | - | | 0.1255 | 260 | 0.2594 | - | - | | 0.1303 | 270 | 0.1794 | - | - | | 0.1352 | 280 | 0.1701 | - | - | | 0.1400 | 290 | 0.1981 | - | - | | 0.1448 | 300 | 0.2264 | - | - | | 0.1497 | 310 | 0.2418 | - | - | | 0.1545 | 320 | 0.292 | - | - | | 0.1593 | 330 | 0.2112 | - | - | | 0.1641 | 340 | 0.1933 | - | - | | 0.1690 | 350 | 0.1779 | - | - | | 0.1738 | 360 | 0.2294 | - | - | | 0.1786 | 370 | 0.2104 | - | - | | 0.1834 | 380 | 0.2286 | - | - | | 0.1883 | 390 | 0.2752 | - | - | | 0.1931 | 400 | 0.1852 | - | - | | 0.1979 | 410 | 0.2052 | - | - | | 0.2028 | 420 | 0.1893 | - | - | | 0.2076 | 430 | 0.2466 | - | - | | 0.2124 | 440 | 0.2177 | - | - | | 0.2172 | 450 | 0.2506 | - | - | | 0.2221 | 460 | 0.1974 | - | - | | 0.2269 | 470 | 0.197 | - | - | | 0.2317 | 480 | 0.1777 | - | - | | 0.2365 | 490 | 0.1848 | - | - | | 0.2414 | 500 | 0.1661 | - | - | | 0.2462 | 510 | 0.2093 | - | - | | 0.2510 | 520 | 0.1178 | - | - | | 0.2559 | 530 | 0.2085 | - | - | | 0.2607 | 540 | 0.1609 | - | - | | 0.2655 | 550 | 0.1736 | - | - | | 0.2703 | 560 | 0.1503 | - | - | | 0.2752 | 570 | 0.1808 | - | - | | 0.2800 | 580 | 0.1614 | - | - | | 0.2848 | 590 | 0.2057 | - | - | | 0.2896 | 600 | 0.1916 | - | - | | 0.2945 | 610 | 0.1569 | - | - | | 0.2993 | 620 | 0.184 | - | - | | 0.3041 | 630 | 0.2615 | - | - | | 0.3090 | 640 | 0.2152 | - | - | | 0.3138 | 650 | 0.1426 | - | - | | 0.3186 | 660 | 0.145 | - | - | | 0.3234 | 670 | 0.1484 | - | - | | 0.3283 | 680 | 0.1567 | - | - | | 0.3331 | 690 | 0.1365 | - | - | | 0.3379 | 700 | 0.1594 | - | - | | 0.3427 | 710 | 0.1486 | - | - | | 0.3476 | 720 | 0.1663 | - | - | | 0.3524 | 730 | 0.2052 | - | - | | 0.3572 | 740 | 0.1777 | - | - | | 0.3621 | 750 | 0.1728 | - | - | | 0.3669 | 760 | 0.1669 | - | - | | 0.3717 | 770 | 0.1356 | - | - | | 0.3765 | 780 | 0.1706 | - | - | | 0.3814 | 790 | 0.1916 | - | - | | 0.3862 | 800 
| 0.1365 | - | - | | 0.3910 | 810 | 0.1392 | - | - | | 0.3958 | 820 | 0.1708 | - | - | | 0.4007 | 830 | 0.1971 | - | - | | 0.4055 | 840 | 0.1363 | - | - | | 0.4103 | 850 | 0.1411 | - | - | | 0.4152 | 860 | 0.1484 | - | - | | 0.4200 | 870 | 0.1767 | - | - | | 0.4248 | 880 | 0.1871 | - | - | | 0.4296 | 890 | 0.1393 | - | - | | 0.4345 | 900 | 0.2113 | - | - | | 0.4393 | 910 | 0.1614 | - | - | | 0.4441 | 920 | 0.1309 | - | - | | 0.4490 | 930 | 0.1329 | - | - | | 0.4538 | 940 | 0.2125 | - | - | | 0.4586 | 950 | 0.1929 | - | - | | 0.4634 | 960 | 0.1777 | - | - | | 0.4683 | 970 | 0.1813 | - | - | | 0.4731 | 980 | 0.1341 | - | - | | 0.4779 | 990 | 0.1025 | - | - | | 0.4827 | 1000 | 0.2471 | - | - | | 0.4876 | 1010 | 0.1696 | - | - | | 0.4924 | 1020 | 0.1144 | - | - | | 0.4972 | 1030 | 0.1537 | - | - | | 0.5021 | 1040 | 0.1389 | - | - | | 0.5069 | 1050 | 0.2184 | - | - | | 0.5117 | 1060 | 0.1473 | - | - | | 0.5165 | 1070 | 0.1494 | - | - | | 0.5214 | 1080 | 0.1568 | - | - | | 0.5262 | 1090 | 0.1656 | - | - | | 0.5310 | 1100 | 0.1555 | - | - | | 0.5358 | 1110 | 0.1108 | - | - | | 0.5407 | 1120 | 0.1163 | - | - | | 0.5455 | 1130 | 0.1549 | - | - | | 0.5503 | 1140 | 0.1638 | - | - | | 0.5552 | 1150 | 0.1575 | - | - | | 0.5600 | 1160 | 0.1294 | - | - | | 0.5648 | 1170 | 0.1402 | - | - | | 0.5696 | 1180 | 0.1539 | - | - | | 0.5745 | 1190 | 0.1249 | - | - | | 0.5793 | 1200 | 0.1042 | - | - | | 0.5841 | 1210 | 0.1681 | - | - | | 0.5889 | 1220 | 0.1744 | - | - | | 0.5938 | 1230 | 0.1144 | - | - | | 0.5986 | 1240 | 0.1183 | - | - | | 0.6034 | 1250 | 0.1397 | - | - | | 0.6083 | 1260 | 0.1938 | - | - | | 0.6131 | 1270 | 0.1194 | - | - | | 0.6179 | 1280 | 0.1374 | - | - | | 0.6227 | 1290 | 0.1203 | - | - | | 0.6276 | 1300 | 0.0766 | - | - | | 0.6324 | 1310 | 0.1337 | - | - | | 0.6372 | 1320 | 0.1695 | - | - | | 0.6420 | 1330 | 0.1179 | - | - | | 0.6469 | 1340 | 0.1316 | - | - | | 0.6517 | 1350 | 0.1294 | - | - | | 0.6565 | 1360 | 0.1125 | - | - | | 0.6614 | 1370 | 0.1629 | - | - | | 0.6662 | 1380 | 0.1094 | - | - | | 0.6710 | 1390 | 0.1479 | - | - | | 0.6758 | 1400 | 0.1479 | - | - | | 0.6807 | 1410 | 0.1608 | - | - | | 0.6855 | 1420 | 0.1422 | - | - | | 0.6903 | 1430 | 0.1735 | - | - | | 0.6951 | 1440 | 0.1403 | - | - | | 0.7000 | 1450 | 0.1306 | - | - | | 0.7048 | 1460 | 0.1497 | - | - | | 0.7096 | 1470 | 0.1154 | - | - | | 0.7145 | 1480 | 0.1308 | - | - | | 0.7193 | 1490 | 0.1514 | - | - | | 0.7241 | 1500 | 0.139 | - | - | | 0.7289 | 1510 | 0.1139 | - | - | | 0.7338 | 1520 | 0.1313 | - | - | | 0.7386 | 1530 | 0.1844 | - | - | | 0.7434 | 1540 | 0.1195 | - | - | | 0.7483 | 1550 | 0.1102 | - | - | | 0.7531 | 1560 | 0.1482 | - | - | | 0.7579 | 1570 | 0.1232 | - | - | | 0.7627 | 1580 | 0.1408 | - | - | | 0.7676 | 1590 | 0.1575 | - | - | | 0.7724 | 1600 | 0.1415 | - | - | | 0.7772 | 1610 | 0.1344 | - | - | | 0.7820 | 1620 | 0.1009 | - | - | | 0.7869 | 1630 | 0.1192 | - | - | | 0.7917 | 1640 | 0.1528 | - | - | | 0.7965 | 1650 | 0.1006 | - | - | | 0.8014 | 1660 | 0.0748 | - | - | | 0.8062 | 1670 | 0.1278 | - | - | | 0.8110 | 1680 | 0.1493 | - | - | | 0.8158 | 1690 | 0.1751 | - | - | | 0.8207 | 1700 | 0.1357 | - | - | | 0.8255 | 1710 | 0.1187 | - | - | | 0.8303 | 1720 | 0.1024 | - | - | | 0.8351 | 1730 | 0.1238 | - | - | | 0.8400 | 1740 | 0.1182 | - | - | | 0.8448 | 1750 | 0.0882 | - | - | | 0.8496 | 1760 | 0.1575 | - | - | | 0.8545 | 1770 | 0.1378 | - | - | | 0.8593 | 1780 | 0.1437 | - | - | | 0.8641 | 1790 | 0.1121 | - | - | | 0.8689 | 1800 | 0.1132 | - | - | | 0.8738 | 1810 | 0.136 | - | - | | 0.8786 | 1820 | 
0.1421 | - | - | | 0.8834 | 1830 | 0.1226 | - | - | | 0.8882 | 1840 | 0.1345 | - | - | | 0.8931 | 1850 | 0.132 | - | - | | 0.8979 | 1860 | 0.1698 | - | - | | 0.9027 | 1870 | 0.1307 | - | - | | 0.9076 | 1880 | 0.0975 | - | - | | 0.9124 | 1890 | 0.1166 | - | - | | 0.9172 | 1900 | 0.1228 | - | - | | 0.9220 | 1910 | 0.1339 | - | - | | 0.9269 | 1920 | 0.1015 | - | - | | 0.9317 | 1930 | 0.1037 | - | - | | 0.9365 | 1940 | 0.1246 | - | - | | 0.9413 | 1950 | 0.1302 | - | - | | 0.9462 | 1960 | 0.144 | - | - | | 0.9510 | 1970 | 0.128 | - | - | | 0.9558 | 1980 | 0.1592 | - | - | | 0.9607 | 1990 | 0.1218 | - | - | | 0.9655 | 2000 | 0.136 | - | - | | 0.9703 | 2010 | 0.1093 | - | - | | 0.9751 | 2020 | 0.1364 | - | - | | 0.9800 | 2030 | 0.1534 | - | - | | 0.9848 | 2040 | 0.1066 | - | - | | 0.9896 | 2050 | 0.0906 | - | - | | 0.9944 | 2060 | 0.1656 | - | - | | 0.9993 | 2070 | 0.1304 | - | - | | **0.9998** | **2071** | **-** | **0.2679** | **0.2559** | | 1.0041 | 2080 | 0.0858 | - | - | | 1.0089 | 2090 | 0.1428 | - | - | | 1.0138 | 2100 | 0.1223 | - | - | | 1.0186 | 2110 | 0.1171 | - | - | | 1.0234 | 2120 | 0.1148 | - | - | | 1.0282 | 2130 | 0.1135 | - | - | | 1.0331 | 2140 | 0.1257 | - | - | | 1.0379 | 2150 | 0.1401 | - | - | | 1.0427 | 2160 | 0.116 | - | - | | 1.0476 | 2170 | 0.0878 | - | - | | 1.0524 | 2180 | 0.1154 | - | - | | 1.0572 | 2190 | 0.0801 | - | - | | 1.0620 | 2200 | 0.118 | - | - | | 1.0669 | 2210 | 0.127 | - | - | | 1.0717 | 2220 | 0.125 | - | - | | 1.0765 | 2230 | 0.1178 | - | - | | 1.0813 | 2240 | 0.0835 | - | - | | 1.0862 | 2250 | 0.0968 | - | - | | 1.0910 | 2260 | 0.1122 | - | - | | 1.0958 | 2270 | 0.1019 | - | - | | 1.1007 | 2280 | 0.1086 | - | - | | 1.1055 | 2290 | 0.0991 | - | - | | 1.1103 | 2300 | 0.1141 | - | - | | 1.1151 | 2310 | 0.1424 | - | - | | 1.1200 | 2320 | 0.104 | - | - | | 1.1248 | 2330 | 0.1239 | - | - | | 1.1296 | 2340 | 0.0829 | - | - | | 1.1344 | 2350 | 0.0706 | - | - | | 1.1393 | 2360 | 0.0813 | - | - | | 1.1441 | 2370 | 0.0796 | - | - | | 1.1489 | 2380 | 0.1472 | - | - | | 1.1538 | 2390 | 0.1315 | - | - | | 1.1586 | 2400 | 0.1264 | - | - | | 1.1634 | 2410 | 0.0706 | - | - | | 1.1682 | 2420 | 0.0857 | - | - | | 1.1731 | 2430 | 0.1078 | - | - | | 1.1779 | 2440 | 0.0851 | - | - | | 1.1827 | 2450 | 0.1095 | - | - | | 1.1875 | 2460 | 0.1406 | - | - | | 1.1924 | 2470 | 0.0932 | - | - | | 1.1972 | 2480 | 0.1107 | - | - | | 1.2020 | 2490 | 0.0941 | - | - | | 1.2069 | 2500 | 0.0846 | - | - | | 1.2117 | 2510 | 0.0785 | - | - | | 1.2165 | 2520 | 0.0877 | - | - | | 1.2213 | 2530 | 0.0871 | - | - | | 1.2262 | 2540 | 0.0905 | - | - | | 1.2310 | 2550 | 0.0769 | - | - | | 1.2358 | 2560 | 0.0788 | - | - | | 1.2406 | 2570 | 0.066 | - | - | | 1.2455 | 2580 | 0.1077 | - | - | | 1.2503 | 2590 | 0.0717 | - | - | | 1.2551 | 2600 | 0.0902 | - | - | | 1.2600 | 2610 | 0.0779 | - | - | | 1.2648 | 2620 | 0.0735 | - | - | | 1.2696 | 2630 | 0.0475 | - | - | | 1.2744 | 2640 | 0.0549 | - | - | | 1.2793 | 2650 | 0.0699 | - | - | | 1.2841 | 2660 | 0.0804 | - | - | | 1.2889 | 2670 | 0.095 | - | - | | 1.2937 | 2680 | 0.0787 | - | - | | 1.2986 | 2690 | 0.0708 | - | - | | 1.3034 | 2700 | 0.1206 | - | - | | 1.3082 | 2710 | 0.0582 | - | - | | 1.3131 | 2720 | 0.0859 | - | - | | 1.3179 | 2730 | 0.0553 | - | - | | 1.3227 | 2740 | 0.0433 | - | - | | 1.3275 | 2750 | 0.0725 | - | - | | 1.3324 | 2760 | 0.0798 | - | - | | 1.3372 | 2770 | 0.0683 | - | - | | 1.3420 | 2780 | 0.0489 | - | - | | 1.3469 | 2790 | 0.0685 | - | - | | 1.3517 | 2800 | 0.0951 | - | - | | 1.3565 | 2810 | 0.073 | - | - | | 1.3613 | 2820 | 0.0687 
| - | - | | 1.3662 | 2830 | 0.0897 | - | - | | 1.3710 | 2840 | 0.0509 | - | - | | 1.3758 | 2850 | 0.0554 | - | - | | 1.3806 | 2860 | 0.0736 | - | - | | 1.3855 | 2870 | 0.0547 | - | - | | 1.3903 | 2880 | 0.046 | - | - | | 1.3951 | 2890 | 0.0553 | - | - | | 1.4000 | 2900 | 0.0888 | - | - | | 1.4048 | 2910 | 0.0487 | - | - | | 1.4096 | 2920 | 0.0358 | - | - | | 1.4144 | 2930 | 0.0434 | - | - | | 1.4193 | 2940 | 0.0402 | - | - | | 1.4241 | 2950 | 0.0581 | - | - | | 1.4289 | 2960 | 0.0761 | - | - | | 1.4337 | 2970 | 0.0766 | - | - | | 1.4386 | 2980 | 0.0662 | - | - | | 1.4434 | 2990 | 0.0434 | - | - | | 1.4482 | 3000 | 0.0437 | - | - | | 1.4531 | 3010 | 0.0777 | - | - | | 1.4579 | 3020 | 0.0766 | - | - | | 1.4627 | 3030 | 0.0455 | - | - | | 1.4675 | 3040 | 0.0894 | - | - | | 1.4724 | 3050 | 0.0532 | - | - | | 1.4772 | 3060 | 0.039 | - | - | | 1.4820 | 3070 | 0.1039 | - | - | | 1.4868 | 3080 | 0.0757 | - | - | | 1.4917 | 3090 | 0.0516 | - | - | | 1.4965 | 3100 | 0.0661 | - | - | | 1.5013 | 3110 | 0.0482 | - | - | | 1.5062 | 3120 | 0.0707 | - | - | | 1.5110 | 3130 | 0.0529 | - | - | | 1.5158 | 3140 | 0.0539 | - | - | | 1.5206 | 3150 | 0.0593 | - | - | | 1.5255 | 3160 | 0.0825 | - | - | | 1.5303 | 3170 | 0.0608 | - | - | | 1.5351 | 3180 | 0.0428 | - | - | | 1.5399 | 3190 | 0.0426 | - | - | | 1.5448 | 3200 | 0.0515 | - | - | | 1.5496 | 3210 | 0.0605 | - | - | | 1.5544 | 3220 | 0.092 | - | - | | 1.5593 | 3230 | 0.0382 | - | - | | 1.5641 | 3240 | 0.0543 | - | - | | 1.5689 | 3250 | 0.0624 | - | - | | 1.5737 | 3260 | 0.0483 | - | - | | 1.5786 | 3270 | 0.0454 | - | - | | 1.5834 | 3280 | 0.0584 | - | - | | 1.5882 | 3290 | 0.0745 | - | - | | 1.5930 | 3300 | 0.04 | - | - | | 1.5979 | 3310 | 0.0434 | - | - | | 1.6027 | 3320 | 0.0483 | - | - | | 1.6075 | 3330 | 0.0928 | - | - | | 1.6124 | 3340 | 0.0532 | - | - | | 1.6172 | 3350 | 0.0498 | - | - | | 1.6220 | 3360 | 0.0469 | - | - | | 1.6268 | 3370 | 0.0274 | - | - | | 1.6317 | 3380 | 0.0379 | - | - | | 1.6365 | 3390 | 0.0478 | - | - | | 1.6413 | 3400 | 0.0506 | - | - | | 1.6462 | 3410 | 0.057 | - | - | | 1.6510 | 3420 | 0.0471 | - | - | | 1.6558 | 3430 | 0.0541 | - | - | | 1.6606 | 3440 | 0.0726 | - | - | | 1.6655 | 3450 | 0.0389 | - | - | | 1.6703 | 3460 | 0.0679 | - | - | | 1.6751 | 3470 | 0.0584 | - | - | | 1.6799 | 3480 | 0.0653 | - | - | | 1.6848 | 3490 | 0.06 | - | - | | 1.6896 | 3500 | 0.0592 | - | - | | 1.6944 | 3510 | 0.059 | - | - | | 1.6993 | 3520 | 0.0517 | - | - | | 1.7041 | 3530 | 0.0495 | - | - | | 1.7089 | 3540 | 0.0455 | - | - | | 1.7137 | 3550 | 0.0377 | - | - | | 1.7186 | 3560 | 0.0539 | - | - | | 1.7234 | 3570 | 0.0401 | - | - | | 1.7282 | 3580 | 0.0389 | - | - | | 1.7330 | 3590 | 0.0482 | - | - | | 1.7379 | 3600 | 0.0671 | - | - | | 1.7427 | 3610 | 0.057 | - | - | | 1.7475 | 3620 | 0.0389 | - | - | | 1.7524 | 3630 | 0.0515 | - | - | | 1.7572 | 3640 | 0.0356 | - | - | | 1.7620 | 3650 | 0.0537 | - | - | | 1.7668 | 3660 | 0.0617 | - | - | | 1.7717 | 3670 | 0.0465 | - | - | | 1.7765 | 3680 | 0.0538 | - | - | | 1.7813 | 3690 | 0.0445 | - | - | | 1.7861 | 3700 | 0.0417 | - | - | | 1.7910 | 3710 | 0.0543 | - | - | | 1.7958 | 3720 | 0.0387 | - | - | | 1.8006 | 3730 | 0.0319 | - | - | | 1.8055 | 3740 | 0.0518 | - | - | | 1.8103 | 3750 | 0.0572 | - | - | | 1.8151 | 3760 | 0.0815 | - | - | | 1.8199 | 3770 | 0.0609 | - | - | | 1.8248 | 3780 | 0.0428 | - | - | | 1.8296 | 3790 | 0.0271 | - | - | | 1.8344 | 3800 | 0.0296 | - | - | | 1.8392 | 3810 | 0.047 | - | - | | 1.8441 | 3820 | 0.031 | - | - | | 1.8489 | 3830 | 0.0596 | - | - | | 1.8537 | 3840 | 
0.0615 | - | - | | 1.8586 | 3850 | 0.0467 | - | - | | 1.8634 | 3860 | 0.0516 | - | - | | 1.8682 | 3870 | 0.0555 | - | - | | 1.8730 | 3880 | 0.0446 | - | - | | 1.8779 | 3890 | 0.0872 | - | - | | 1.8827 | 3900 | 0.0408 | - | - | | 1.8875 | 3910 | 0.0607 | - | - | | 1.8923 | 3920 | 0.0415 | - | - | | 1.8972 | 3930 | 0.0586 | - | - | | 1.9020 | 3940 | 0.0526 | - | - | | 1.9068 | 3950 | 0.0447 | - | - | | 1.9117 | 3960 | 0.0565 | - | - | | 1.9165 | 3970 | 0.0663 | - | - | | 1.9213 | 3980 | 0.0476 | - | - | | 1.9261 | 3990 | 0.0393 | - | - | | 1.9310 | 4000 | 0.0407 | - | - | | 1.9358 | 4010 | 0.0403 | - | - | | 1.9406 | 4020 | 0.0413 | - | - | | 1.9455 | 4030 | 0.0484 | - | - | | 1.9503 | 4040 | 0.0581 | - | - | | 1.9551 | 4050 | 0.0633 | - | - | | 1.9599 | 4060 | 0.0444 | - | - | | 1.9648 | 4070 | 0.0529 | - | - | | 1.9696 | 4080 | 0.0423 | - | - | | 1.9744 | 4090 | 0.0527 | - | - | | 1.9792 | 4100 | 0.0719 | - | - | | 1.9841 | 4110 | 0.0479 | - | - | | 1.9889 | 4120 | 0.0478 | - | - | | 1.9937 | 4130 | 0.0708 | - | - | | 1.9986 | 4140 | 0.058 | - | - | | 2.0 | 4143 | - | 0.2672 | 0.2575 | | 2.0034 | 4150 | 0.0274 | - | - | | 2.0082 | 4160 | 0.0384 | - | - | | 2.0130 | 4170 | 0.0639 | - | - | | 2.0179 | 4180 | 0.0462 | - | - | | 2.0227 | 4190 | 0.0438 | - | - | | 2.0275 | 4200 | 0.0395 | - | - | | 2.0323 | 4210 | 0.0591 | - | - | | 2.0372 | 4220 | 0.0519 | - | - | | 2.0420 | 4230 | 0.0543 | - | - | | 2.0468 | 4240 | 0.0292 | - | - | | 2.0517 | 4250 | 0.0449 | - | - | | 2.0565 | 4260 | 0.0552 | - | - | | 2.0613 | 4270 | 0.0398 | - | - | | 2.0661 | 4280 | 0.0647 | - | - | | 2.0710 | 4290 | 0.0401 | - | - | | 2.0758 | 4300 | 0.0419 | - | - | | 2.0806 | 4310 | 0.0369 | - | - | | 2.0854 | 4320 | 0.0271 | - | - | | 2.0903 | 4330 | 0.074 | - | - | | 2.0951 | 4340 | 0.0454 | - | - | | 2.0999 | 4350 | 0.0439 | - | - | | 2.1048 | 4360 | 0.0509 | - | - | | 2.1096 | 4370 | 0.0677 | - | - | | 2.1144 | 4380 | 0.0514 | - | - | | 2.1192 | 4390 | 0.0437 | - | - | | 2.1241 | 4400 | 0.069 | - | - | | 2.1289 | 4410 | 0.0288 | - | - | | 2.1337 | 4420 | 0.0323 | - | - | | 2.1385 | 4430 | 0.0233 | - | - | | 2.1434 | 4440 | 0.0322 | - | - | | 2.1482 | 4450 | 0.0627 | - | - | | 2.1530 | 4460 | 0.0557 | - | - | | 2.1579 | 4470 | 0.0649 | - | - | | 2.1627 | 4480 | 0.0305 | - | - | | 2.1675 | 4490 | 0.0267 | - | - | | 2.1723 | 4500 | 0.0325 | - | - | | 2.1772 | 4510 | 0.034 | - | - | | 2.1820 | 4520 | 0.0461 | - | - | | 2.1868 | 4530 | 0.0679 | - | - | | 2.1916 | 4540 | 0.033 | - | - | | 2.1965 | 4550 | 0.0483 | - | - | | 2.2013 | 4560 | 0.0425 | - | - | | 2.2061 | 4570 | 0.0336 | - | - | | 2.2110 | 4580 | 0.034 | - | - | | 2.2158 | 4590 | 0.0382 | - | - | | 2.2206 | 4600 | 0.0372 | - | - | | 2.2254 | 4610 | 0.0396 | - | - | | 2.2303 | 4620 | 0.0299 | - | - | | 2.2351 | 4630 | 0.0258 | - | - | | 2.2399 | 4640 | 0.0322 | - | - | | 2.2448 | 4650 | 0.0392 | - | - | | 2.2496 | 4660 | 0.0396 | - | - | | 2.2544 | 4670 | 0.0406 | - | - | | 2.2592 | 4680 | 0.0285 | - | - | | 2.2641 | 4690 | 0.0337 | - | - | | 2.2689 | 4700 | 0.0238 | - | - | | 2.2737 | 4710 | 0.02 | - | - | | 2.2785 | 4720 | 0.0347 | - | - | | 2.2834 | 4730 | 0.0238 | - | - | | 2.2882 | 4740 | 0.045 | - | - | | 2.2930 | 4750 | 0.0297 | - | - | | 2.2979 | 4760 | 0.0319 | - | - | | 2.3027 | 4770 | 0.0502 | - | - | | 2.3075 | 4780 | 0.0362 | - | - | | 2.3123 | 4790 | 0.0329 | - | - | | 2.3172 | 4800 | 0.0219 | - | - | | 2.3220 | 4810 | 0.0176 | - | - | | 2.3268 | 4820 | 0.0282 | - | - | | 2.3316 | 4830 | 0.0374 | - | - | | 2.3365 | 4840 | 0.0429 | - | - | | 2.3413 
| 4850 | 0.0164 | - | - | | 2.3461 | 4860 | 0.0404 | - | - | | 2.3510 | 4870 | 0.0287 | - | - | | 2.3558 | 4880 | 0.0239 | - | - | | 2.3606 | 4890 | 0.0402 | - | - | | 2.3654 | 4900 | 0.0341 | - | - | | 2.3703 | 4910 | 0.0204 | - | - | | 2.3751 | 4920 | 0.0328 | - | - | | 2.3799 | 4930 | 0.0388 | - | - | | 2.3847 | 4940 | 0.0222 | - | - | | 2.3896 | 4950 | 0.0221 | - | - | | 2.3944 | 4960 | 0.0318 | - | - | | 2.3992 | 4970 | 0.0401 | - | - | | 2.4041 | 4980 | 0.0171 | - | - | | 2.4089 | 4990 | 0.0195 | - | - | | 2.4137 | 5000 | 0.019 | - | - | | 2.4185 | 5010 | 0.0163 | - | - | | 2.4234 | 5020 | 0.0278 | - | - | | 2.4282 | 5030 | 0.0399 | - | - | | 2.4330 | 5040 | 0.0412 | - | - | | 2.4378 | 5050 | 0.0254 | - | - | | 2.4427 | 5060 | 0.0175 | - | - | | 2.4475 | 5070 | 0.0251 | - | - | | 2.4523 | 5080 | 0.0256 | - | - | | 2.4572 | 5090 | 0.0294 | - | - | | 2.4620 | 5100 | 0.0278 | - | - | | 2.4668 | 5110 | 0.0435 | - | - | | 2.4716 | 5120 | 0.0189 | - | - | | 2.4765 | 5130 | 0.0195 | - | - | | 2.4813 | 5140 | 0.045 | - | - | | 2.4861 | 5150 | 0.0614 | - | - | | 2.4909 | 5160 | 0.0234 | - | - | | 2.4958 | 5170 | 0.0267 | - | - | | 2.5006 | 5180 | 0.0294 | - | - | | 2.5054 | 5190 | 0.0232 | - | - | | 2.5103 | 5200 | 0.026 | - | - | | 2.5151 | 5210 | 0.0292 | - | - | | 2.5199 | 5220 | 0.0335 | - | - | | 2.5247 | 5230 | 0.0311 | - | - | | 2.5296 | 5240 | 0.0248 | - | - | | 2.5344 | 5250 | 0.0223 | - | - | | 2.5392 | 5260 | 0.0188 | - | - | | 2.5441 | 5270 | 0.0206 | - | - | | 2.5489 | 5280 | 0.0264 | - | - | | 2.5537 | 5290 | 0.0479 | - | - | | 2.5585 | 5300 | 0.0181 | - | - | | 2.5634 | 5310 | 0.0212 | - | - | | 2.5682 | 5320 | 0.0207 | - | - | | 2.5730 | 5330 | 0.0233 | - | - | | 2.5778 | 5340 | 0.0227 | - | - | | 2.5827 | 5350 | 0.0239 | - | - | | 2.5875 | 5360 | 0.0267 | - | - | | 2.5923 | 5370 | 0.0215 | - | - | | 2.5972 | 5380 | 0.0164 | - | - | | 2.6020 | 5390 | 0.021 | - | - | | 2.6068 | 5400 | 0.0392 | - | - | | 2.6116 | 5410 | 0.0277 | - | - | | 2.6165 | 5420 | 0.0219 | - | - | | 2.6213 | 5430 | 0.0221 | - | - | | 2.6261 | 5440 | 0.018 | - | - | | 2.6309 | 5450 | 0.0159 | - | - | | 2.6358 | 5460 | 0.0213 | - | - | | 2.6406 | 5470 | 0.0239 | - | - | | 2.6454 | 5480 | 0.0289 | - | - | | 2.6503 | 5490 | 0.0229 | - | - | | 2.6551 | 5500 | 0.0307 | - | - | | 2.6599 | 5510 | 0.0416 | - | - | | 2.6647 | 5520 | 0.0191 | - | - | | 2.6696 | 5530 | 0.0335 | - | - | | 2.6744 | 5540 | 0.0402 | - | - | | 2.6792 | 5550 | 0.0294 | - | - | | 2.6840 | 5560 | 0.0222 | - | - | | 2.6889 | 5570 | 0.0296 | - | - | | 2.6937 | 5580 | 0.0347 | - | - | | 2.6985 | 5590 | 0.0217 | - | - | | 2.7034 | 5600 | 0.0163 | - | - | | 2.7082 | 5610 | 0.0209 | - | - | | 2.7130 | 5620 | 0.0195 | - | - | | 2.7178 | 5630 | 0.0273 | - | - | | 2.7227 | 5640 | 0.0169 | - | - | | 2.7275 | 5650 | 0.0191 | - | - | | 2.7323 | 5660 | 0.0166 | - | - | | 2.7371 | 5670 | 0.0265 | - | - | | 2.7420 | 5680 | 0.0313 | - | - | | 2.7468 | 5690 | 0.0215 | - | - | | 2.7516 | 5700 | 0.0228 | - | - | | 2.7565 | 5710 | 0.0208 | - | - | | 2.7613 | 5720 | 0.0206 | - | - | | 2.7661 | 5730 | 0.0208 | - | - | | 2.7709 | 5740 | 0.0317 | - | - | | 2.7758 | 5750 | 0.0283 | - | - | | 2.7806 | 5760 | 0.0206 | - | - | | 2.7854 | 5770 | 0.0145 | - | - | | 2.7902 | 5780 | 0.0238 | - | - | | 2.7951 | 5790 | 0.0228 | - | - | | 2.7999 | 5800 | 0.0133 | - | - | | 2.8047 | 5810 | 0.0194 | - | - | | 2.8096 | 5820 | 0.0398 | - | - | | 2.8144 | 5830 | 0.025 | - | - | | 2.8192 | 5840 | 0.0309 | - | - | | 2.8240 | 5850 | 0.0355 | - | - | | 2.8289 | 5860 | 0.0123 | - | - | 
| 2.8337 | 5870 | 0.0182 | - | - | | 2.8385 | 5880 | 0.023 | - | - | | 2.8434 | 5890 | 0.0191 | - | - | | 2.8482 | 5900 | 0.023 | - | - | | 2.8530 | 5910 | 0.0356 | - | - | | 2.8578 | 5920 | 0.0239 | - | - | | 2.8627 | 5930 | 0.0203 | - | - | | 2.8675 | 5940 | 0.0154 | - | - | | 2.8723 | 5950 | 0.025 | - | - | | 2.8771 | 5960 | 0.0491 | - | - | | 2.8820 | 5970 | 0.0205 | - | - | | 2.8868 | 5980 | 0.03 | - | - | | 2.8916 | 5990 | 0.0249 | - | - | | 2.8965 | 6000 | 0.0355 | - | - | | 2.9013 | 6010 | 0.0277 | - | - | | 2.9061 | 6020 | 0.0231 | - | - | | 2.9109 | 6030 | 0.0202 | - | - | | 2.9158 | 6040 | 0.0294 | - | - | | 2.9206 | 6050 | 0.0181 | - | - | | 2.9254 | 6060 | 0.0179 | - | - | | 2.9302 | 6070 | 0.0275 | - | - | | 2.9351 | 6080 | 0.0211 | - | - | | 2.9399 | 6090 | 0.0191 | - | - | | 2.9447 | 6100 | 0.0233 | - | - | | 2.9496 | 6110 | 0.0302 | - | - | | 2.9544 | 6120 | 0.0344 | - | - | | 2.9592 | 6130 | 0.0391 | - | - | | 2.9640 | 6140 | 0.0242 | - | - | | 2.9689 | 6150 | 0.0212 | - | - | | 2.9737 | 6160 | 0.0404 | - | - | | 2.9785 | 6170 | 0.0428 | - | - | | 2.9833 | 6180 | 0.0206 | - | - | | 2.9882 | 6190 | 0.0265 | - | - | | 2.9930 | 6200 | 0.0378 | - | - | | 2.9978 | 6210 | 0.0255 | - | - | | 2.9998 | 6214 | - | 0.2628 | 0.2557 | | 3.0027 | 6220 | 0.024 | - | - | | 3.0075 | 6230 | 0.0198 | - | - | | 3.0123 | 6240 | 0.0234 | - | - | | 3.0171 | 6250 | 0.0424 | - | - | | 3.0220 | 6260 | 0.0297 | - | - | | 3.0268 | 6270 | 0.0209 | - | - | | 3.0316 | 6280 | 0.0344 | - | - | | 3.0364 | 6290 | 0.0273 | - | - | | 3.0413 | 6300 | 0.0247 | - | - | | 3.0461 | 6310 | 0.0206 | - | - | | 3.0509 | 6320 | 0.0231 | - | - | | 3.0558 | 6330 | 0.0265 | - | - | | 3.0606 | 6340 | 0.0198 | - | - | | 3.0654 | 6350 | 0.0389 | - | - | | 3.0702 | 6360 | 0.0171 | - | - | | 3.0751 | 6370 | 0.0235 | - | - | | 3.0799 | 6380 | 0.0228 | - | - | | 3.0847 | 6390 | 0.0184 | - | - | | 3.0895 | 6400 | 0.0459 | - | - | | 3.0944 | 6410 | 0.0222 | - | - | | 3.0992 | 6420 | 0.0186 | - | - | | 3.1040 | 6430 | 0.0246 | - | - | | 3.1089 | 6440 | 0.0446 | - | - | | 3.1137 | 6450 | 0.0333 | - | - | | 3.1185 | 6460 | 0.0205 | - | - | | 3.1233 | 6470 | 0.0228 | - | - | | 3.1282 | 6480 | 0.0287 | - | - | | 3.1330 | 6490 | 0.0205 | - | - | | 3.1378 | 6500 | 0.0143 | - | - | | 3.1427 | 6510 | 0.0159 | - | - | | 3.1475 | 6520 | 0.0367 | - | - | | 3.1523 | 6530 | 0.0327 | - | - | | 3.1571 | 6540 | 0.0355 | - | - | | 3.1620 | 6550 | 0.0202 | - | - | | 3.1668 | 6560 | 0.0133 | - | - | | 3.1716 | 6570 | 0.0143 | - | - | | 3.1764 | 6580 | 0.0171 | - | - | | 3.1813 | 6590 | 0.0208 | - | - | | 3.1861 | 6600 | 0.0368 | - | - | | 3.1909 | 6610 | 0.0238 | - | - | | 3.1958 | 6620 | 0.0276 | - | - | | 3.2006 | 6630 | 0.0269 | - | - | | 3.2054 | 6640 | 0.0152 | - | - | | 3.2102 | 6650 | 0.0229 | - | - | | 3.2151 | 6660 | 0.0189 | - | - | | 3.2199 | 6670 | 0.0206 | - | - | | 3.2247 | 6680 | 0.0206 | - | - | | 3.2295 | 6690 | 0.0164 | - | - | | 3.2344 | 6700 | 0.0121 | - | - | | 3.2392 | 6710 | 0.0224 | - | - | | 3.2440 | 6720 | 0.0193 | - | - | | 3.2489 | 6730 | 0.0213 | - | - | | 3.2537 | 6740 | 0.0216 | - | - | | 3.2585 | 6750 | 0.0155 | - | - | | 3.2633 | 6760 | 0.0185 | - | - | | 3.2682 | 6770 | 0.018 | - | - | | 3.2730 | 6780 | 0.0107 | - | - | | 3.2778 | 6790 | 0.0218 | - | - | | 3.2826 | 6800 | 0.0161 | - | - | | 3.2875 | 6810 | 0.0256 | - | - | | 3.2923 | 6820 | 0.015 | - | - | | 3.2971 | 6830 | 0.0132 | - | - | | 3.3020 | 6840 | 0.0228 | - | - | | 3.3068 | 6850 | 0.0274 | - | - | | 3.3116 | 6860 | 0.0232 | - | - | | 3.3164 | 6870 | 
0.0122 | - | - | | 3.3213 | 6880 | 0.0101 | - | - | | 3.3261 | 6890 | 0.0138 | - | - | | 3.3309 | 6900 | 0.0223 | - | - | | 3.3357 | 6910 | 0.018 | - | - | | 3.3406 | 6920 | 0.0105 | - | - | | 3.3454 | 6930 | 0.0212 | - | - | | 3.3502 | 6940 | 0.0189 | - | - | | 3.3551 | 6950 | 0.0115 | - | - | | 3.3599 | 6960 | 0.0187 | - | - | | 3.3647 | 6970 | 0.0237 | - | - | | 3.3695 | 6980 | 0.0172 | - | - | | 3.3744 | 6990 | 0.0148 | - | - | | 3.3792 | 7000 | 0.0234 | - | - | | 3.3840 | 7010 | 0.0139 | - | - | | 3.3888 | 7020 | 0.012 | - | - | | 3.3937 | 7030 | 0.0181 | - | - | | 3.3985 | 7040 | 0.0247 | - | - | | 3.4033 | 7050 | 0.0114 | - | - | | 3.4082 | 7060 | 0.0107 | - | - | | 3.4130 | 7070 | 0.0133 | - | - | | 3.4178 | 7080 | 0.0092 | - | - | | 3.4226 | 7090 | 0.0168 | - | - | | 3.4275 | 7100 | 0.0225 | - | - | | 3.4323 | 7110 | 0.0127 | - | - | | 3.4371 | 7120 | 0.0231 | - | - | | 3.4420 | 7130 | 0.0104 | - | - | | 3.4468 | 7140 | 0.0114 | - | - | | 3.4516 | 7150 | 0.0084 | - | - | | 3.4564 | 7160 | 0.0261 | - | - | | 3.4613 | 7170 | 0.0201 | - | - | | 3.4661 | 7180 | 0.0251 | - | - | | 3.4709 | 7190 | 0.0135 | - | - | | 3.4757 | 7200 | 0.0126 | - | - | | 3.4806 | 7210 | 0.0257 | - | - | | 3.4854 | 7220 | 0.0369 | - | - | | 3.4902 | 7230 | 0.0137 | - | - | | 3.4951 | 7240 | 0.016 | - | - | | 3.4999 | 7250 | 0.0187 | - | - | | 3.5047 | 7260 | 0.0156 | - | - | | 3.5095 | 7270 | 0.0141 | - | - | | 3.5144 | 7280 | 0.0258 | - | - | | 3.5192 | 7290 | 0.0283 | - | - | | 3.5240 | 7300 | 0.02 | - | - | | 3.5288 | 7310 | 0.0283 | - | - | | 3.5337 | 7320 | 0.0142 | - | - | | 3.5385 | 7330 | 0.0107 | - | - | | 3.5433 | 7340 | 0.0144 | - | - | | 3.5482 | 7350 | 0.0146 | - | - | | 3.5530 | 7360 | 0.0321 | - | - | | 3.5578 | 7370 | 0.0101 | - | - | | 3.5626 | 7380 | 0.0145 | - | - | | 3.5675 | 7390 | 0.0132 | - | - | | 3.5723 | 7400 | 0.0159 | - | - | | 3.5771 | 7410 | 0.0167 | - | - | | 3.5819 | 7420 | 0.0116 | - | - | | 3.5868 | 7430 | 0.0175 | - | - | | 3.5916 | 7440 | 0.0156 | - | - | | 3.5964 | 7450 | 0.0096 | - | - | | 3.6013 | 7460 | 0.0156 | - | - | | 3.6061 | 7470 | 0.0251 | - | - | | 3.6109 | 7480 | 0.0163 | - | - | | 3.6157 | 7490 | 0.0118 | - | - | | 3.6206 | 7500 | 0.0161 | - | - | | 3.6254 | 7510 | 0.0131 | - | - | | 3.6302 | 7520 | 0.0091 | - | - | | 3.6350 | 7530 | 0.0136 | - | - | | 3.6399 | 7540 | 0.0175 | - | - | | 3.6447 | 7550 | 0.0213 | - | - | | 3.6495 | 7560 | 0.0168 | - | - | | 3.6544 | 7570 | 0.02 | - | - | | 3.6592 | 7580 | 0.0204 | - | - | | 3.6640 | 7590 | 0.0132 | - | - | | 3.6688 | 7600 | 0.0254 | - | - | | 3.6737 | 7610 | 0.0313 | - | - | | 3.6785 | 7620 | 0.0107 | - | - | | 3.6833 | 7630 | 0.0241 | - | - | | 3.6881 | 7640 | 0.0188 | - | - | | 3.6930 | 7650 | 0.0166 | - | - | | 3.6978 | 7660 | 0.021 | - | - | | 3.7026 | 7670 | 0.0126 | - | - | | 3.7075 | 7680 | 0.0148 | - | - | | 3.7123 | 7690 | 0.0155 | - | - | | 3.7171 | 7700 | 0.0117 | - | - | | 3.7219 | 7710 | 0.0124 | - | - | | 3.7268 | 7720 | 0.0121 | - | - | | 3.7316 | 7730 | 0.0118 | - | - | | 3.7364 | 7740 | 0.0182 | - | - | | 3.7413 | 7750 | 0.0168 | - | - | | 3.7461 | 7760 | 0.0146 | - | - | | 3.7509 | 7770 | 0.0199 | - | - | | 3.7557 | 7780 | 0.0109 | - | - | | 3.7606 | 7790 | 0.0192 | - | - | | 3.7654 | 7800 | 0.014 | - | - | | 3.7702 | 7810 | 0.0261 | - | - | | 3.7750 | 7820 | 0.0176 | - | - | | 3.7799 | 7830 | 0.0156 | - | - | | 3.7847 | 7840 | 0.0112 | - | - | | 3.7895 | 7850 | 0.0136 | - | - | | 3.7944 | 7860 | 0.0174 | - | - | | 3.7992 | 7870 | 0.0082 | - | - | | 3.8040 | 7880 | 0.0111 | - | - | | 3.8088 | 
7890 | 0.0279 | - | - | | 3.8137 | 7900 | 0.0206 | - | - | | 3.8185 | 7910 | 0.0174 | - | - | | 3.8233 | 7920 | 0.0263 | - | - | | 3.8281 | 7930 | 0.0091 | - | - | | 3.8330 | 7940 | 0.0127 | - | - | | 3.8378 | 7950 | 0.0138 | - | - | | 3.8426 | 7960 | 0.0168 | - | - | | 3.8475 | 7970 | 0.0141 | - | - | | 3.8523 | 7980 | 0.0317 | - | - | | 3.8571 | 7990 | 0.0167 | - | - | | 3.8619 | 8000 | 0.0151 | - | - | | 3.8668 | 8010 | 0.0122 | - | - | | 3.8716 | 8020 | 0.0167 | - | - | | 3.8764 | 8030 | 0.0382 | - | - | | 3.8812 | 8040 | 0.0128 | - | - | | 3.8861 | 8050 | 0.0232 | - | - | | 3.8909 | 8060 | 0.0222 | - | - | | 3.8957 | 8070 | 0.0194 | - | - | | 3.9006 | 8080 | 0.0191 | - | - | | 3.9054 | 8090 | 0.0136 | - | - | | 3.9102 | 8100 | 0.0106 | - | - | | 3.9150 | 8110 | 0.0216 | - | - | | 3.9199 | 8120 | 0.0178 | - | - | | 3.9247 | 8130 | 0.0126 | - | - | | 3.9295 | 8140 | 0.0158 | - | - | | 3.9343 | 8150 | 0.0186 | - | - | | 3.9392 | 8160 | 0.0167 | - | - | | 3.9440 | 8170 | 0.0159 | - | - | | 3.9488 | 8180 | 0.0174 | - | - | | 3.9537 | 8190 | 0.0211 | - | - | | 3.9585 | 8200 | 0.0245 | - | - | | 3.9633 | 8210 | 0.0186 | - | - | | 3.9681 | 8220 | 0.0162 | - | - | | 3.9730 | 8230 | 0.0312 | - | - | | 3.9778 | 8240 | 0.033 | - | - | | 3.9826 | 8250 | 0.0147 | - | - | | 3.9874 | 8260 | 0.0224 | - | - | | 3.9923 | 8270 | 0.0215 | - | - | | 3.9971 | 8280 | 0.0275 | - | - | | 3.9990 | 8284 | - | 0.2582 | 0.2502 | * The bold row denotes the saved checkpoint. </details> ### Framework Versions - Python: 3.11.5 - Sentence Transformers: 3.3.1 - Transformers: 4.46.3 - PyTorch: 2.4.1+cu121 - Accelerate: 0.34.2 - Datasets: 3.0.0 - Tokenizers: 0.20.3 ## Citation ### BibTeX #### Sentence Transformers ```bibtex @inproceedings{reimers-2019-sentence-bert, title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks", author = "Reimers, Nils and Gurevych, Iryna", booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing", month = "11", year = "2019", publisher = "Association for Computational Linguistics", url = "https://arxiv.org/abs/1908.10084", } ``` #### MatryoshkaLoss ```bibtex @misc{kusupati2024matryoshka, title={Matryoshka Representation Learning}, author={Aditya Kusupati and Gantavya Bhatt and Aniket Rege and Matthew Wallingford and Aditya Sinha and Vivek Ramanujan and William Howard-Snyder and Kaifeng Chen and Sham Kakade and Prateek Jain and Ali Farhadi}, year={2024}, eprint={2205.13147}, archivePrefix={arXiv}, primaryClass={cs.LG} } ``` #### MultipleNegativesRankingLoss ```bibtex @misc{henderson2017efficient, title={Efficient Natural Language Response Suggestion for Smart Reply}, author={Matthew Henderson and Rami Al-Rfou and Brian Strope and Yun-hsuan Sung and Laszlo Lukacs and Ruiqi Guo and Sanjiv Kumar and Balint Miklos and Ray Kurzweil}, year={2017}, eprint={1705.00652}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` <!-- ## Glossary *Clearly define terms in order to be accessible across audiences.* --> <!-- ## Model Card Authors *Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.* --> <!-- ## Model Card Contact *Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.* -->
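For readers who want to reproduce the training objective described above, the loss configuration (a MultipleNegativesRankingLoss wrapped in a MatryoshkaLoss over dimensions 768 and 512) can be instantiated in Sentence Transformers roughly as follows. This is a minimal sketch only: the checkpoint name below is a stand-in 768-dimensional model, not the base model actually used for this fine-tune.

```python
from sentence_transformers import SentenceTransformer
from sentence_transformers.losses import MatryoshkaLoss, MultipleNegativesRankingLoss

# Stand-in 768-dim checkpoint (assumption): replace with the actual base model.
model = SentenceTransformer("sentence-transformers/all-mpnet-base-v2")

# In-batch-negatives ranking loss, as listed in the card's loss parameters.
inner_loss = MultipleNegativesRankingLoss(model)

# Wrap it so the loss is also applied to the truncated 512-dim embedding prefix,
# matching matryoshka_dims=[768, 512] and matryoshka_weights=[1, 1] above.
loss = MatryoshkaLoss(
    model,
    inner_loss,
    matryoshka_dims=[768, 512],
    matryoshka_weights=[1, 1],
    n_dims_per_step=-1,
)
```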
[ "TEXT_CLASSIFICATION" ]
[ "BEAR" ]
StivenLancheros/roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT
StivenLancheros
token-classification
[ "transformers", "pytorch", "tensorboard", "roberta", "token-classification", "generated_from_trainer", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2022-03-02T23:29:05
2022-03-12T11:50:46
125
1
--- license: apache-2.0 metrics: - precision - recall - f1 - accuracy tags: - generated_from_trainer model-index: - name: roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT This model is a fine-tuned version of [PlanTL-GOB-ES/roberta-base-biomedical-clinical-es](https://huggingface.co/PlanTL-GOB-ES/roberta-base-biomedical-clinical-es) on the CRAFT dataset. It achieves the following results on the evaluation set: - Loss: 0.1720 - Precision: 0.8253 - Recall: 0.8147 - F1: 0.8200 - Accuracy: 0.9660 ## Model description This model performs Named Entity Recognition for 6 entity tags: Sequence, Cell, Protein, Gene, Taxon, and Chemical from the [CRAFT](https://github.com/UCDenver-ccp/CRAFT/releases) (Colorado Richly Annotated Full Text) corpus in English. Entity tags have been normalized, replacing the original three-letter codes with full names, e.g. B-Protein, I-Chemical. ## Intended uses & limitations More information needed (a minimal inference sketch is provided at the end of this card). ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 3e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 ### Training results | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:---------:|:------:|:------:|:--------:| | 0.1133 | 1.0 | 1360 | 0.1629 | 0.7985 | 0.7782 | 0.7882 | 0.9610 | | 0.049 | 2.0 | 2720 | 0.1530 | 0.8165 | 0.8084 | 0.8124 | 0.9651 | | 0.0306 | 3.0 | 4080 | 0.1603 | 0.8198 | 0.8075 | 0.8136 | 0.9650 | | 0.0158 | 4.0 | 5440 | 0.1720 | 0.8253 | 0.8147 | 0.8200 | 0.9660 | ### Framework versions - Transformers 4.16.2 - Pytorch 1.10.0+cu111 - Datasets 1.18.3 - Tokenizers 0.11.6
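Since the intended-uses section above is still a stub, here is a minimal inference sketch using the `transformers` token-classification pipeline. The example sentence and the `aggregation_strategy` choice are illustrative assumptions, not taken from the original card.

```python
from transformers import pipeline

# Load the fine-tuned checkpoint through the token-classification pipeline.
# aggregation_strategy="simple" merges sub-word pieces into whole entity spans.
ner = pipeline(
    "token-classification",
    model="StivenLancheros/roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT",
    aggregation_strategy="simple",
)

# Illustrative sentence (not from the CRAFT corpus).
text = "The BRCA1 gene encodes a protein involved in DNA repair in Homo sapiens."
for entity in ner(text):
    print(entity["entity_group"], entity["word"], round(float(entity["score"]), 3))
```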
[ "NAMED_ENTITY_RECOGNITION" ]
[ "CRAFT" ]
mav23/Llama-3.2-3B-Instruct-Frog-GGUF
mav23
text-generation
[ "gguf", "RAG", "Function_Calling", "FC", "Summarization", "Rewriting", "Functions", "VLLM", "LLM", "text-generation", "en", "vi", "base_model:meta-llama/Llama-3.2-3B-Instruct", "base_model:quantized:meta-llama/Llama-3.2-3B-Instruct", "license:llama3.2", "endpoints_compatible", "region:us", "conversational" ]
2024-11-16T05:33:40
2024-11-16T06:06:57
125
1
--- base_model: - meta-llama/Llama-3.2-3B-Instruct language: - en - vi license: llama3.2 pipeline_tag: text-generation tags: - RAG - Function_Calling - FC - Summarization - Rewriting - Functions - VLLM - LLM --- <p align="center"> <img src="https://cdn-uploads.huggingface.co/production/uploads/6612cc790b91dd96968028f9/yP51EyRNg-CHCKB4gBYan.png" width="300" /> </p> <h1>Llama-3.2-3B-Instruct-Frog - a RAG-optimized LLaMA3.2 for Vietnamese</h1> **Quantized Version**: [phamhai/Llama-3.2-3B-Instruct-Frog-Q4_K_M-GGUF](https://huggingface.co/phamhai/Llama-3.2-3B-Instruct-Frog-Q4_K_M-GGUF) At the end of September 2024, Meta released two lightweight LLM versions: [Llama-3.2-1B-Instruct](https://huggingface.co/meta-llama/Llama-3.2-1B-Instruct) and [Llama-3.2-3B-Instruct](https://huggingface.co/meta-llama/Llama-3.2-3B-Instruct). However, these models are not well-supported for Vietnamese, especially for tasks related to Retrieval-Augmented Generation (RAG). Today, I am excited to announce the release of two models specifically trained to provide better support for Vietnamese RAG tasks. <h2>Model Details:</h2> + Base Models: Llama-3.2-1B-Instruct and Llama-3.2-3B-Instruct + Performance: The models are optimized for fast inference and can be easily deployed on on-premises and edge devices (laptop/smartphone/NVIDIA Jetson Xavier/Raspberry Pi, etc.). + Model weights: + [Llama-3.2-1B-Instruct-Frog](https://huggingface.co/phamhai/Llama-3.2-1B-Instruct-Frog): 131K context length, 1 billion parameters + [Llama-3.2-3B-Instruct-Frog](https://huggingface.co/phamhai/Llama-3.2-3B-Instruct-Frog): 131K context length, 3 billion parameters <blockquote style="color:red"> <p><strong style="color: red">Terms of Use and License</strong>: By using our released weights, you agree to and comply with the terms and conditions specified in Meta's LLaMA-3 license.</p></blockquote> <h2>Model Evaluation</h2> We evaluated this model on the [VMLU benchmark](https://vmlu.ai/) and achieved an accuracy of **45.13**. However, this benchmark is not the focus of our current efforts. We believe it will be very difficult for language models with fewer than 13 billion parameters to retain enough knowledge to answer questions across diverse user contexts, especially for smaller models with under 3 billion parameters. For the model to effectively handle real-world business scenarios and avoid hallucinations, it is almost essential to supplement knowledge from external sources (through RAG). Therefore, we developed this model with a primary focus on optimizing its RAG capabilities. Internal testing is currently underway; results will be updated in the coming days. <h2> Run the model </h2> (*Disclaimer: The bot is named Vivi because of my passion for VinFast vehicles, and I also hope to develop my own smaller models for VinFast's car lines (VinFast refers to its virtual assistant as Vivi). This model has no affiliation with VinFast or any related entities.*) <h3> With Hugging Face's transformers </h3> <h4> 1. QnA task </h4> ```python from transformers import AutoModelForCausalLM, AutoTokenizer model_path = "phamhai/Llama-3.2-3B-Instruct-Frog" tokenizer = AutoTokenizer.from_pretrained(model_path) model = AutoModelForCausalLM.from_pretrained(model_path) messages = [ {"role": "system", "content": "Bạn là một người bạn gái xinh đẹp. Tên của bạn là Vivi.
Hãy luôn xưng là Vivi, gọi người nói là anh và trả lời luôn bắt đầu bằng cụm từ Dạ thưa anh yêu của em."}, {"role": "user", "content": "xin chào em"}] tokenized_chat = tokenizer.apply_chat_template(messages, tokenize=True, add_generation_prompt=False, return_tensors="pt") outputs = model.generate(tokenized_chat, max_new_tokens=128) print(tokenizer.decode(outputs[0])) # Dạ thưa anh yêu của em, em rất vui được gặp anh. messages = [ {"role": "system", "content": "Bạn là một trợ lí Tiếng Việt nhiệt tình và trung thực. Hãy luôn trả lời một cách hữu ích nhất có thể, đồng thời giữ an toàn.\nNếu một câu hỏi không có ý nghĩa hoặc không hợp lý về mặt thông tin, hãy giải thích tại sao thay vì trả lời một điều gì đó không chính xác, vui lòng không chia sẻ thông tin sai lệch."}, {"role": "user", "content": "Làm sao để chữa bệnh đau đầu?"}] tokenized_chat = tokenizer.apply_chat_template(messages, tokenize=True, add_generation_prompt=True, return_tensors="pt") outputs = model.generate(tokenized_chat, max_new_tokens=256) print(tokenizer.decode(outputs[0])) # Có nhiều nguyên nhân gây đau đầu, bao gồm căng thẳng, thiếu ngủ, mất nước, chứng đau đầu thường xuyên, đau đầu do chứng đau nửa đầu, và nhiều hơn nữa. Dưới đây là một số cách để giảm đau đầu: # 1. Nghỉ ngơi: Nếu đau đầu là do căng thẳng hoặc thiếu ngủ, hãy nghỉ ngơi và ngủ đủ giấc. # 2. Massage: Massage vùng cổ và vai có thể giúp giảm đau đầu. # 3. Uống nước: Đảm bảo bạn uống đủ nước để giữ cho cơ thể luôn được cung cấp đủ nước. # 4. Sử dụng thuốc giảm đau: Nếu đau đầu không giảm sau khi nghỉ ngơi và uống nước, bạn có thể sử dụng thuốc giảm đau như paracetamol hoặc ibuprofen. # 5. Sử dụng băng lạnh: Nếu đau đầu do chứng đau nửa đầu, bạn có thể sử dụng băng lạnh để giảm đau. # 6. Thay đổi chế độ ăn uống: Nếu đau đầu liên quan đến chế độ ăn uống của bạn, hãy thay đổi chế độ ăn uống để giảm đau đầu. # Nếu đau đầu kéo dài hoặc trở nên nghiêm trọng hơn, bạn nên tìm kiếm sự giúp đỡ y tế để được chẩn đoán và điều trị đúng cách. ``` <h4> 2. Summarization task </h4> <h5> Focused Answer </h5> ```python messages = [ {"role": "system", "content": '''Bạn là một trợ lí Tiếng Việt nhiệt tình và trung thực. Hãy luôn trả lời một cách hữu ích nhất có thể, đồng thời giữ an toàn. Nếu một câu hỏi không có ý nghĩa hoặc không hợp lý về mặt thông tin, hãy giải thích tại sao thay vì trả lời một điều gì đó không chính xác, vui lòng không chia sẻ thông tin sai lệch. Context: Đoạn 0: "Chính phủ đề xuất bổ sung gần 20.700 tỷ đồng vốn điều lệ cho Ngân hàng Ngoại thương Việt Nam (Vietcombank) từ cổ tức bằng cổ phiếu được chia của cổ đông Nhà nước. Chiều 23/10, thừa ủy quyền Chính phủ, Phó thủ tướng, Bộ trưởng Tài chính Hồ Đức Phớc trình Quốc hội về bổ sung vốn Nhà nước tại Ngân hàng Ngoại Thương Việt Nam (Vietcombank). Theo đó, Chính phủ đề nghị tăng vốn điều lệ cho ngân hàng này gần 20.700 tỷ đồng từ cổ tức bằng cổ phiếu được chia của cổ đông Nhà nước. Số tiền này lấy từ nguồn lợi nhuận còn lại lũy kế đến hết năm 2018 và lãi còn lại năm 2021. Vốn điều lệ dự kiến rót thêm cho Vietcombank gần bằng lợi nhuận hợp nhất trước thuế nửa đầu năm nay của nhà băng này. Việc bổ sung vốn cho "ông lớn" ngân hàng quốc doanh được Phó thủ tướng nhấn mạnh là cấp thiết để duy trì tỷ lệ vốn góp Nhà nước, phù hợp chiến lược phát triển kinh tế xã hội, tạo nguồn lực hỗ trợ ngân hàng yếu kém. Phó thủ tướng cho biết, phần lợi nhuận còn lại lũy kế hết năm 2018 và lãi còn lại 2021 hiện được hạch toán theo dõi tại VCB, chưa nằm trong cân đối ngân sách Nhà nước. 
Do vậy, nguồn vốn đề xuất tăng cho ngân hàng này không ảnh hưởng tới kế hoạch dự toán thu chi ngân sách 2024-2025. Phó thủ tướng, Bộ trưởng Tài chính Hồ Đức Phớc đọc tờ trình bổ sung vốn cho Vietcombank, ngày 23/10. Ảnh: Trung tâm báo chí Quốc hội Phó thủ tướng, Bộ trưởng Tài chính Hồ Đức Phớc đọc tờ trình bổ sung vốn cho Vietcombank, ngày 23/10. Ảnh: Trung tâm báo chí Quốc hội Vốn điều lệ của Vietcombank hiện là 55.891 tỷ đồng, thấp hơn nhiều so với VPBank (79.339 tỷ đồng), Techcombank (70.450 tỷ đồng) và không có sự cách biệt lớn so với một số ngân hàng thương mại cổ phần như MB (52.871) tỷ đồng, ACB (44.667 tỷ đồng) và SHB (36.629 tỷ đồng). Ngoài ra, việc tăng vốn nhằm để ngân hàng này đáp ứng các tỷ lệ an toàn tối thiểu. Tính tới cuối 2023, tỷ lệ an toàn vốn (CAR) của ngân hàng này là 11,05%, đảm bảo quy định. Tuy nhiên, mức này thấp hơn các ngân hàng thương mại cổ phần (VPBank, MB là 12-13%; Techcombank 13-15%...) và các nhà băng trong khu vực (Singapore là 17,1%, Indonesia 23,27%...). Thẩm tra nội dung này, Chủ nhiệm Ủy ban Kinh tế Vũ Hồng Thanh cho rằng đề xuất tăng vốn cho Vietcombank bảo đảm cơ sở pháp lý và đúng thẩm quyền theo quy định. Tuy nhiên, Ủy ban Kinh tế đề nghị Chính phủ lấy ý kiến của cổ đông chiến lược nước ngoài Ngân hàng Mizuho Corporate Bank - đơn vị nắm 15% vốn điều lệ của Vietcombank. Việc này nhằm thuận lợi trong quá trình tăng vốn. Chính phủ cũng cần bổ sung thông tin hiện trạng vốn của Vietcombank so với các ngân hàng thương mại trong hệ thống hiện nay. "Có ý kiến đề nghị làm rõ nhận định nguồn vốn đề xuất để tăng vốn điều lệ không tác động đến ngân sách Nhà nước", ông Thanh cho biết. Trụ sở Ngân hàng Ngoại thương Việt Nam (Vietcombank). Ảnh: VCB Trụ sở Ngân hàng Ngoại thương Việt Nam (Vietcombank). Ảnh: VCB Chủ nhiệm Ủy ban Kinh tế Vũ Hồng Thanh đề nghị Chính phủ chỉ đạo Ngân hàng Nhà nước cùng các bộ, ngành liên quan xử lý phần lợi nhuận còn lại năm 2022, 2023 (lần lượt là 21.680 tỷ và 25.009 tỷ đồng), nhằm tăng năng lực tài chính cho Vietcombank, bù đắp mức thiếu hụt vốn tự có, bảo đảm an toàn hoạt động. Cơ quan thẩm tra lưu ý vốn được bổ sung cho Vietcombank cần được dùng để mở rộng kinh doanh, cung ứng tín dụng với các lĩnh vực, dự án quan trọng quốc gia quy mô lớn, giảm lãi suất cho vay, cũng như đổi mới mô hình quản trị, chất lượng dịch vụ của nhà băng này. "Chính phủ cần đánh giá kỹ tác động việc bổ sung vốn Nhà nước cho Vietcombank tới phát triển của ngành ngân hàng, hiệu quả kinh tế xã hội", Ủy ban Kinh tế lưu ý. Vietcombank là một trong 4 ngân hàng thương mại Nhà nước, bên cạnh BIDV, VietinBank và Agribank. Ngân hàng này do Nhà nước sở hữu 74,8% vốn điều lệ. Lũy kế nửa đầu năm nay, lợi nhuận hợp nhất trước thuế của nhà băng này đạt 20.835 tỷ đồng, tăng 1,6% so với cùng kỳ 2023. Với dữ liệu này, Vietcombank tiếp tục đứng đầu toàn hệ thống ngân hàng về lợi nhuận 6 tháng đầu năm. Đây cũng là mức lãi nửa đầu năm cao kỷ lục của nhà băng này. Tính đến 30/6, tổng tài sản của ngân hàng đạt hơn 1,9 triệu tỷ đồng, tăng 3,6% so với cuối 2023. Trong đó, cho vay khách hàng gần 1,37 triệu tỷ đồng, tăng 7,8%." Đoạn 1: "Đã có vài đơn vị bán tín chỉ carbon cho khách ngoại nhưng còn thiếu cơ sở pháp lý để đảm bảo hoạt động được thuận lợi, theo chuyên gia. Thông tin tại phiên tọa đàm thuộc Diễn đàn và Triển lãm Kinh tế xanh 2024 (GEFE), ông Đỗ Ngọc Quỳnh, Tổng thư ký Hiệp hội Thị trường Trái phiếu Việt Nam (VBMA), cho biết thị trường tín chỉ carbon tự nguyện Việt Nam đã có một số đơn vị bán được tín chỉ carbon cho nhà đầu tư, tập đoàn nước ngoài. 
"Họ đang mua chứng chỉ carbon và chứng chỉ năng lượng tái tạo (REC) trong tiêu chí RE100, tức 100% năng lượng tái tạo", ông cho biết. RE100 là sáng kiến toàn cầu dành cho các công ty cam kết sử dụng 100% điện năng tái tạo, phát động bởi Climate Group và CDP vào 2014. Từ trái sang, Marco Gaspari, Điều phối viên Ngành Môi trường tại Cơ quan Hợp tác Phát triển Italy (AICS Hà Nội) và ông Đỗ Ngọc Quỳnh, Tổng Thư ký Hiệp hội Thị trường Trái phiếu Việt Nam (VBMA) nói tại tọa đàm. Ảnh: GEFE 2024 Marco Gaspari, Điều phối viên Ngành Môi trường tại Cơ quan Hợp tác Phát triển Italy (AICS Hà Nội) và ông Đỗ Ngọc Quỳnh, Tổng Thư ký Hiệp hội Thị trường Trái phiếu Việt Nam (VBMA) chia sẻ tại tọa đàm. Ảnh: GEFE 2024 Thị trường carbon gồm hai hình thức là bắt buộc và tự nguyện. Đồ họa: Dỹ Tùng Phân biệt các loại thị trường carbon. Đồ họa: Dỹ Tùng Theo kế hoạch của chính phủ, thị trường bắt buộc sẽ vận hành thử nghiệm vào giai đoạn 2025-2028. Với thị trường tự nguyện, ông Quỳnh cho biết đã bắt đầu hình thành và cũng biến động theo diễn biến xu hướng chung toàn cầu. Chuyên gia VBMA cho rằng Việt Nam đã có chính sách chung để thực hiện cam kết Net Zero vào 2050, nhưng vẫn chưa có pháp lý đầy đủ và rõ ràng cho thị trường carbon tự nguyện. "Những người bán tại Việt Nam sau giao dịch không biết hạch toán vào đâu, nộp thuế thế nào. Một số chọn phương án tính vào thu nhập bất thường để khai thuế", ông ví dụ. Ông Nguyễn Thành Nghiệp, Luật sư thành viên công ty luật VTN và Cộng sự chỉ ra việc chưa có quy định xác định tính chất tài sản của tín chỉ carbon. "Chúng có được xem là tài sản bình thường, được thế chấp hay giao dịch thế nào chưa có đủ căn cứ pháp lý", ông nói. Ngoài ra, quy trình MRV (đo lường, báo cáo và kiểm chứng) cũng cần quy định, hướng dẫn rõ. Theo ông, ngoài các cơ quan quản lý, khu vực tư nhân cũng trông chờ xem liệu có thể tham gia hoạt động MRV không. "Trong thời gian tới, nếu hoàn thiện pháp lý, thị trường sẽ có nhiều tiềm năng phát triển hơn", ông Đỗ Ngọc Quỳnh dự báo. Ngoài tín chỉ carbon, với tiềm năng điện tái tạo thứ tư thế giới theo McKenzie, ông cho rằng có thể khai thác việc vừa bán tín chỉ carbon vừa bán được REC. Theo VBMA, quy mô thị trường carbon bắt buộc toàn cầu đạt 104 tỷ USD năm ngoái, tăng 100% so với năm 2020. Trong khi, thị trường tự nguyện đã thu hẹp còn 800 triệu USD, giảm hai phần ba so với 2021 do một số vụ bê bối liên quan đến "giặt xanh" (green washing) làm ảnh hưởng đến uy tín, niềm tin. Theo dõi biến động của thị trường thế giới giúp các bên tham gia trong thị trường carbon tự nguyện còn sơ khai của Việt Nam rút kinh nghiệm và tìm ra hướng đi. Marco Gaspari, Điều phối viên Ngành Môi trường tại Cơ quan Hợp tác Phát triển Italy (AICS) văn phòng Hà Nội, dự báo người mua sẽ cần tìm kiếm các bên bán tín chỉ có hệ thống quản trị tốt và rõ ràng. Ông cho rằng người mua đang thiên về chuộng mua tín chỉ lĩnh vực giảm phát thải sản xuất vì dễ chứng minh. Một loại được quan tâm khác là "carbon xanh dương" (blue carbon) - tín chỉ tạo ra từ các dự án hấp thụ carbon của rừng ngập mặn, đầm lầy bãi triều và cỏ biển. Ông chỉ ra Việt Nam triển vọng với 200.000 ha rừng ngập mặn, có thể làm các dự án carbon tương tự như ở Honduras. Bà Thu Nguyễn, Quản lý chính sách tại Apanada Management Consultancy, Đại diện Viện Tài nguyên Thế giới (WRI) khuyến nghị các dự án tín chỉ carbon nâng cao giá trị bằng cách quan tâm đến tính bình đẳng và bao trùm. 
Theo đó, mục tiêu không chỉ là giảm phát thải mà còn là cải thiện đời sống người dân và phát triển bình đẳng hơn "Dự án cần bảo đảm có tham vấn của cộng đồng, đặc biệt là phụ nữ và các nhóm yếu thế, để tạo ra lợi ích cho cả cộng đồng lẫn nhà đầu tư", bà nói." Đoạn 2: "Giá nhẫn trơn liên tục điều chỉnh, tăng gần một triệu đồng trong ngày và có nơi lên sát 89 triệu đồng một lượng. 15h ngày 23/10, giá mua bán nhẫn trơn được các thương hiệu kinh doanh điều chỉnh theo diễn biến đi lên của thế giới. Chiều nay, mỗi ounce vàng quốc tế tiếp tục thiết lập kỷ lục mới 2.755 USD. Giá nhẫn trơn tại Công ty Vàng bạc đá quý Sài Gòn (SJC) cũng tăng nửa triệu đồng so với đầu sáng và gần 1 triệu đồng so với cuối ngày hôm qua, lên 86,9 - 88,2 triệu đồng. Công ty Vàng bạc đá quý Phú Nhuận (PNJ) và Mi Hồng niêm yết giá nhẫn trơn quanh vùng 87,4 - 88,4 triệu đồng. Còn tại Tập đoàn Vàng bạc đá quý DOJI, giá mua bán nhẫn trơn cùng thời điểm thậm chí lên 88 - 88,9 triệu đồng một lượng. Trước đó đầu ngày, Công ty Vàng bạc đá quý Sài Gòn (SJC) đã tăng 300.000 đồng một lượng so với cuối ngày hôm qua, niêm yết giá nhẫn trơn tại 86,3 - 87,6 triệu đồng. Biểu giá mua bán nhẫn trơn tại Tập đoàn Vàng bạc đá quý DOJI lúc 9h sáng là 87 - 88 triệu đồng, tăng 200.000 đồng so với cuối ngày hôm qua. Nhẫn trơn giữ nhịp tăng liên tục trong 10 ngày qua. So với giữa tháng, mỗi lượng nhẫn trơn đã tăng hơn 5 triệu đồng. Còn so với đầu năm, nhẫn trơn tăng gần 25 triệu một lượng, tương đương hiệu suất 39%. Trong khi giá vàng miếng SJC đứng yên ở vùng 87 - 89 triệu một lượng, do Ngân hàng Nhà nước chưa thay đổi giá bán can thiệp. Thời điểm này là mùa cưới cuối năm và nhu cầu mua vàng nhẫn làm quà cưới tăng, song người dân không dễ để mua được mặt hàng này tại các thương hiệu lớn. Các thương hiệu lớn như DOJI, PNJ, Bảo Tín Minh Châu thường xuyên trong tình trạng cháy hàng. Khách lẻ chỉ may mắn mua được số lượng ít nếu cửa hàng vừa có khách bán ra. Còn tại SJC, các chi nhánh giới hạn lượng mua tối đa 5 phân đến 1 chỉ mỗi người. Trên thị trường quốc tế, mỗi ounce vàng trong 5 ngày qua tăng mạnh hơn 100 USD. Kim loại quý có thời điểm lên mức kỷ lục gần 2.750 USD, trước khi lùi về vùng 2.738 USD vào sáng nay. Quy đổi theo tỷ giá bán Vietcombank, giá vàng trong nước chênh lệch 3,5-5 triệu đồng một lượng so với thế giới. Theo dự báo của các nhà băng hàng đầu thế giới, giá vàng thế giới có thể lên 3.000 USD một ounce vào năm sau. Các chuyên gia khuyến nghị nhà đầu tư phân bổ tỷ trọng nhỏ danh mục vào kênh trú ẩn này, đặc biệt trong bối cảnh kim loại quý đã tăng mạnh thời gian qua." Đoạn 3: "Nhu cầu trú ẩn khi căng thẳng địa chính trị leo thang kéo giá vàng lên mức đỉnh mới, tại 2.748 USD một ounce. Chốt phiên giao dịch 22/10, giá vàng thế giới giao ngay tăng gần 30 USD lên 2.748 USD một ounce. Đây là mức cao kỷ lục mới của kim loại quý. "Căng thẳng địa chính trị vẫn là nguyên nhân chủ yếu. Hai tuần nữa sẽ diễn ra bầu cử Tổng thống Mỹ và cuộc đua vẫn rất sát sao. Bất ổn chính trị đang kéo nhu cầu trú ẩn lên cao", Peter A. Grant - Phó giám đốc Zaner Metals nhận định trên Reuters. Giá vàng thế giới đảo chiều tăng mạnh trong phiên 22/10. Đồ thị: Kitco Giá vàng thế giới đảo chiều tăng mạnh trong phiên 22/10. Đồ thị: Kitco Cuộc thăm dò mới nhất của Reuters/Ipsos cho thấy tỷ lệ ủng hộ Phó tổng thống Kamala Harris hiện là 46%, nhỉnh hơn so với 43% của cựu Tổng thống Donald Trump. "Sự sát sao này đang tạo nên tình trạng thiếu chắc chắn. Môi trường này có lợi cho vàng", các nhà phân tích tại ngân hàng BNP Paribas nhận định. 
Grant dự báo nếu căng thẳng tại Trung Đông tiếp tục tăng nhiệt, giá có thể lên 3.000 USD cuối năm nay. Từ đầu năm, giá đã tăng 33% và liên tiếp lập đỉnh mới. Một yếu tố khác đang hỗ trợ kim loại quý là làn sóng giảm lãi suất của các ngân hàng trung ương lớn trên toàn cầu. Mỹ, châu Âu, Trung Quốc cùng hàng loạt nền kinh tế khác đã giảm lãi suất năm nay để hỗ trợ nền kinh tế. Trong khi đó, tại Wall Street, các chỉ số chính gần như đứng yên. Nhà đầu tư hiện theo dõi lợi suất trái phiếu chính phủ Mỹ và chờ đánh giá thêm báo cáo tài chính của các doanh nghiệp. Ngoài vàng, các kim loại quý khác cũng tăng giá. Bạc lập đỉnh 12 năm, khi tăng 3,2% lên gần 35 USD một ounce. Han Tan - chiến lược gia thị trường tại Exinity Group dự báo bạc vượt mốc 35 USD trước khi cuộc bầu cử diễn ra. Bạch kim đắt thêm 2,8% lên 1.031 USD một ounce. Palladium tăng 2,9% lên 1.081 USD." '''}, {"role": "user", "content": '''giá nhẫn trơn hôm nay là bao nhiêu?'''}] tokenized_chat = tokenizer.apply_chat_template(messages, tokenize=True, add_generation_prompt=True, return_tensors="pt") outputs = model.generate(tokenized_chat, max_new_tokens=128) print(tokenizer.decode(outputs[0])) # Giá nhẫn trơn hôm nay là 86,9 - 88,2 triệu đồng. ``` <h5> Answer with bot persona</h5> ```python messages = [ {"role": "system", "content": '''Bạn là một trợ lí Tiếng Việt nhiệt tình và trung thực. Hãy luôn trả lời một cách hữu ích nhất có thể, đồng thời giữ an toàn. Nếu một câu hỏi không có ý nghĩa hoặc không hợp lý về mặt thông tin, hãy giải thích tại sao thay vì trả lời một điều gì đó không chính xác, vui lòng không chia sẻ thông tin sai lệch. Context: Đoạn 0: "Chính phủ đề xuất bổ sung gần 20.700 tỷ đồng vốn điều lệ cho Ngân hàng Ngoại thương Việt Nam (Vietcombank) từ cổ tức bằng cổ phiếu được chia của cổ đông Nhà nước. Chiều 23/10, thừa ủy quyền Chính phủ, Phó thủ tướng, Bộ trưởng Tài chính Hồ Đức Phớc trình Quốc hội về bổ sung vốn Nhà nước tại Ngân hàng Ngoại Thương Việt Nam (Vietcombank). Theo đó, Chính phủ đề nghị tăng vốn điều lệ cho ngân hàng này gần 20.700 tỷ đồng từ cổ tức bằng cổ phiếu được chia của cổ đông Nhà nước. Số tiền này lấy từ nguồn lợi nhuận còn lại lũy kế đến hết năm 2018 và lãi còn lại năm 2021. Vốn điều lệ dự kiến rót thêm cho Vietcombank gần bằng lợi nhuận hợp nhất trước thuế nửa đầu năm nay của nhà băng này. Việc bổ sung vốn cho "ông lớn" ngân hàng quốc doanh được Phó thủ tướng nhấn mạnh là cấp thiết để duy trì tỷ lệ vốn góp Nhà nước, phù hợp chiến lược phát triển kinh tế xã hội, tạo nguồn lực hỗ trợ ngân hàng yếu kém. Phó thủ tướng cho biết, phần lợi nhuận còn lại lũy kế hết năm 2018 và lãi còn lại 2021 hiện được hạch toán theo dõi tại VCB, chưa nằm trong cân đối ngân sách Nhà nước. Do vậy, nguồn vốn đề xuất tăng cho ngân hàng này không ảnh hưởng tới kế hoạch dự toán thu chi ngân sách 2024-2025. Phó thủ tướng, Bộ trưởng Tài chính Hồ Đức Phớc đọc tờ trình bổ sung vốn cho Vietcombank, ngày 23/10. Ảnh: Trung tâm báo chí Quốc hội Phó thủ tướng, Bộ trưởng Tài chính Hồ Đức Phớc đọc tờ trình bổ sung vốn cho Vietcombank, ngày 23/10. Ảnh: Trung tâm báo chí Quốc hội Vốn điều lệ của Vietcombank hiện là 55.891 tỷ đồng, thấp hơn nhiều so với VPBank (79.339 tỷ đồng), Techcombank (70.450 tỷ đồng) và không có sự cách biệt lớn so với một số ngân hàng thương mại cổ phần như MB (52.871) tỷ đồng, ACB (44.667 tỷ đồng) và SHB (36.629 tỷ đồng). Ngoài ra, việc tăng vốn nhằm để ngân hàng này đáp ứng các tỷ lệ an toàn tối thiểu. Tính tới cuối 2023, tỷ lệ an toàn vốn (CAR) của ngân hàng này là 11,05%, đảm bảo quy định. 
Tuy nhiên, mức này thấp hơn các ngân hàng thương mại cổ phần (VPBank, MB là 12-13%; Techcombank 13-15%...) và các nhà băng trong khu vực (Singapore là 17,1%, Indonesia 23,27%...). Thẩm tra nội dung này, Chủ nhiệm Ủy ban Kinh tế Vũ Hồng Thanh cho rằng đề xuất tăng vốn cho Vietcombank bảo đảm cơ sở pháp lý và đúng thẩm quyền theo quy định. Tuy nhiên, Ủy ban Kinh tế đề nghị Chính phủ lấy ý kiến của cổ đông chiến lược nước ngoài Ngân hàng Mizuho Corporate Bank - đơn vị nắm 15% vốn điều lệ của Vietcombank. Việc này nhằm thuận lợi trong quá trình tăng vốn. Chính phủ cũng cần bổ sung thông tin hiện trạng vốn của Vietcombank so với các ngân hàng thương mại trong hệ thống hiện nay. "Có ý kiến đề nghị làm rõ nhận định nguồn vốn đề xuất để tăng vốn điều lệ không tác động đến ngân sách Nhà nước", ông Thanh cho biết. Trụ sở Ngân hàng Ngoại thương Việt Nam (Vietcombank). Ảnh: VCB Trụ sở Ngân hàng Ngoại thương Việt Nam (Vietcombank). Ảnh: VCB Chủ nhiệm Ủy ban Kinh tế Vũ Hồng Thanh đề nghị Chính phủ chỉ đạo Ngân hàng Nhà nước cùng các bộ, ngành liên quan xử lý phần lợi nhuận còn lại năm 2022, 2023 (lần lượt là 21.680 tỷ và 25.009 tỷ đồng), nhằm tăng năng lực tài chính cho Vietcombank, bù đắp mức thiếu hụt vốn tự có, bảo đảm an toàn hoạt động. Cơ quan thẩm tra lưu ý vốn được bổ sung cho Vietcombank cần được dùng để mở rộng kinh doanh, cung ứng tín dụng với các lĩnh vực, dự án quan trọng quốc gia quy mô lớn, giảm lãi suất cho vay, cũng như đổi mới mô hình quản trị, chất lượng dịch vụ của nhà băng này. "Chính phủ cần đánh giá kỹ tác động việc bổ sung vốn Nhà nước cho Vietcombank tới phát triển của ngành ngân hàng, hiệu quả kinh tế xã hội", Ủy ban Kinh tế lưu ý. Vietcombank là một trong 4 ngân hàng thương mại Nhà nước, bên cạnh BIDV, VietinBank và Agribank. Ngân hàng này do Nhà nước sở hữu 74,8% vốn điều lệ. Lũy kế nửa đầu năm nay, lợi nhuận hợp nhất trước thuế của nhà băng này đạt 20.835 tỷ đồng, tăng 1,6% so với cùng kỳ 2023. Với dữ liệu này, Vietcombank tiếp tục đứng đầu toàn hệ thống ngân hàng về lợi nhuận 6 tháng đầu năm. Đây cũng là mức lãi nửa đầu năm cao kỷ lục của nhà băng này. Tính đến 30/6, tổng tài sản của ngân hàng đạt hơn 1,9 triệu tỷ đồng, tăng 3,6% so với cuối 2023. Trong đó, cho vay khách hàng gần 1,37 triệu tỷ đồng, tăng 7,8%." Đoạn 1: "Đã có vài đơn vị bán tín chỉ carbon cho khách ngoại nhưng còn thiếu cơ sở pháp lý để đảm bảo hoạt động được thuận lợi, theo chuyên gia. Thông tin tại phiên tọa đàm thuộc Diễn đàn và Triển lãm Kinh tế xanh 2024 (GEFE), ông Đỗ Ngọc Quỳnh, Tổng thư ký Hiệp hội Thị trường Trái phiếu Việt Nam (VBMA), cho biết thị trường tín chỉ carbon tự nguyện Việt Nam đã có một số đơn vị bán được tín chỉ carbon cho nhà đầu tư, tập đoàn nước ngoài. "Họ đang mua chứng chỉ carbon và chứng chỉ năng lượng tái tạo (REC) trong tiêu chí RE100, tức 100% năng lượng tái tạo", ông cho biết. RE100 là sáng kiến toàn cầu dành cho các công ty cam kết sử dụng 100% điện năng tái tạo, phát động bởi Climate Group và CDP vào 2014. Từ trái sang, Marco Gaspari, Điều phối viên Ngành Môi trường tại Cơ quan Hợp tác Phát triển Italy (AICS Hà Nội) và ông Đỗ Ngọc Quỳnh, Tổng Thư ký Hiệp hội Thị trường Trái phiếu Việt Nam (VBMA) nói tại tọa đàm. Ảnh: GEFE 2024 Marco Gaspari, Điều phối viên Ngành Môi trường tại Cơ quan Hợp tác Phát triển Italy (AICS Hà Nội) và ông Đỗ Ngọc Quỳnh, Tổng Thư ký Hiệp hội Thị trường Trái phiếu Việt Nam (VBMA) chia sẻ tại tọa đàm. Ảnh: GEFE 2024 Thị trường carbon gồm hai hình thức là bắt buộc và tự nguyện. Đồ họa: Dỹ Tùng Phân biệt các loại thị trường carbon. 
Đồ họa: Dỹ Tùng Theo kế hoạch của chính phủ, thị trường bắt buộc sẽ vận hành thử nghiệm vào giai đoạn 2025-2028. Với thị trường tự nguyện, ông Quỳnh cho biết đã bắt đầu hình thành và cũng biến động theo diễn biến xu hướng chung toàn cầu. Chuyên gia VBMA cho rằng Việt Nam đã có chính sách chung để thực hiện cam kết Net Zero vào 2050, nhưng vẫn chưa có pháp lý đầy đủ và rõ ràng cho thị trường carbon tự nguyện. "Những người bán tại Việt Nam sau giao dịch không biết hạch toán vào đâu, nộp thuế thế nào. Một số chọn phương án tính vào thu nhập bất thường để khai thuế", ông ví dụ. Ông Nguyễn Thành Nghiệp, Luật sư thành viên công ty luật VTN và Cộng sự chỉ ra việc chưa có quy định xác định tính chất tài sản của tín chỉ carbon. "Chúng có được xem là tài sản bình thường, được thế chấp hay giao dịch thế nào chưa có đủ căn cứ pháp lý", ông nói. Ngoài ra, quy trình MRV (đo lường, báo cáo và kiểm chứng) cũng cần quy định, hướng dẫn rõ. Theo ông, ngoài các cơ quan quản lý, khu vực tư nhân cũng trông chờ xem liệu có thể tham gia hoạt động MRV không. "Trong thời gian tới, nếu hoàn thiện pháp lý, thị trường sẽ có nhiều tiềm năng phát triển hơn", ông Đỗ Ngọc Quỳnh dự báo. Ngoài tín chỉ carbon, với tiềm năng điện tái tạo thứ tư thế giới theo McKenzie, ông cho rằng có thể khai thác việc vừa bán tín chỉ carbon vừa bán được REC. Theo VBMA, quy mô thị trường carbon bắt buộc toàn cầu đạt 104 tỷ USD năm ngoái, tăng 100% so với năm 2020. Trong khi, thị trường tự nguyện đã thu hẹp còn 800 triệu USD, giảm hai phần ba so với 2021 do một số vụ bê bối liên quan đến "giặt xanh" (green washing) làm ảnh hưởng đến uy tín, niềm tin. Theo dõi biến động của thị trường thế giới giúp các bên tham gia trong thị trường carbon tự nguyện còn sơ khai của Việt Nam rút kinh nghiệm và tìm ra hướng đi. Marco Gaspari, Điều phối viên Ngành Môi trường tại Cơ quan Hợp tác Phát triển Italy (AICS) văn phòng Hà Nội, dự báo người mua sẽ cần tìm kiếm các bên bán tín chỉ có hệ thống quản trị tốt và rõ ràng. Ông cho rằng người mua đang thiên về chuộng mua tín chỉ lĩnh vực giảm phát thải sản xuất vì dễ chứng minh. Một loại được quan tâm khác là "carbon xanh dương" (blue carbon) - tín chỉ tạo ra từ các dự án hấp thụ carbon của rừng ngập mặn, đầm lầy bãi triều và cỏ biển. Ông chỉ ra Việt Nam triển vọng với 200.000 ha rừng ngập mặn, có thể làm các dự án carbon tương tự như ở Honduras. Bà Thu Nguyễn, Quản lý chính sách tại Apanada Management Consultancy, Đại diện Viện Tài nguyên Thế giới (WRI) khuyến nghị các dự án tín chỉ carbon nâng cao giá trị bằng cách quan tâm đến tính bình đẳng và bao trùm. Theo đó, mục tiêu không chỉ là giảm phát thải mà còn là cải thiện đời sống người dân và phát triển bình đẳng hơn "Dự án cần bảo đảm có tham vấn của cộng đồng, đặc biệt là phụ nữ và các nhóm yếu thế, để tạo ra lợi ích cho cả cộng đồng lẫn nhà đầu tư", bà nói." Đoạn 2: "Giá nhẫn trơn liên tục điều chỉnh, tăng gần một triệu đồng trong ngày và có nơi lên sát 89 triệu đồng một lượng. 15h ngày 23/10, giá mua bán nhẫn trơn được các thương hiệu kinh doanh điều chỉnh theo diễn biến đi lên của thế giới. Chiều nay, mỗi ounce vàng quốc tế tiếp tục thiết lập kỷ lục mới 2.755 USD. Giá nhẫn trơn tại Công ty Vàng bạc đá quý Sài Gòn (SJC) cũng tăng nửa triệu đồng so với đầu sáng và gần 1 triệu đồng so với cuối ngày hôm qua, lên 86,9 - 88,2 triệu đồng. Công ty Vàng bạc đá quý Phú Nhuận (PNJ) và Mi Hồng niêm yết giá nhẫn trơn quanh vùng 87,4 - 88,4 triệu đồng. Còn tại Tập đoàn Vàng bạc đá quý DOJI, giá mua bán nhẫn trơn cùng thời điểm thậm chí lên 88 - 88,9 triệu đồng một lượng. 
Trước đó đầu ngày, Công ty Vàng bạc đá quý Sài Gòn (SJC) đã tăng 300.000 đồng một lượng so với cuối ngày hôm qua, niêm yết giá nhẫn trơn tại 86,3 - 87,6 triệu đồng. Biểu giá mua bán nhẫn trơn tại Tập đoàn Vàng bạc đá quý DOJI lúc 9h sáng là 87 - 88 triệu đồng, tăng 200.000 đồng so với cuối ngày hôm qua. Nhẫn trơn giữ nhịp tăng liên tục trong 10 ngày qua. So với giữa tháng, mỗi lượng nhẫn trơn đã tăng hơn 5 triệu đồng. Còn so với đầu năm, nhẫn trơn tăng gần 25 triệu một lượng, tương đương hiệu suất 39%. Trong khi giá vàng miếng SJC đứng yên ở vùng 87 - 89 triệu một lượng, do Ngân hàng Nhà nước chưa thay đổi giá bán can thiệp. Thời điểm này là mùa cưới cuối năm và nhu cầu mua vàng nhẫn làm quà cưới tăng, song người dân không dễ để mua được mặt hàng này tại các thương hiệu lớn. Các thương hiệu lớn như DOJI, PNJ, Bảo Tín Minh Châu thường xuyên trong tình trạng cháy hàng. Khách lẻ chỉ may mắn mua được số lượng ít nếu cửa hàng vừa có khách bán ra. Còn tại SJC, các chi nhánh giới hạn lượng mua tối đa 5 phân đến 1 chỉ mỗi người. Trên thị trường quốc tế, mỗi ounce vàng trong 5 ngày qua tăng mạnh hơn 100 USD. Kim loại quý có thời điểm lên mức kỷ lục gần 2.750 USD, trước khi lùi về vùng 2.738 USD vào sáng nay. Quy đổi theo tỷ giá bán Vietcombank, giá vàng trong nước chênh lệch 3,5-5 triệu đồng một lượng so với thế giới. Theo dự báo của các nhà băng hàng đầu thế giới, giá vàng thế giới có thể lên 3.000 USD một ounce vào năm sau. Các chuyên gia khuyến nghị nhà đầu tư phân bổ tỷ trọng nhỏ danh mục vào kênh trú ẩn này, đặc biệt trong bối cảnh kim loại quý đã tăng mạnh thời gian qua." Đoạn 3: "Nhu cầu trú ẩn khi căng thẳng địa chính trị leo thang kéo giá vàng lên mức đỉnh mới, tại 2.748 USD một ounce. Chốt phiên giao dịch 22/10, giá vàng thế giới giao ngay tăng gần 30 USD lên 2.748 USD một ounce. Đây là mức cao kỷ lục mới của kim loại quý. "Căng thẳng địa chính trị vẫn là nguyên nhân chủ yếu. Hai tuần nữa sẽ diễn ra bầu cử Tổng thống Mỹ và cuộc đua vẫn rất sát sao. Bất ổn chính trị đang kéo nhu cầu trú ẩn lên cao", Peter A. Grant - Phó giám đốc Zaner Metals nhận định trên Reuters. Giá vàng thế giới đảo chiều tăng mạnh trong phiên 22/10. Đồ thị: Kitco Giá vàng thế giới đảo chiều tăng mạnh trong phiên 22/10. Đồ thị: Kitco Cuộc thăm dò mới nhất của Reuters/Ipsos cho thấy tỷ lệ ủng hộ Phó tổng thống Kamala Harris hiện là 46%, nhỉnh hơn so với 43% của cựu Tổng thống Donald Trump. "Sự sát sao này đang tạo nên tình trạng thiếu chắc chắn. Môi trường này có lợi cho vàng", các nhà phân tích tại ngân hàng BNP Paribas nhận định. Grant dự báo nếu căng thẳng tại Trung Đông tiếp tục tăng nhiệt, giá có thể lên 3.000 USD cuối năm nay. Từ đầu năm, giá đã tăng 33% và liên tiếp lập đỉnh mới. Một yếu tố khác đang hỗ trợ kim loại quý là làn sóng giảm lãi suất của các ngân hàng trung ương lớn trên toàn cầu. Mỹ, châu Âu, Trung Quốc cùng hàng loạt nền kinh tế khác đã giảm lãi suất năm nay để hỗ trợ nền kinh tế. Trong khi đó, tại Wall Street, các chỉ số chính gần như đứng yên. Nhà đầu tư hiện theo dõi lợi suất trái phiếu chính phủ Mỹ và chờ đánh giá thêm báo cáo tài chính của các doanh nghiệp. Ngoài vàng, các kim loại quý khác cũng tăng giá. Bạc lập đỉnh 12 năm, khi tăng 3,2% lên gần 35 USD một ounce. Han Tan - chiến lược gia thị trường tại Exinity Group dự báo bạc vượt mốc 35 USD trước khi cuộc bầu cử diễn ra. Bạch kim đắt thêm 2,8% lên 1.031 USD một ounce. Palladium tăng 2,9% lên 1.081 USD." '''}, {"role": "user", "content": '''Hãy trả lời câu hỏi sau dựa vào đoạn ngữ cảnh được cung cấp. 
Câu trả lời phải có thưa gửi rõ ràng, xưng là em và kính thưa quý khách.\nCâu hỏi: giá nhẫn trơn hôm nay là bao nhiêu?'''}] tokenized_chat = tokenizer.apply_chat_template(messages, tokenize=True, add_generation_prompt=True, return_tensors="pt") outputs = model.generate(tokenized_chat, max_new_tokens=512) print(tokenizer.decode(outputs[0])) # Em xin thông báo rằng giá nhẫn trơn hôm nay dao động từ 86,9 đến 88,2 triệu đồng một ounce, tùy thuộc vào từng thương hiệu. ``` ***You can customize the prompt before the answer to get a response that suits your needs.*** ***You can also add information about this bot's persona in the system prompt.*** <h4> 3. Function Calling task </h4> ***In this task, we are following the Function Calling template from Glaive AI: [glaiveai/glaive-function-calling-v2](https://huggingface.co/datasets/glaiveai/glaive-function-calling-v2).*** ```python messages = [ {"role": "system", "content": '''Bạn là một trợ lý hữu ích với khả năng truy cập vào các hàm sau. Hãy sử dụng chúng nếu cần - { "name": "weather_forecast", "description": "Cung cấp cập nhật và dự báo thời tiết cho các địa điểm cụ thể, bao gồm nhiệt độ, độ ẩm và tình trạng thời tiết. Ví dụ: thời tiết hôm nay, dự báo thời tiết ở Hà Nội, nhiệt độ tại Đà Nẵng, v.v.", "parameters": { "properties": { "__arg1": { "description": "__arg1", "type": "string" } }, "required": [ "__arg1" ], "type": "object" } }, { "name": "news_update", "description": "Cung cấp các bài báo và cập nhật tin tức mới nhất trên nhiều lĩnh vực như chính trị, công nghệ, thể thao và giải trí. Ví dụ: tin tức hôm nay, cập nhật thể thao, tin công nghệ mới nhất, v.v.", "parameters": { "properties": { "__arg1": { "description": "__arg1", "type": "string" } }, "required": [ "__arg1" ], "type": "object" } }, { "name": "recipe_search", "description": "Tìm kiếm và gợi ý công thức nấu ăn dựa trên nguyên liệu hoặc sở thích dinh dưỡng. Ví dụ: công thức món ăn với gà, món chay, ăn kiêng, v.v.", "parameters": { "properties": { "__arg1": { "description": "__arg1", "type": "string" } }, "required": [ "__arg1" ], "type": "object" } }, { "name": "movie_recommendation", "description": "Cung cấp gợi ý phim dựa trên thể loại, tâm trạng hoặc tiêu đề cụ thể. Ví dụ: phim hài hay, phim hành động mới, gợi ý phim cho tối nay, v.v.", "parameters": { "properties": { "__arg1": { "description": "__arg1", "type": "string" } }, "required": [ "__arg1" ], "type": "object" } }, { "name": "fitness_advice", "description": "Cung cấp mẹo và bài tập cho sức khỏe và thể dục dựa trên mục tiêu của người dùng. Ví dụ: bài tập giảm cân, lịch tập gym cho người mới, lời khuyên về dinh dưỡng, v.v.", "parameters": { "properties": { "__arg1": { "description": "__arg1", "type": "string" } }, "required": [ "__arg1" ], "type": "object" } }, { "name": "travel_planner", "description": "Hỗ trợ lập kế hoạch du lịch, bao gồm gợi ý lịch trình, mẹo đặt vé và thông tin về điểm đến. Ví dụ: lên kế hoạch du lịch Đà Nẵng, địa điểm tham quan ở Nha Trang, mẹo du lịch Nhật Bản, v.v.", "parameters": { "properties": { "__arg1": { "description": "__arg1", "type": "string" } }, "required": [ "__arg1" ], "type": "object" } }'''}, {"role": "user", "content": "hôm nay có tin tức gì về ô nhiễm không khí"}] tokenized_chat = tokenizer.apply_chat_template(messages, tokenize=True, add_generation_prompt=True, return_tensors="pt") outputs = model.generate(tokenized_chat, max_new_tokens=128) print(tokenizer.decode(outputs[0])) # <functioncall> {"name": "news_update", "arguments": {"__arg1": "ô nhiễm không khí"}} ``` <h4> 4. 
Question Rewriting task </h4> ```python messages = [ {"role": "system", "content": '''Bạn là một trợ lí Tiếng Việt nhiệt tình và trung thực. Hãy luôn trả lời một cách hữu ích nhất có thể, đồng thời giữ an toàn.\nNếu một câu hỏi không có ý nghĩa hoặc không hợp lý về mặt thông tin, hãy giải thích tại sao thay vì trả lời một điều gì đó không chính xác, vui lòng không chia sẻ thông tin sai lệch. Context: USER: Concert "Anh trai vượt ngàn chông gai" diễn ra ở đâu? ASSISTANT: Đêm concert Anh trai vượt ngàn chông gai diễn ra vào tối 19-10, tại công viên bờ sông Sài Gòn, TP.HCM với sự tham dự của khoảng 20.000 khán giả. USER: Buổi biểu diễn có bao nhiêu nghệ sĩ? ASSISTANT: Danh sách nghệ sĩ gồm 17 nghệ sĩ: Kay Trần, Jun Phạm, Cường Seven, Tự Long, (S)TRONG Trọng Hiếu, S.T Sơn Thạch, BB Trần, Bùi Công Nam, Rhymastic, Soobin Hoàng Sơn, Binz, Quốc Thiên, Thanh Duy, Bằng Kiều, Tiến Luật, Đinh Tiến Đạt và Đỗ Hoàng Hiệp. USER: Chương trình phát sóng khi nào và ở đâu? ASSISTANT: chương trình phát sóng từ ngày 29 tháng 6 năm 2024 lúc 20:00 thứ 7 hàng tuần trên VTV3 và công chiếu lúc 20:30 cùng ngày trên kênh YouTube YeaH1 Show của nhà sản xuất chương trình.'''}, {"role": "user", "content": '''Dựa vào đoạn hội thoại được cung cấp, viết lại câu nói của người dùng sao cho đầu đủ ý nhất có thể mà không bị sai lệch thông tin. Câu nói: Concert này có tổ chức ở Hà Nội không? '''}] tokenized_chat = tokenizer.apply_chat_template(messages, tokenize=True, add_generation_prompt=True, return_tensors="pt") outputs = model.generate(tokenized_chat, max_new_tokens=512) print(tokenizer.decode(outputs[0])) # Buổi hòa nhạc Anh trai vượt ngàn chông gai có diễn ra ở Hà Nội không? ``` ***Modify the parameters "temperature", "top_k", "top_p" to suit your usecase.*** Corresponding Author: + [email protected]
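***The note above about `temperature`, `top_k` and `top_p` can be applied directly through `generate`. The sketch below is illustrative and not part of the original card: it reuses the `model`, `tokenizer` and `tokenized_chat` objects from the earlier snippets, and the specific values are arbitrary starting points rather than recommended defaults.***
```python
# Hypothetical sketch: pass sampling parameters to `generate`.
# `model`, `tokenizer`, `tokenized_chat` come from the snippets above.
outputs = model.generate(
    tokenized_chat,
    max_new_tokens=512,
    do_sample=True,    # enable sampling so temperature/top_k/top_p take effect
    temperature=0.7,   # lower = more deterministic, higher = more diverse
    top_k=50,          # keep only the 50 most likely tokens at each step
    top_p=0.9,         # nucleus sampling: keep tokens covering 90% of probability mass
)
print(tokenizer.decode(outputs[0]))
```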
[ "SUMMARIZATION" ]
[ "CHIA" ]
kcheng0816/finetuned_arctic_genesis
kcheng0816
sentence-similarity
[ "sentence-transformers", "safetensors", "bert", "sentence-similarity", "feature-extraction", "generated_from_trainer", "dataset_size:410", "loss:MatryoshkaLoss", "loss:MultipleNegativesRankingLoss", "arxiv:1908.10084", "arxiv:2205.13147", "arxiv:1705.00652", "base_model:Snowflake/snowflake-arctic-embed-l", "base_model:finetune:Snowflake/snowflake-arctic-embed-l", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
2025-02-24T14:54:54
2025-02-24T15:03:56
125
0
--- base_model: Snowflake/snowflake-arctic-embed-l library_name: sentence-transformers metrics: - cosine_accuracy@1 - cosine_accuracy@3 - cosine_accuracy@5 - cosine_accuracy@10 - cosine_precision@1 - cosine_precision@3 - cosine_precision@5 - cosine_precision@10 - cosine_recall@1 - cosine_recall@3 - cosine_recall@5 - cosine_recall@10 - cosine_ndcg@10 - cosine_mrr@10 - cosine_map@100 pipeline_tag: sentence-similarity tags: - sentence-transformers - sentence-similarity - feature-extraction - generated_from_trainer - dataset_size:410 - loss:MatryoshkaLoss - loss:MultipleNegativesRankingLoss widget: - source_sentence: How did the LORD respond to Sarah's laughter and doubt about bearing a child? sentences: - '"Stay here with the donkey; the boy and I will go over there; we will worship, and then we will come back to you." [22:6] Abraham took the wood of the burnt offering and laid it on his son Isaac, and he himself carried the fire and the knife. So the two of them walked on together. [22:7] Isaac said to his father Abraham, "Father!" And he said, "Here I am, my son." He said, "The fire and the wood are here, but where is the lamb for a burnt offering?" [22:8] Abraham said, "God himself will provide the lamb for a burnt offering, my son." So the two of them walked on together. [22:9] When they came to the place that God had shown him, Abraham built an altar there and laid the wood in order. He bound his son Isaac, and laid him on the altar, on' - you in due season, and your wife Sarah shall have a son." And Sarah was listening at the tent entrance behind him. [18:11] Now Abraham and Sarah were old, advanced in age; it had ceased to be with Sarah after the manner of women. [18:12] So Sarah laughed to herself, saying, "After I have grown old, and my husband is old, shall I have pleasure?" [18:13] The LORD said to Abraham, "Why did Sarah laugh, and say, 'Shall I indeed bear a child, now that I am old?' [18:14] Is anything too wonderful for the LORD? At the set time I will return to you, in due season, and Sarah shall have a son." [18:15] But Sarah denied, saying, "I did not laugh"; for she was afraid. He said, "Oh yes, you did laugh." [18:16] Then the men set out from there, and they - face; perhaps he will accept me." [32:21] So the present passed on ahead of him; and he himself spent that night in the camp. [32:22] The same night he got up and took his two wives, his two maids, and his eleven children, and crossed the ford of the Jabbok. [32:23] He took them and sent them across the stream, and likewise everything that he had. [32:24] Jacob was left alone; and a man wrestled with him until daybreak. [32:25] When the man saw that he did not prevail against Jacob, he struck him on the hip socket; and Jacob's hip was put out of joint as he wrestled with him. [32:26] Then he said, "Let me go, for the day is breaking." But Jacob said, "I will not let you go, unless you bless me." [32:27] So he said to him, "What is your - source_sentence: What land does God promise to give to Abraham and his offspring? sentences: - for I have made you the ancestor of a multitude of nations. [17:6] I will make you exceedingly fruitful; and I will make nations of you, and kings shall come from you. [17:7] I will establish my covenant between me and you, and your offspring after you throughout their generations, for an everlasting covenant, to be God to you and to your offspring after you. 
[17:8] And I will give to you, and to your offspring after you, the land where you are now an alien, all the land of Canaan, for a perpetual holding; and I will be their God." [17:9] God said to Abraham, "As for you, you shall keep my covenant, you and your offspring after you throughout their generations. [17:10] This is my covenant, which you shall keep, between me and you and your - and his mother prepared savory food, such as his father loved. [27:15] Then Rebekah took the best garments of her elder son Esau, which were with her in the house, and put them on her younger son Jacob; [27:16] and she put the skins of the kids on his hands and on the smooth part of his neck. [27:17] Then she handed the savory food, and the bread that she had prepared, to her son Jacob. [27:18] So he went in to his father, and said, "My father"; and he said, "Here I am; who are you, my son?" [27:19] Jacob said to his father, "I am Esau your firstborn. I have done as you told me; now sit up and eat of my game, so that you may bless me." [27:20] But Isaac said to his son, "How is it that you have found it so quickly, my son?" He answered, - you for a burying place, so that I may bury my dead out of my sight." [23:5] The Hittites answered Abraham, [23:6] "Hear us, my lord; you are a mighty prince among us. Bury your dead in the choicest of our burial places; none of us will withhold from you any burial ground for burying your dead." [23:7] Abraham rose and bowed to the Hittites, the people of the land. [23:8] He said to them, "If you are willing that I should bury my dead out of my sight, hear me, and entreat for me Ephron son of Zohar, [23:9] so that he may give me the cave of Machpelah, which he owns; it is at the end of his field. For the full price let him give it to me in your presence as a possession for a burying place." [23:10] Now Ephron was sitting among the - source_sentence: At what age did Enosh become the father of Kenan? sentences: - of Egypt to the great river, the river Euphrates, [15:19] the land of the Kenites, the Kenizzites, the Kadmonites, [15:20] the Hittites, the Perizzites, the Rephaim, [15:21] the Amorites, the Canaanites, the Girgashites, and the Jebusites.". Chapter 16 [16:1] Now Sarai, Abram's wife, bore him no children. She had an Egyptian slave-girl whose name was Hagar, [16:2] and Sarai said to Abram, "You see that the LORD has prevented me from bearing children; go in to my slave-girl; it may be that I shall obtain children by her." And Abram listened to the voice of Sarai. [16:3] So, after Abram had lived ten years in the land of Canaan, Sarai, Abram's wife, took Hagar the Egyptian, her slave-girl, and gave her to her husband Abram as a wife. [16:4] - to his image, and named him Seth. [5:4] The days of Adam after he became the father of Seth were eight hundred years; and he had other sons and daughters. [5:5] Thus all the days that Adam lived were nine hundred thirty years; and he died. [5:6] When Seth had lived one hundred five years, he became the father of Enosh. [5:7] Seth lived after the birth of Enosh eight hundred seven years, and had other sons and daughters. [5:8] Thus all the days of Seth were nine hundred twelve years; and he died. [5:9] When Enosh had lived ninety years, he became the father of Kenan. [5:10] Enosh lived after the birth of Kenan eight hundred fifteen years, and had other sons and daughters. 
[5:11] Thus all the days of Enosh were nine hundred five years; and - said, "Come, let us build ourselves a city, and a tower with its top in the heavens, and let us make a name for ourselves; otherwise we shall be scattered abroad upon the face of the whole earth." [11:5] The LORD came down to see the city and the tower, which mortals had built. [11:6] And the LORD said, "Look, they are one people, and they have all one language; and this is only the beginning of what they will do; nothing that they propose to do will now be impossible for them. [11:7] Come, let us go down, and confuse their language there, so that they will not understand one another's speech." [11:8] So the LORD scattered them abroad from there over the face of all the earth, and they left off building the city. [11:9] Therefore it was - source_sentence: How did the angels assist Lot and his family in escaping the city? sentences: - has become great before the LORD, and the LORD has sent us to destroy it." [19:14] So Lot went out and said to his sons-in-law, who were to marry his daughters, "Up, get out of this place; for the LORD is about to destroy the city." But he seemed to his sons-in-law to be jesting. [19:15] When morning dawned, the angels urged Lot, saying, "Get up, take your wife and your two daughters who are here, or else you will be consumed in the punishment of the city." [19:16] But he lingered; so the men seized him and his wife and his two daughters by the hand, the LORD being merciful to him, and they brought him out and left him outside the city. [19:17] When they had brought them outside, they said, "Flee for your life; do not look back or stop - five years; and he died. [5:12] When Kenan had lived seventy years, he became the father of Mahalalel. [5:13] Kenan lived after the birth of Mahalalel eight hundred and forty years, and had other sons and daughters. [5:14] Thus all the days of Kenan were nine hundred and ten years; and he died. [5:15] When Mahalalel had lived sixty-five years, he became the father of Jared. [5:16] Mahalalel lived after the birth of Jared eight hundred thirty years, and had other sons and daughters. [5:17] Thus all the days of Mahalalel were eight hundred ninety-five years; and he died. [5:18] When Jared had lived one hundred sixty-two years he became the father of Enoch. [5:19] Jared lived after the birth of Enoch eight hundred years, and had other sons - go with this man?" She said, "I will." [24:59] So they sent away their sister Rebekah and her nurse along with Abraham's servant and his men. [24:60] And they blessed Rebekah and said to her, "May you, our sister, become thousands of myriads; may your offspring gain possession of the gates of their foes." [24:61] Then Rebekah and her maids rose up, mounted the camels, and followed the man; thus the servant took Rebekah, and went his way. [24:62] Now Isaac had come from Beer-lahai-roi, and was settled in the Negeb. [24:63] Isaac went out in the evening to walk in the field; and looking up, he saw camels coming. [24:64] And Rebekah looked up, and when she saw Isaac, she slipped quickly from the camel, [24:65] and said to the servant, "Who is - source_sentence: What did Abraham serve to the visitors while they ate under the tree? sentences: - '[21:34] And Abraham resided as an alien many days in the land of the Philistines. Chapter 22 [22:1] After these things God tested Abraham. He said to him, "Abraham!" And he said, "Here I am." 
[22:2] He said, "Take your son, your only son Isaac, whom you love, and go to the land of Moriah, and offer him there as a burnt offering on one of the mountains that I shall show you." [22:3] So Abraham rose early in the morning, saddled his donkey, and took two of his young men with him, and his son Isaac; he cut the wood for the burnt offering, and set out and went to the place in the distance that God had shown him. [22:4] On the third day Abraham looked up and saw the place far away. [22:5] Then Abraham said to his young men, "Stay here with the' - tree. [18:5] Let me bring a little bread, that you may refresh yourselves, and after that you may pass on - since you have come to your servant." So they said, "Do as you have said." [18:6] And Abraham hastened into the tent to Sarah, and said, "Make ready quickly three measures of choice flour, knead it, and make cakes. " [18:7] Abraham ran to the herd, and took a calf, tender and good, and gave it to the servant, who hastened to prepare it. [18:8] Then he took curds and milk and the calf that he had prepared, and set it before them; and he stood by them under the tree while they ate. [18:9] They said to him, "Where is your wife Sarah?" And he said, "There, in the tent." [18:10] Then one said, "I will surely return to you in due season, - '[30:24] and she named him Joseph, saying, "May the LORD add to me another son!" [30:25] When Rachel had borne Joseph, Jacob said to Laban, "Send me away, that I may go to my own home and country. [30:26] Give me my wives and my children for whom I have served you, and let me go; for you know very well the service I have given you." [30:27] But Laban said to him, "If you will allow me to say so, I have learned by divination that the LORD has blessed me because of you; [30:28] name your wages, and I will give it." [30:29] Jacob said to him, "You yourself know how I have served you, and how your cattle have fared with me. [30:30] For you had little before I came, and it has increased abundantly; and the LORD has blessed you wherever I turned.' model-index: - name: SentenceTransformer based on Snowflake/snowflake-arctic-embed-l results: - task: type: information-retrieval name: Information Retrieval dataset: name: Unknown type: unknown metrics: - type: cosine_accuracy@1 value: 0.75 name: Cosine Accuracy@1 - type: cosine_accuracy@3 value: 0.9375 name: Cosine Accuracy@3 - type: cosine_accuracy@5 value: 0.975 name: Cosine Accuracy@5 - type: cosine_accuracy@10 value: 0.9875 name: Cosine Accuracy@10 - type: cosine_precision@1 value: 0.75 name: Cosine Precision@1 - type: cosine_precision@3 value: 0.3125 name: Cosine Precision@3 - type: cosine_precision@5 value: 0.19499999999999998 name: Cosine Precision@5 - type: cosine_precision@10 value: 0.09874999999999998 name: Cosine Precision@10 - type: cosine_recall@1 value: 0.75 name: Cosine Recall@1 - type: cosine_recall@3 value: 0.9375 name: Cosine Recall@3 - type: cosine_recall@5 value: 0.975 name: Cosine Recall@5 - type: cosine_recall@10 value: 0.9875 name: Cosine Recall@10 - type: cosine_ndcg@10 value: 0.8820698787104944 name: Cosine Ndcg@10 - type: cosine_mrr@10 value: 0.8465773809523809 name: Cosine Mrr@10 - type: cosine_map@100 value: 0.8472718253968254 name: Cosine Map@100 --- # SentenceTransformer based on Snowflake/snowflake-arctic-embed-l This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [Snowflake/snowflake-arctic-embed-l](https://huggingface.co/Snowflake/snowflake-arctic-embed-l). 
It maps sentences & paragraphs to a 1024-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more. ## Model Details ### Model Description - **Model Type:** Sentence Transformer - **Base model:** [Snowflake/snowflake-arctic-embed-l](https://huggingface.co/Snowflake/snowflake-arctic-embed-l) <!-- at revision d8fb21ca8d905d2832ee8b96c894d3298964346b --> - **Maximum Sequence Length:** 512 tokens - **Output Dimensionality:** 1024 dimensions - **Similarity Function:** Cosine Similarity <!-- - **Training Dataset:** Unknown --> <!-- - **Language:** Unknown --> <!-- - **License:** Unknown --> ### Model Sources - **Documentation:** [Sentence Transformers Documentation](https://sbert.net) - **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers) - **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers) ### Full Model Architecture ``` SentenceTransformer( (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel (1): Pooling({'word_embedding_dimension': 1024, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True}) (2): Normalize() ) ``` ## Usage ### Direct Usage (Sentence Transformers) First install the Sentence Transformers library: ```bash pip install -U sentence-transformers ``` Then you can load this model and run inference. ```python from sentence_transformers import SentenceTransformer # Download from the 🤗 Hub model = SentenceTransformer("kcheng0816/finetuned_arctic_genesis") # Run inference sentences = [ 'What did Abraham serve to the visitors while they ate under the tree?', 'tree. [18:5] Let me bring a little bread, that you may refresh yourselves, and after that you may pass on - since you have come to your servant." So they said, "Do as you have said." [18:6] And Abraham hastened into the tent to Sarah, and said, "Make ready quickly three measures of choice flour, knead it, and make cakes. " [18:7] Abraham ran to the herd, and took a calf, tender and good, and gave it to the servant, who hastened to prepare it. [18:8] Then he took curds and milk and the calf that he had prepared, and set it before them; and he stood by them under the tree while they ate. [18:9] They said to him, "Where is your wife Sarah?" And he said, "There, in the tent." [18:10] Then one said, "I will surely return to you in due season,', '[21:34] And Abraham resided as an alien many days in the land of the Philistines. Chapter 22 [22:1] After these things God tested Abraham. He said to him, "Abraham!" And he said, "Here I am." [22:2] He said, "Take your son, your only son Isaac, whom you love, and go to the land of Moriah, and offer him there as a burnt offering on one of the mountains that I shall show you." [22:3] So Abraham rose early in the morning, saddled his donkey, and took two of his young men with him, and his son Isaac; he cut the wood for the burnt offering, and set out and went to the place in the distance that God had shown him. [22:4] On the third day Abraham looked up and saw the place far away. 
[22:5] Then Abraham said to his young men, "Stay here with the', ] embeddings = model.encode(sentences) print(embeddings.shape) # [3, 1024] # Get the similarity scores for the embeddings similarities = model.similarity(embeddings, embeddings) print(similarities.shape) # [3, 3] ``` <!-- ### Direct Usage (Transformers) <details><summary>Click to see the direct usage in Transformers</summary> </details> --> <!-- ### Downstream Usage (Sentence Transformers) You can finetune this model on your own dataset. <details><summary>Click to expand</summary> </details> --> <!-- ### Out-of-Scope Use *List how the model may foreseeably be misused and address what users ought not to do with the model.* --> ## Evaluation ### Metrics #### Information Retrieval * Evaluated with [<code>InformationRetrievalEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.InformationRetrievalEvaluator) | Metric | Value | |:--------------------|:-----------| | cosine_accuracy@1 | 0.75 | | cosine_accuracy@3 | 0.9375 | | cosine_accuracy@5 | 0.975 | | cosine_accuracy@10 | 0.9875 | | cosine_precision@1 | 0.75 | | cosine_precision@3 | 0.3125 | | cosine_precision@5 | 0.195 | | cosine_precision@10 | 0.0987 | | cosine_recall@1 | 0.75 | | cosine_recall@3 | 0.9375 | | cosine_recall@5 | 0.975 | | cosine_recall@10 | 0.9875 | | **cosine_ndcg@10** | **0.8821** | | cosine_mrr@10 | 0.8466 | | cosine_map@100 | 0.8473 | <!-- ## Bias, Risks and Limitations *What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.* --> <!-- ### Recommendations *What are recommendations with respect to the foreseeable issues? For example, filtering explicit content.* --> ## Training Details ### Training Dataset #### Unnamed Dataset * Size: 410 training samples * Columns: <code>sentence_0</code> and <code>sentence_1</code> * Approximate statistics based on the first 410 samples: | | sentence_0 | sentence_1 | |:--------|:-----------------------------------------------------------------------------------|:------------------------------------------------------------------------------------| | type | string | string | | details | <ul><li>min: 10 tokens</li><li>mean: 17.63 tokens</li><li>max: 31 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 206.17 tokens</li><li>max: 257 tokens</li></ul> | * Samples: | sentence_0 | sentence_1 | |:------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | <code>What are the main themes explored in the Book of Genesis?</code> | <code>The Book of Genesis</code> | | <code>How does the Book of Genesis describe the creation of the world?</code> | <code>The Book of Genesis</code> | | <code>What did God 
create in the beginning according to the Book of Genesis?</code> | <code>THE BOOK OF GENESIS 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50  Chapter 1 [1:1] In the beginning when God created the heavens and the earth, [1:2] the earth was a formless void and darkness covered the face of the deep, while a wind from God swept over the face of the waters. [1:3] Then God said, "Let there be light"; and there was light. [1:4] And God saw that the light was good; and God separated the light from the darkness. [1:5] God called the light Day, and the darkness he called Night. And there was evening and there was morning, the first day. [1:6] And God said, "Let there be</code> | * Loss: [<code>MatryoshkaLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#matryoshkaloss) with these parameters: ```json { "loss": "MultipleNegativesRankingLoss", "matryoshka_dims": [ 768, 512, 256, 128, 64 ], "matryoshka_weights": [ 1, 1, 1, 1, 1 ], "n_dims_per_step": -1 } ``` ### Training Hyperparameters #### Non-Default Hyperparameters - `eval_strategy`: steps - `per_device_train_batch_size`: 10 - `per_device_eval_batch_size`: 10 - `num_train_epochs`: 10 - `multi_dataset_batch_sampler`: round_robin #### All Hyperparameters <details><summary>Click to expand</summary> - `overwrite_output_dir`: False - `do_predict`: False - `eval_strategy`: steps - `prediction_loss_only`: True - `per_device_train_batch_size`: 10 - `per_device_eval_batch_size`: 10 - `per_gpu_train_batch_size`: None - `per_gpu_eval_batch_size`: None - `gradient_accumulation_steps`: 1 - `eval_accumulation_steps`: None - `torch_empty_cache_steps`: None - `learning_rate`: 5e-05 - `weight_decay`: 0.0 - `adam_beta1`: 0.9 - `adam_beta2`: 0.999 - `adam_epsilon`: 1e-08 - `max_grad_norm`: 1 - `num_train_epochs`: 10 - `max_steps`: -1 - `lr_scheduler_type`: linear - `lr_scheduler_kwargs`: {} - `warmup_ratio`: 0.0 - `warmup_steps`: 0 - `log_level`: passive - `log_level_replica`: warning - `log_on_each_node`: True - `logging_nan_inf_filter`: True - `save_safetensors`: True - `save_on_each_node`: False - `save_only_model`: False - `restore_callback_states_from_checkpoint`: False - `no_cuda`: False - `use_cpu`: False - `use_mps_device`: False - `seed`: 42 - `data_seed`: None - `jit_mode_eval`: False - `use_ipex`: False - `bf16`: False - `fp16`: False - `fp16_opt_level`: O1 - `half_precision_backend`: auto - `bf16_full_eval`: False - `fp16_full_eval`: False - `tf32`: None - `local_rank`: 0 - `ddp_backend`: None - `tpu_num_cores`: None - `tpu_metrics_debug`: False - `debug`: [] - `dataloader_drop_last`: False - `dataloader_num_workers`: 0 - `dataloader_prefetch_factor`: None - `past_index`: -1 - `disable_tqdm`: False - `remove_unused_columns`: True - `label_names`: None - `load_best_model_at_end`: False - `ignore_data_skip`: False - `fsdp`: [] - `fsdp_min_num_params`: 0 - `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False} - `fsdp_transformer_layer_cls_to_wrap`: None - `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None} - `deepspeed`: None - `label_smoothing_factor`: 0.0 - `optim`: adamw_torch - `optim_args`: None - `adafactor`: False - `group_by_length`: False - `length_column_name`: length - 
`ddp_find_unused_parameters`: None - `ddp_bucket_cap_mb`: None - `ddp_broadcast_buffers`: False - `dataloader_pin_memory`: True - `dataloader_persistent_workers`: False - `skip_memory_metrics`: True - `use_legacy_prediction_loop`: False - `push_to_hub`: False - `resume_from_checkpoint`: None - `hub_model_id`: None - `hub_strategy`: every_save - `hub_private_repo`: None - `hub_always_push`: False - `gradient_checkpointing`: False - `gradient_checkpointing_kwargs`: None - `include_inputs_for_metrics`: False - `include_for_metrics`: [] - `eval_do_concat_batches`: True - `fp16_backend`: auto - `push_to_hub_model_id`: None - `push_to_hub_organization`: None - `mp_parameters`: - `auto_find_batch_size`: False - `full_determinism`: False - `torchdynamo`: None - `ray_scope`: last - `ddp_timeout`: 1800 - `torch_compile`: False - `torch_compile_backend`: None - `torch_compile_mode`: None - `dispatch_batches`: None - `split_batches`: None - `include_tokens_per_second`: False - `include_num_input_tokens_seen`: False - `neftune_noise_alpha`: None - `optim_target_modules`: None - `batch_eval_metrics`: False - `eval_on_start`: False - `use_liger_kernel`: False - `eval_use_gather_object`: False - `average_tokens_across_devices`: False - `prompts`: None - `batch_sampler`: batch_sampler - `multi_dataset_batch_sampler`: round_robin </details> ### Training Logs | Epoch | Step | cosine_ndcg@10 | |:------:|:----:|:--------------:| | 1.0 | 41 | 0.8988 | | 1.2195 | 50 | 0.8824 | | 2.0 | 82 | 0.8775 | | 2.4390 | 100 | 0.8808 | | 3.0 | 123 | 0.8673 | | 3.6585 | 150 | 0.8634 | | 4.0 | 164 | 0.8735 | | 4.8780 | 200 | 0.8730 | | 5.0 | 205 | 0.8713 | | 6.0 | 246 | 0.8719 | | 6.0976 | 250 | 0.8765 | | 7.0 | 287 | 0.8848 | | 7.3171 | 300 | 0.8783 | | 8.0 | 328 | 0.8892 | | 8.5366 | 350 | 0.8881 | | 9.0 | 369 | 0.8821 | | 9.7561 | 400 | 0.8821 | | 10.0 | 410 | 0.8821 | ### Framework Versions - Python: 3.11.11 - Sentence Transformers: 3.4.1 - Transformers: 4.49.0 - PyTorch: 2.6.0 - Accelerate: 1.3.0 - Datasets: 3.3.2 - Tokenizers: 0.21.0 ## Citation ### BibTeX #### Sentence Transformers ```bibtex @inproceedings{reimers-2019-sentence-bert, title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks", author = "Reimers, Nils and Gurevych, Iryna", booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing", month = "11", year = "2019", publisher = "Association for Computational Linguistics", url = "https://arxiv.org/abs/1908.10084", } ``` #### MatryoshkaLoss ```bibtex @misc{kusupati2024matryoshka, title={Matryoshka Representation Learning}, author={Aditya Kusupati and Gantavya Bhatt and Aniket Rege and Matthew Wallingford and Aditya Sinha and Vivek Ramanujan and William Howard-Snyder and Kaifeng Chen and Sham Kakade and Prateek Jain and Ali Farhadi}, year={2024}, eprint={2205.13147}, archivePrefix={arXiv}, primaryClass={cs.LG} } ``` #### MultipleNegativesRankingLoss ```bibtex @misc{henderson2017efficient, title={Efficient Natural Language Response Suggestion for Smart Reply}, author={Matthew Henderson and Rami Al-Rfou and Brian Strope and Yun-hsuan Sung and Laszlo Lukacs and Ruiqi Guo and Sanjiv Kumar and Balint Miklos and Ray Kurzweil}, year={2017}, eprint={1705.00652}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` <!-- ## Glossary *Clearly define terms in order to be accessible across audiences.* --> <!-- ## Model Card Authors *Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.* 
--> <!-- ## Model Card Contact *Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.* -->
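Since the training configuration above uses MatryoshkaLoss with dimensions [768, 512, 256, 128, 64], the embeddings can in principle be truncated to a smaller dimension with limited quality loss. The snippet below is a hedged sketch rather than part of the original card: `truncate_dim` is a standard sentence-transformers option, and 256 is an arbitrary choice of dimension.
```python
from sentence_transformers import SentenceTransformer

# Sketch under assumptions: because training used MatryoshkaLoss with
# dims [768, 512, 256, 128, 64], the leading dimensions of the embeddings
# should remain useful when truncated. 256 is an illustrative choice.
model = SentenceTransformer("kcheng0816/finetuned_arctic_genesis", truncate_dim=256)

sentences = [
    "What did Abraham serve to the visitors while they ate under the tree?",
    "Abraham ran to the herd, and took a calf, tender and good.",
]
embeddings = model.encode(sentences)
print(embeddings.shape)  # (2, 256) instead of (2, 1024)

similarities = model.similarity(embeddings, embeddings)
print(similarities.shape)  # (2, 2)
```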
[ "TEXT_CLASSIFICATION" ]
[ "BEAR" ]
sschet/bert-base-uncased_clinical-ner
sschet
token-classification
[ "transformers", "pytorch", "tf", "jax", "bert", "token-classification", "dataset:tner/bc5cdr", "dataset:commanderstrife/jnlpba", "dataset:bc2gm_corpus", "dataset:drAbreu/bc4chemd_ner", "dataset:linnaeus", "dataset:chintagunta85/ncbi_disease", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-01-26T16:09:31
2023-02-01T03:39:48
124
5
--- datasets: - tner/bc5cdr - commanderstrife/jnlpba - bc2gm_corpus - drAbreu/bc4chemd_ner - linnaeus - chintagunta85/ncbi_disease --- A Named Entity Recognition model for clinical entities (`problem`, `treatment`, `test`). The model has been trained on the [i2b2 (now n2c2) dataset](https://n2c2.dbmi.hms.harvard.edu) for the 2010 Relations task. Please visit the n2c2 site to request access to the dataset.
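A minimal usage sketch (not part of the original card) with the standard Transformers token-classification pipeline; the example sentence is invented, and the exact labels returned depend on the model's configuration (expected to correspond to `problem`, `treatment` and `test`):
```python
from transformers import pipeline

# Hedged usage sketch: a standard token-classification pipeline over the
# clinical NER model; label names come from the model's own config.
ner = pipeline(
    "token-classification",
    model="sschet/bert-base-uncased_clinical-ner",
    aggregation_strategy="simple",  # merge word pieces into whole entity spans
)

text = "The patient was given aspirin for chest pain and scheduled for an ECG."
for entity in ner(text):
    print(entity["word"], "->", entity["entity_group"], round(entity["score"], 3))
```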
[ "NAMED_ENTITY_RECOGNITION" ]
[ "BC5CDR", "JNLPBA", "LINNAEUS", "NCBI DISEASE" ]
seiya/oubiobert-base-uncased
seiya
null
[ "transformers", "pytorch", "jax", "bert", "pretraining", "exbert", "arxiv:2005.07202", "license:apache-2.0", "endpoints_compatible", "region:us" ]
2022-03-02T23:29:05
2021-05-20T05:10:40
123
3
--- license: apache-2.0 tags: - exbert --- # ouBioBERT-Base, Uncased Bidirectional Encoder Representations from Transformers for Biomedical Text Mining by Osaka University (ouBioBERT) is a language model based on the BERT-Base (Devlin, et al., 2019) architecture. We pre-trained ouBioBERT on PubMed abstracts from the PubMed baseline (ftp://ftp.ncbi.nlm.nih.gov/pubmed/baseline) via our method. The details of the pre-training procedure can be found in Wada, et al. (2020). ## Evaluation We evaluated the performance of ouBioBERT in terms of the biomedical language understanding evaluation (BLUE) benchmark (Peng, et al., 2019). The numbers are mean (standard deviation) on five different random seeds. | Dataset | Task Type | Score | |:----------------|:-----------------------------|-------------:| | MedSTS | Sentence similarity | 84.9 (0.6) | | BIOSSES | Sentence similarity | 92.3 (0.8) | | BC5CDR-disease | Named-entity recognition | 87.4 (0.1) | | BC5CDR-chemical | Named-entity recognition | 93.7 (0.2) | | ShARe/CLEFE | Named-entity recognition | 80.1 (0.4) | | DDI | Relation extraction | 81.1 (1.5) | | ChemProt | Relation extraction | 75.0 (0.3) | | i2b2 2010 | Relation extraction | 74.0 (0.8) | | HoC | Document classification | 86.4 (0.5) | | MedNLI | Inference | 83.6 (0.7) | | **Total** | Macro average of the scores |**83.8 (0.3)**| ## Code for Fine-tuning We made the source code for fine-tuning freely available at [our repository](https://github.com/sy-wada/blue_benchmark_with_transformers). ## Citation If you use our work in your research, please kindly cite the following paper: ```bibtex @misc{2005.07202, Author = {Shoya Wada and Toshihiro Takeda and Shiro Manabe and Shozo Konishi and Jun Kamohara and Yasushi Matsumura}, Title = {A pre-training technique to localize medical BERT and enhance BioBERT}, Year = {2020}, Eprint = {arXiv:2005.07202}, } ``` <a href="https://huggingface.co/exbert/?model=seiya/oubiobert-base-uncased&sentence=Coronavirus%20disease%20(COVID-19)%20is%20caused%20by%20SARS-COV2%20and%20represents%20the%20causative%20agent%20of%20a%20potentially%20fatal%20disease%20that%20is%20of%20great%20global%20public%20health%20concern."> <img width="300px" src="https://cdn-media.huggingface.co/exbert/button.png"> </a>
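As a rough illustration of how the checkpoint can be used outside the BLUE fine-tuning code, the sketch below loads ouBioBERT as a plain encoder with Transformers and mean-pools the token embeddings into a sentence vector. This is an assumption-laden example, not an official recipe from the authors:
```python
from transformers import AutoTokenizer, AutoModel
import torch

# Illustrative sketch only: load the pre-trained checkpoint as a plain
# encoder and extract contextual embeddings; fine-tuning for the BLUE
# tasks is handled in the authors' repository linked above.
tokenizer = AutoTokenizer.from_pretrained("seiya/oubiobert-base-uncased")
model = AutoModel.from_pretrained("seiya/oubiobert-base-uncased")

text = "Coronavirus disease (COVID-19) is caused by SARS-COV2."
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)

# Mean-pool the token embeddings into a single sentence vector.
sentence_embedding = outputs.last_hidden_state.mean(dim=1)
print(sentence_embedding.shape)  # torch.Size([1, 768])
```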
[ "RELATION_EXTRACTION" ]
[ "BC5CDR", "BIOSSES", "CHEMPROT", "MEDNLI" ]
StivenLancheros/roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_Augmented_EN
StivenLancheros
token-classification
[ "transformers", "pytorch", "tensorboard", "roberta", "token-classification", "generated_from_trainer", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2022-03-21T20:11:24
2022-03-21T22:07:55
123
1
--- license: apache-2.0 metrics: - precision - recall - f1 - accuracy tags: - generated_from_trainer model-index: - name: roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_Augmented_EN results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_Augmented_EN This model is a fine-tuned version of [PlanTL-GOB-ES/roberta-base-biomedical-clinical-es](https://huggingface.co/PlanTL-GOB-ES/roberta-base-biomedical-clinical-es) on the CRAFT dataset. It achieves the following results on the evaluation set: - Loss: 0.2276 - Precision: 0.8078 - Recall: 0.8258 - F1: 0.8167 - Accuracy: 0.9629 ## Model description This model performs Named Entity Recognition for 6 entity tags: Sequence, Cell, Protein, Gene, Taxon, and Chemical from the CRAFT(Colorado Richly Annotated Full Text) Corpus in English. Entity tags have been normalized and replaced from the original three letter code to a full name e.g. B-Protein, I-Chemical. This model is trained on augmented data created using Entity Replacement. 20% of the entities were replaced using a list of entities for each entity tag obtained from the official ontologies for each entity class. Both datasets (original, augmented) were concatenated. ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 3e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 ### Training results | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy | |:-------------:|:-----:|:-----:|:---------------:|:---------:|:------:|:------:|:--------:| | 0.0842 | 1.0 | 2719 | 0.1765 | 0.7606 | 0.7785 | 0.7695 | 0.9542 | | 0.0392 | 2.0 | 5438 | 0.1971 | 0.7990 | 0.7958 | 0.7974 | 0.9596 | | 0.0138 | 3.0 | 8157 | 0.2094 | 0.8013 | 0.8196 | 0.8103 | 0.9620 | | 0.0082 | 4.0 | 10876 | 0.2276 | 0.8078 | 0.8258 | 0.8167 | 0.9629 | ### Framework versions - Transformers 4.17.0 - Pytorch 1.10.0+cu111 - Datasets 2.0.0 - Tokenizers 0.11.6
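A short, hedged inference sketch (not included in the original card) using the Transformers token-classification pipeline; the sample sentence is invented and the returned entity groups should follow the normalized tag scheme described above:
```python
from transformers import pipeline

# Hedged example: run the fine-tuned CRAFT NER model through a standard
# pipeline; entity tags are expected to be the normalized names
# (e.g. Protein, Chemical) described in the model card.
ner = pipeline(
    "token-classification",
    model="StivenLancheros/roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_Augmented_EN",
    aggregation_strategy="simple",
)

text = "The BRCA1 protein interacts with estrogen receptors in breast tissue."
for entity in ner(text):
    print(f'{entity["word"]}: {entity["entity_group"]} ({entity["score"]:.2f})')
```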
[ "NAMED_ENTITY_RECOGNITION" ]
[ "CRAFT" ]
StivenLancheros/biobert-base-cased-v1.2-finetuned-ner-CRAFT_Augmented_EN
StivenLancheros
token-classification
[ "transformers", "pytorch", "tensorboard", "bert", "token-classification", "generated_from_trainer", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2022-03-15T22:41:38
2022-03-17T14:45:49
122
1
--- metrics: - precision - recall - f1 - accuracy tags: - generated_from_trainer model-index: - name: biobert-base-cased-v1.2-finetuned-ner-CRAFT_Augmented_EN results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # biobert-base-cased-v1.2-finetuned-ner-CRAFT_Augmented_EN This model is a fine-tuned version of [dmis-lab/biobert-base-cased-v1.2](https://huggingface.co/dmis-lab/biobert-base-cased-v1.2) on the CRAFT dataset. It achieves the following results on the evaluation set: - Loss: 0.2299 - Precision: 0.8122 - Recall: 0.8475 - F1: 0.8294 - Accuracy: 0.9661 ## Model description This model performs Named Entity Recognition for 6 entity tags: Sequence, Cell, Protein, Gene, Taxon, and Chemical from the CRAFT(Colorado Richly Annotated Full Text) Corpus in Spanish and English. Entity tags have been normalized and replaced from the original three letter code to a full name e.g. B-Protein, I-Chemical. This model is trained on augmented data created using Entity Replacement. 20% of the entities were replaced using a list of entities for each entity tag obtained from the official ontologies for each entity class. Both datasets (original, augmented) were concatenated. ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 3e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 ### Training results | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy | |:-------------:|:-----:|:-----:|:---------------:|:---------:|:------:|:------:|:--------:| | 0.0542 | 1.0 | 2719 | 0.1540 | 0.7834 | 0.8300 | 0.8060 | 0.9622 | | 0.0229 | 2.0 | 5438 | 0.1920 | 0.8092 | 0.8219 | 0.8155 | 0.9644 | | 0.0069 | 3.0 | 8157 | 0.2054 | 0.8130 | 0.8481 | 0.8302 | 0.9656 | | 0.0023 | 4.0 | 10876 | 0.2299 | 0.8122 | 0.8475 | 0.8294 | 0.9661 | ### Framework versions - Transformers 4.17.0 - Pytorch 1.10.0+cu111 - Datasets 2.0.0 - Tokenizers 0.11.6
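For a lower-level view than a pipeline, the hedged sketch below loads the model directly, prints its label map (which should contain the normalized BIO tags described above), and decodes predictions token by token; the example sentence is invented:
```python
from transformers import AutoTokenizer, AutoModelForTokenClassification
import torch

# Sketch under assumptions: inspect the label map and run a manual
# forward pass; labels should be BIO-formatted versions of the tags
# Sequence, Cell, Protein, Gene, Taxon and Chemical.
model_id = "StivenLancheros/biobert-base-cased-v1.2-finetuned-ner-CRAFT_Augmented_EN"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)
print(model.config.id2label)

text = "Mutations in the TP53 gene alter protein function in tumour cells."
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
predictions = logits.argmax(dim=-1)[0]

tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
for token, pred in zip(tokens, predictions):
    label = model.config.id2label[pred.item()]
    if label != "O":
        print(token, "->", label)
```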
[ "NAMED_ENTITY_RECOGNITION" ]
[ "CRAFT" ]
knowledgator/gliner-bi-small-v1.0
knowledgator
token-classification
[ "gliner", "pytorch", "NER", "GLiNER", "information extraction", "encoder", "entity recognition", "token-classification", "multilingual", "dataset:urchade/pile-mistral-v0.1", "dataset:numind/NuNER", "dataset:knowledgator/GLINER-multi-task-synthetic-data", "license:apache-2.0", "region:us" ]
2024-08-18T06:56:31
2024-08-25T11:38:26
122
10
--- datasets: - urchade/pile-mistral-v0.1 - numind/NuNER - knowledgator/GLINER-multi-task-synthetic-data language: - multilingual library_name: gliner license: apache-2.0 pipeline_tag: token-classification tags: - NER - GLiNER - information extraction - encoder - entity recognition --- # About GLiNER is a Named Entity Recognition (NER) model capable of identifying any entity type using bidirectional transformer encoders (BERT-like). It provides a practical alternative to traditional NER models, which are limited to predefined entities, and to Large Language Models (LLMs) that, despite their flexibility, are costly and large for resource-constrained scenarios. This particular version utilizes a bi-encoder architecture, where the textual encoder is [DeBERTa v3 small](microsoft/deberta-v3-small) and the entity label encoder is the sentence transformer [MiniLM-L6-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2). Such an architecture brings several advantages over uni-encoder GLiNER: * An unlimited number of entities can be recognized at a single time; * Faster inference if entity embeddings are preprocessed; * Better generalization to unseen entities. However, it has some drawbacks, such as a lack of inter-label interactions that makes it hard for the model to disambiguate semantically similar but contextually different entities. ### Installation & Usage Install or update the gliner package: ```bash pip install gliner -U ``` Once you've installed the GLiNER library, you can import the GLiNER class. You can then load this model using `GLiNER.from_pretrained` and predict entities with `predict_entities`. ```python from gliner import GLiNER model = GLiNER.from_pretrained("knowledgator/gliner-bi-small-v1.0") text = """ Cristiano Ronaldo dos Santos Aveiro (Portuguese pronunciation: [kɾiʃˈtjɐnu ʁɔˈnaldu]; born 5 February 1985) is a Portuguese professional footballer who plays as a forward for and captains both Saudi Pro League club Al Nassr and the Portugal national team. Widely regarded as one of the greatest players of all time, Ronaldo has won five Ballon d'Or awards,[note 3] a record three UEFA Men's Player of the Year Awards, and four European Golden Shoes, the most by a European player. He has won 33 trophies in his career, including seven league titles, five UEFA Champions Leagues, the UEFA European Championship and the UEFA Nations League. Ronaldo holds the records for most appearances (183), goals (140) and assists (42) in the Champions League, goals in the European Championship (14), international goals (128) and international appearances (205). He is one of the few players to have made over 1,200 professional career appearances, the most by an outfield player, and has scored over 850 official senior career goals for club and country, making him the top goalscorer of all time. 
""" labels = ["person", "award", "date", "competitions", "teams"] entities = model.predict_entities(text, labels, threshold=0.3) for entity in entities: print(entity["text"], "=>", entity["label"]) ``` ``` Cristiano Ronaldo dos Santos Aveiro => person 5 February 1985 => date Al Nassr => teams Portugal national team => teams Ballon d'Or => award UEFA Men's Player of the Year Awards => award European Golden Shoes => award UEFA Champions Leagues => competitions UEFA European Championship => competitions UEFA Nations League => competitions Champions League => competitions European Championship => competitions ``` If you have a large amount of entities and want to pre-embed them, please, refer to the following code snippet: ```python labels = ["your entities"] texts = ["your texts"] entity_embeddings = model.encode_labels(labels, batch_size = 8) outputs = model.batch_predict_with_embeds(texts, entity_embeddings, labels) ``` ### Benchmarks Below you can see the table with benchmarking results on various named entity recognition datasets: | Dataset | Score | |-----------------------|--------------| | ACE 2004 | 26.74% | | ACE 2005 | 29.86% | | AnatEM | 40.98% | | Broad Tweet Corpus | 64.60% | | CoNLL 2003 | 61.68% | | FabNER | 23.39% | | FindVehicle | 24.38% | | GENIA_NER | 48.51% | | HarveyNER | 11.06% | | MultiNERD | 63.14% | | Ontonotes | 27.29% | | PolyglotNER | 45.30% | | TweetNER7 | 37.81% | | WikiANN en | 54.08% | | WikiNeural | 72.98% | | bc2gm | 53.32% | | bc4chemd | 45.67% | | bc5cdr | 69.03% | | ncbi | 64.15% | | **Average** | **45.5%** | ||| | CrossNER_AI | 49.45% | | CrossNER_literature | 61.16% | | CrossNER_music | 65.39% | | CrossNER_politics | 72.10% | | CrossNER_science | 60.71% | | mit-movie | 34.41% | | mit-restaurant | 38.77% | | **Average (zero-shot benchmark)** | **54.6%** | ### Join Our Discord Connect with our community on Discord for news, support, and discussion about our models. Join [Discord](https://discord.gg/dkyeAgs9DG).
[ "NAMED_ENTITY_RECOGNITION" ]
[ "ANATEM", "BC5CDR" ]
RichardErkhov/EleutherAI_-_pythia-1.4b-gguf
RichardErkhov
null
[ "gguf", "arxiv:2304.01373", "arxiv:2101.00027", "arxiv:2201.07311", "endpoints_compatible", "region:us" ]
2024-11-01T16:18:29
2024-11-01T16:37:40
121
0
--- {} --- Quantization made by Richard Erkhov. [Github](https://github.com/RichardErkhov) [Discord](https://discord.gg/pvy7H8DZMG) [Request more models](https://github.com/RichardErkhov/quant_request) pythia-1.4b - GGUF - Model creator: https://huggingface.co/EleutherAI/ - Original model: https://huggingface.co/EleutherAI/pythia-1.4b/ | Name | Quant method | Size | | ---- | ---- | ---- | | [pythia-1.4b.Q2_K.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-1.4b-gguf/blob/main/pythia-1.4b.Q2_K.gguf) | Q2_K | 0.53GB | | [pythia-1.4b.Q3_K_S.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-1.4b-gguf/blob/main/pythia-1.4b.Q3_K_S.gguf) | Q3_K_S | 0.61GB | | [pythia-1.4b.Q3_K.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-1.4b-gguf/blob/main/pythia-1.4b.Q3_K.gguf) | Q3_K | 0.71GB | | [pythia-1.4b.Q3_K_M.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-1.4b-gguf/blob/main/pythia-1.4b.Q3_K_M.gguf) | Q3_K_M | 0.71GB | | [pythia-1.4b.Q3_K_L.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-1.4b-gguf/blob/main/pythia-1.4b.Q3_K_L.gguf) | Q3_K_L | 0.77GB | | [pythia-1.4b.IQ4_XS.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-1.4b-gguf/blob/main/pythia-1.4b.IQ4_XS.gguf) | IQ4_XS | 0.74GB | | [pythia-1.4b.Q4_0.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-1.4b-gguf/blob/main/pythia-1.4b.Q4_0.gguf) | Q4_0 | 0.77GB | | [pythia-1.4b.IQ4_NL.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-1.4b-gguf/blob/main/pythia-1.4b.IQ4_NL.gguf) | IQ4_NL | 0.78GB | | [pythia-1.4b.Q4_K_S.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-1.4b-gguf/blob/main/pythia-1.4b.Q4_K_S.gguf) | Q4_K_S | 0.78GB | | [pythia-1.4b.Q4_K.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-1.4b-gguf/blob/main/pythia-1.4b.Q4_K.gguf) | Q4_K | 0.85GB | | [pythia-1.4b.Q4_K_M.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-1.4b-gguf/blob/main/pythia-1.4b.Q4_K_M.gguf) | Q4_K_M | 0.85GB | | [pythia-1.4b.Q4_1.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-1.4b-gguf/blob/main/pythia-1.4b.Q4_1.gguf) | Q4_1 | 0.85GB | | [pythia-1.4b.Q5_0.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-1.4b-gguf/blob/main/pythia-1.4b.Q5_0.gguf) | Q5_0 | 0.92GB | | [pythia-1.4b.Q5_K_S.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-1.4b-gguf/blob/main/pythia-1.4b.Q5_K_S.gguf) | Q5_K_S | 0.81GB | | [pythia-1.4b.Q5_K.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-1.4b-gguf/blob/main/pythia-1.4b.Q5_K.gguf) | Q5_K | 0.98GB | | [pythia-1.4b.Q5_K_M.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-1.4b-gguf/blob/main/pythia-1.4b.Q5_K_M.gguf) | Q5_K_M | 0.98GB | | [pythia-1.4b.Q5_1.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-1.4b-gguf/blob/main/pythia-1.4b.Q5_1.gguf) | Q5_1 | 1.0GB | | [pythia-1.4b.Q6_K.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-1.4b-gguf/blob/main/pythia-1.4b.Q6_K.gguf) | Q6_K | 1.08GB | | [pythia-1.4b.Q8_0.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-1.4b-gguf/blob/main/pythia-1.4b.Q8_0.gguf) | Q8_0 | 1.4GB | Original model description: --- language: - en tags: - pytorch - causal-lm - pythia license: apache-2.0 datasets: - EleutherAI/the_pile --- The *Pythia Scaling Suite* is a collection of models developed to facilitate interpretability research [(see paper)](https://arxiv.org/pdf/2304.01373.pdf). 
It contains two sets of eight models of sizes 70M, 160M, 410M, 1B, 1.4B, 2.8B, 6.9B, and 12B. For each size, there are two models: one trained on the Pile, and one trained on the Pile after the dataset has been globally deduplicated. All 8 model sizes are trained on the exact same data, in the exact same order. We also provide 154 intermediate checkpoints per model, hosted on Hugging Face as branches. The Pythia model suite was deliberately designed to promote scientific research on large language models, especially interpretability research. Despite not centering downstream performance as a design goal, we find the models <a href="#evaluations">match or exceed</a> the performance of similar and same-sized models, such as those in the OPT and GPT-Neo suites. <details> <summary style="font-weight:600">Details on previous early release and naming convention.</summary> Previously, we released an early version of the Pythia suite to the public. However, we decided to retrain the model suite to address a few hyperparameter discrepancies. This model card <a href="#changelog">lists the changes</a>; see appendix B in the Pythia paper for further discussion. We found no difference in benchmark performance between the two Pythia versions. The old models are [still available](https://huggingface.co/models?other=pythia_v0), but we suggest the retrained suite if you are just starting to use Pythia.<br> **This is the current release.** Please note that all models in the *Pythia* suite were renamed in January 2023. For clarity, a <a href="#naming-convention-and-parameter-count">table comparing the old and new names</a> is provided in this model card, together with exact parameter counts. </details> <br> # Pythia-1.4B ## Model Details - Developed by: [EleutherAI](http://eleuther.ai) - Model type: Transformer-based Language Model - Language: English - Learn more: [Pythia's GitHub repository](https://github.com/EleutherAI/pythia) for training procedure, config files, and details on how to use. [See paper](https://arxiv.org/pdf/2304.01373.pdf) for more evals and implementation details. - Library: [GPT-NeoX](https://github.com/EleutherAI/gpt-neox) - License: Apache 2.0 - Contact: to ask questions about this model, join the [EleutherAI Discord](https://discord.gg/zBGx3azzUn), and post them in `#release-discussion`. Please read the existing *Pythia* documentation before asking about it in the EleutherAI Discord. For general correspondence: [contact@eleuther. ai](mailto:[email protected]). <figure> | Pythia model | Non-Embedding Params | Layers | Model Dim | Heads | Batch Size | Learning Rate | Equivalent Models | | -----------: | -------------------: | :----: | :-------: | :---: | :--------: | :-------------------: | :--------------------: | | 70M | 18,915,328 | 6 | 512 | 8 | 2M | 1.0 x 10<sup>-3</sup> | — | | 160M | 85,056,000 | 12 | 768 | 12 | 2M | 6.0 x 10<sup>-4</sup> | GPT-Neo 125M, OPT-125M | | 410M | 302,311,424 | 24 | 1024 | 16 | 2M | 3.0 x 10<sup>-4</sup> | OPT-350M | | 1.0B | 805,736,448 | 16 | 2048 | 8 | 2M | 3.0 x 10<sup>-4</sup> | — | | 1.4B | 1,208,602,624 | 24 | 2048 | 16 | 2M | 2.0 x 10<sup>-4</sup> | GPT-Neo 1.3B, OPT-1.3B | | 2.8B | 2,517,652,480 | 32 | 2560 | 32 | 2M | 1.6 x 10<sup>-4</sup> | GPT-Neo 2.7B, OPT-2.7B | | 6.9B | 6,444,163,072 | 32 | 4096 | 32 | 2M | 1.2 x 10<sup>-4</sup> | OPT-6.7B | | 12B | 11,327,027,200 | 36 | 5120 | 40 | 2M | 1.2 x 10<sup>-4</sup> | — | <figcaption>Engineering details for the <i>Pythia Suite</i>. 
Deduped and non-deduped models of a given size have the same hyperparameters. “Equivalent” models have <b>exactly</b> the same architecture, and the same number of non-embedding parameters.</figcaption> </figure> ## Uses and Limitations ### Intended Use The primary intended use of Pythia is research on the behavior, functionality, and limitations of large language models. This suite is intended to provide a controlled setting for performing scientific experiments. We also provide 154 checkpoints per model: initial `step0`, 10 log-spaced checkpoints `step{1,2,4...512}`, and 143 evenly-spaced checkpoints from `step1000` to `step143000`. These checkpoints are hosted on Hugging Face as branches. Note that branch `143000` corresponds exactly to the model checkpoint on the `main` branch of each model. You may also further fine-tune and adapt Pythia-1.4B for deployment, as long as your use is in accordance with the Apache 2.0 license. Pythia models work with the Hugging Face [Transformers Library](https://huggingface.co/docs/transformers/index). If you decide to use pre-trained Pythia-1.4B as a basis for your fine-tuned model, please conduct your own risk and bias assessment. ### Out-of-scope use The Pythia Suite is **not** intended for deployment. It is not in itself a product and cannot be used for human-facing interactions. For example, the model may generate harmful or offensive text. Please evaluate the risks associated with your particular use case. Pythia models are English-language only, and are not suitable for translation or generating text in other languages. Pythia-1.4B has not been fine-tuned for downstream contexts in which language models are commonly deployed, such as writing genre prose or commercial chatbots. This means Pythia-1.4B will **not** respond to a given prompt the way a product like ChatGPT does. This is because, unlike this model, ChatGPT was fine-tuned using methods such as Reinforcement Learning from Human Feedback (RLHF) to better “follow” human instructions. ### Limitations and biases The core functionality of a large language model is to take a string of text and predict the next token. The statistically most likely next token need not produce the most “accurate” text. Never rely on Pythia-1.4B to produce factually accurate output. This model was trained on [the Pile](https://pile.eleuther.ai/), a dataset known to contain profanity and texts that are lewd or otherwise offensive. See [Section 6 of the Pile paper](https://arxiv.org/abs/2101.00027) for a discussion of documented biases with regard to gender, religion, and race. Pythia-1.4B may produce socially unacceptable or undesirable text, *even if* the prompt itself does not include anything explicitly offensive. If you plan on using text generated through, for example, the Hosted Inference API, we recommend having a human curate the outputs of this language model before presenting it to other people. Please inform your audience that the text was generated by Pythia-1.4B.
### Quickstart Pythia models can be loaded and used via the following code, demonstrated here for the third `pythia-70m-deduped` checkpoint: ```python from transformers import GPTNeoXForCausalLM, AutoTokenizer model = GPTNeoXForCausalLM.from_pretrained( "EleutherAI/pythia-70m-deduped", revision="step3000", cache_dir="./pythia-70m-deduped/step3000", ) tokenizer = AutoTokenizer.from_pretrained( "EleutherAI/pythia-70m-deduped", revision="step3000", cache_dir="./pythia-70m-deduped/step3000", ) inputs = tokenizer("Hello, I am", return_tensors="pt") tokens = model.generate(**inputs) tokenizer.decode(tokens[0]) ``` Revision/branch `step143000` corresponds exactly to the model checkpoint on the `main` branch of each model.<br> For more information on how to use all Pythia models, see [documentation on GitHub](https://github.com/EleutherAI/pythia). ## Training ### Training data [The Pile](https://pile.eleuther.ai/) is an 825GiB general-purpose dataset in English. It was created by EleutherAI specifically for training large language models. It contains texts from 22 diverse sources, roughly broken down into five categories: academic writing (e.g. arXiv), internet (e.g. CommonCrawl), prose (e.g. Project Gutenberg), dialogue (e.g. YouTube subtitles), and miscellaneous (e.g. GitHub, Enron Emails). See [the Pile paper](https://arxiv.org/abs/2101.00027) for a breakdown of all data sources, methodology, and a discussion of ethical implications. Consult [the datasheet](https://arxiv.org/abs/2201.07311) for more detailed documentation about the Pile and its component datasets. The Pile can be downloaded from the [official website](https://pile.eleuther.ai/), or from a [community mirror](https://the-eye.eu/public/AI/pile/).<br> The Pile was **not** deduplicated before being used to train Pythia-1.4B. ### Training procedure All models were trained on the exact same data, in the exact same order. Each model saw 299,892,736,000 tokens during training, and 143 checkpoints for each model are saved every 2,097,152,000 tokens, spaced evenly throughout training, from `step1000` to `step143000` (which is the same as `main`). In addition, we provide frequent early checkpoints: `step0` and `step{1,2,4...512}`. This corresponds to training for just under 1 epoch on the Pile for non-deduplicated models, and about 1.5 epochs on the deduplicated Pile. All *Pythia* models were trained for 143000 steps at a batch size of 2M (2,097,152 tokens).<br> See [GitHub](https://github.com/EleutherAI/pythia) for more details on training procedure, including [how to reproduce it](https://github.com/EleutherAI/pythia/blob/main/README.md#reproducing-training).<br> Pythia uses the same tokenizer as [GPT-NeoX-20B](https://huggingface.co/EleutherAI/gpt-neox-20b). ## Evaluations All 16 *Pythia* models were evaluated using the [LM Evaluation Harness](https://github.com/EleutherAI/lm-evaluation-harness). You can access the results by model and step at `results/json/*` in the [GitHub repository](https://github.com/EleutherAI/pythia/tree/main/results/json/).<br> Expand the sections below to see plots of evaluation results for all Pythia and Pythia-deduped models compared with OPT and BLOOM. 
<details> <summary>LAMBADA – OpenAI</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/lambada_openai_v1.png" style="width:auto"/> </details> <details> <summary>Physical Interaction: Question Answering (PIQA)</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/piqa_v1.png" style="width:auto"/> </details> <details> <summary>WinoGrande</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/winogrande_v1.png" style="width:auto"/> </details> <details> <summary>AI2 Reasoning Challenge—Easy Set</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/arc_easy_v1.png" style="width:auto"/> </details> <details> <summary>SciQ</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/sciq_v1.png" style="width:auto"/> </details> ## Changelog This section compares differences between previously released [Pythia v0](https://huggingface.co/models?other=pythia_v0) and the current models. See Appendix B of the Pythia paper for further discussion of these changes and the motivation behind them. We found that retraining Pythia had no impact on benchmark performance. - All model sizes are now trained with uniform batch size of 2M tokens. Previously, the models of size 160M, 410M, and 1.4B parameters were trained with batch sizes of 4M tokens. - We added checkpoints at initialization (step 0) and steps {1,2,4,8,16,32,64, 128,256,512} in addition to every 1000 training steps. - Flash Attention was used in the new retrained suite. - We remedied a minor inconsistency that existed in the original suite: all models of size 2.8B parameters or smaller had a learning rate (LR) schedule which decayed to a minimum LR of 10% the starting LR rate, but the 6.9B and 12B models all used an LR schedule which decayed to a minimum LR of 0. In the redone training runs, we rectified this inconsistency: all models now were trained with LR decaying to a minimum of 0.1× their maximum LR. ### Naming convention and parameter count *Pythia* models were renamed in January 2023. It is possible that the old naming convention still persists in some documentation by accident. The current naming convention (70M, 160M, etc.) is based on total parameter count. <figure style="width:32em"> | current Pythia suffix | old suffix | total params | non-embedding params | | --------------------: | ---------: | -------------: | -------------------: | | 70M | 19M | 70,426,624 | 18,915,328 | | 160M | 125M | 162,322,944 | 85,056,000 | | 410M | 350M | 405,334,016 | 302,311,424 | | 1B | 800M | 1,011,781,632 | 805,736,448 | | 1.4B | 1.3B | 1,414,647,808 | 1,208,602,624 | | 2.8B | 2.7B | 2,775,208,960 | 2,517,652,480 | | 6.9B | 6.7B | 6,857,302,016 | 6,444,163,072 | | 12B | 13B | 11,846,072,320 | 11,327,027,200 | </figure>
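Since this repository distributes the GGUF quantizations listed in the table at the top of this card, they can be run with llama.cpp or its Python bindings rather than the Transformers quickstart above. The snippet below is a minimal sketch using `llama-cpp-python`; the specific file (`pythia-1.4b.Q4_K_M.gguf`), local path, and generation settings are illustrative assumptions and not part of the original model card.

```python
# pip install llama-cpp-python
from llama_cpp import Llama

# Load one of the quantized files from the table above (downloaded locally).
# Q4_K_M is a common quality/size trade-off; any of the listed files loads the same way.
llm = Llama(model_path="pythia-1.4b.Q4_K_M.gguf", n_ctx=2048)

# Plain text completion, mirroring the "Hello, I am" prompt from the original quickstart.
out = llm("Hello, I am", max_tokens=32)
print(out["choices"][0]["text"])
```

As a general rule, the lower-bit files (Q2_K, Q3_K_*) save memory at some cost in output quality, while Q8_0 stays closest to the original weights.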
[ "QUESTION_ANSWERING", "TRANSLATION" ]
[ "SCIQ" ]
keeeeenw/MicroLlama-text-embedding
keeeeenw
sentence-similarity
[ "sentence-transformers", "safetensors", "llama", "sentence-similarity", "feature-extraction", "generated_from_trainer", "dataset_size:65749", "loss:MultipleNegativesRankingLoss", "loss:SoftmaxLoss", "loss:CoSENTLoss", "en", "dataset:sentence-transformers/all-nli", "dataset:sentence-transformers/stsb", "dataset:sentence-transformers/quora-duplicates", "dataset:sentence-transformers/natural-questions", "arxiv:1908.10084", "arxiv:1705.00652", "base_model:keeeeenw/MicroLlama", "base_model:finetune:keeeeenw/MicroLlama", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2024-11-11T02:14:04
2024-11-11T02:15:01
121
1
--- base_model: keeeeenw/MicroLlama datasets: - sentence-transformers/all-nli - sentence-transformers/stsb - sentence-transformers/quora-duplicates - sentence-transformers/natural-questions language: - en library_name: sentence-transformers pipeline_tag: sentence-similarity tags: - sentence-transformers - sentence-similarity - feature-extraction - generated_from_trainer - dataset_size:65749 - loss:MultipleNegativesRankingLoss - loss:SoftmaxLoss - loss:CoSENTLoss widget: - source_sentence: A construction worker is standing on a crane placing a large arm on top of a stature in progress. sentences: - The man is wearing black. - A person standing - Nobody is standing - source_sentence: A boy in red slides down an inflatable ride. sentences: - A man holding a drill stands next to a girl holding a vacuum hose. - A boy is playing on an inflatable ride. - A boy pierces a knife through an inflatable ride. - source_sentence: An animal is chewing on something. sentences: - A dog with a red leash still attached chases over the grass toward a tennis ball. - A man is eating something. - An animal is chewing on a key chain. - source_sentence: What are some good books or references to get started with machine learning? sentences: - What caused the British Empire to fall? - How should I go about learning Machine Learning? - Can an infinite amount of dark or vacuum or gravitational energy be created with expansion? - source_sentence: How do I attract a girl? sentences: - How can I attract girls? - Why isn't my iPhone 5 charging? - What would the world be like now in 2016 if Hitler's Germany won the war? --- # SentenceTransformer based on keeeeenw/MicroLlama This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [keeeeenw/MicroLlama](https://huggingface.co/keeeeenw/MicroLlama) on the [all-nli-pair](https://huggingface.co/datasets/sentence-transformers/all-nli), [all-nli-pair-class](https://huggingface.co/datasets/sentence-transformers/all-nli), [all-nli-pair-score](https://huggingface.co/datasets/sentence-transformers/all-nli), [all-nli-triplet](https://huggingface.co/datasets/sentence-transformers/all-nli), [stsb](https://huggingface.co/datasets/sentence-transformers/stsb), [quora](https://huggingface.co/datasets/sentence-transformers/quora-duplicates) and [natural-questions](https://huggingface.co/datasets/sentence-transformers/natural-questions) datasets. It maps sentences & paragraphs to a 1024-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more. 
## Model Details ### Model Description - **Model Type:** Sentence Transformer - **Base model:** [keeeeenw/MicroLlama](https://huggingface.co/keeeeenw/MicroLlama) <!-- at revision 6403f6afc9c3a34b877603fab3d525842d353b1c --> - **Maximum Sequence Length:** 2048 tokens - **Output Dimensionality:** 1024 tokens - **Similarity Function:** Cosine Similarity - **Training Datasets:** - [all-nli-pair](https://huggingface.co/datasets/sentence-transformers/all-nli) - [all-nli-pair-class](https://huggingface.co/datasets/sentence-transformers/all-nli) - [all-nli-pair-score](https://huggingface.co/datasets/sentence-transformers/all-nli) - [all-nli-triplet](https://huggingface.co/datasets/sentence-transformers/all-nli) - [stsb](https://huggingface.co/datasets/sentence-transformers/stsb) - [quora](https://huggingface.co/datasets/sentence-transformers/quora-duplicates) - [natural-questions](https://huggingface.co/datasets/sentence-transformers/natural-questions) - **Language:** en <!-- - **License:** Unknown --> ### Model Sources - **Documentation:** [Sentence Transformers Documentation](https://sbert.net) - **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers) - **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers) ### Full Model Architecture ``` SentenceTransformer( (0): Transformer({'max_seq_length': 2048, 'do_lower_case': False}) with Transformer model: LlamaModel (1): Pooling({'word_embedding_dimension': 1024, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True}) ) ``` ## Usage ### Direct Usage (Sentence Transformers) First install the Sentence Transformers library: ```bash pip install -U sentence-transformers ``` Then you can load this model and run inference. ```python from sentence_transformers import SentenceTransformer # Download from the 🤗 Hub model = SentenceTransformer("keeeeenw/MicroLlama-text-embedding") # Run inference sentences = [ 'How do I attract a girl?', 'How can I attract girls?', "Why isn't my iPhone 5 charging?", ] embeddings = model.encode(sentences) print(embeddings.shape) # [3, 1024] # Get the similarity scores for the embeddings similarities = model.similarity(embeddings, embeddings) print(similarities.shape) # [3, 3] ``` <!-- ### Direct Usage (Transformers) <details><summary>Click to see the direct usage in Transformers</summary> </details> --> <!-- ### Downstream Usage (Sentence Transformers) You can finetune this model on your own dataset. <details><summary>Click to expand</summary> </details> --> <!-- ### Out-of-Scope Use *List how the model may foreseeably be misused and address what users ought not to do with the model.* --> <!-- ## Bias, Risks and Limitations *What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.* --> <!-- ### Recommendations *What are recommendations with respect to the foreseeable issues? 
For example, filtering explicit content.* --> ## Training Details ### Training Datasets #### all-nli-pair * Dataset: [all-nli-pair](https://huggingface.co/datasets/sentence-transformers/all-nli) at [d482672](https://huggingface.co/datasets/sentence-transformers/all-nli/tree/d482672c8e74ce18da116f430137434ba2e52fab) * Size: 10,000 training samples * Columns: <code>anchor</code> and <code>positive</code> * Approximate statistics based on the first 1000 samples: | | anchor | positive | |:--------|:----------------------------------------------------------------------------------|:---------------------------------------------------------------------------------| | type | string | string | | details | <ul><li>min: 4 tokens</li><li>mean: 18.11 tokens</li><li>max: 72 tokens</li></ul> | <ul><li>min: 3 tokens</li><li>mean: 9.46 tokens</li><li>max: 34 tokens</li></ul> | * Samples: | anchor | positive | |:---------------------------------------------------------------------------|:-------------------------------------------------| | <code>A person on a horse jumps over a broken down airplane.</code> | <code>A person is outdoors, on a horse.</code> | | <code>Children smiling and waving at camera</code> | <code>There are children present</code> | | <code>A boy is jumping on skateboard in the middle of a red bridge.</code> | <code>The boy does a skateboarding trick.</code> | * Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters: ```json { "scale": 20.0, "similarity_fct": "cos_sim" } ``` #### all-nli-pair-class * Dataset: [all-nli-pair-class](https://huggingface.co/datasets/sentence-transformers/all-nli) at [d482672](https://huggingface.co/datasets/sentence-transformers/all-nli/tree/d482672c8e74ce18da116f430137434ba2e52fab) * Size: 10,000 training samples * Columns: <code>premise</code>, <code>hypothesis</code>, and <code>label</code> * Approximate statistics based on the first 1000 samples: | | premise | hypothesis | label | |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:-------------------------------------------------------------------| | type | string | string | int | | details | <ul><li>min: 6 tokens</li><li>mean: 18.54 tokens</li><li>max: 55 tokens</li></ul> | <ul><li>min: 3 tokens</li><li>mean: 10.78 tokens</li><li>max: 37 tokens</li></ul> | <ul><li>0: ~33.40%</li><li>1: ~33.30%</li><li>2: ~33.30%</li></ul> | * Samples: | premise | hypothesis | label | |:--------------------------------------------------------------------|:---------------------------------------------------------------|:---------------| | <code>A person on a horse jumps over a broken down airplane.</code> | <code>A person is training his horse for a competition.</code> | <code>1</code> | | <code>A person on a horse jumps over a broken down airplane.</code> | <code>A person is at a diner, ordering an omelette.</code> | <code>2</code> | | <code>A person on a horse jumps over a broken down airplane.</code> | <code>A person is outdoors, on a horse.</code> | <code>0</code> | * Loss: [<code>SoftmaxLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#softmaxloss) #### all-nli-pair-score * Dataset: [all-nli-pair-score](https://huggingface.co/datasets/sentence-transformers/all-nli) at 
[d482672](https://huggingface.co/datasets/sentence-transformers/all-nli/tree/d482672c8e74ce18da116f430137434ba2e52fab) * Size: 10,000 training samples * Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>score</code> * Approximate statistics based on the first 1000 samples: | | sentence1 | sentence2 | score | |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:--------------------------------------------------------------| | type | string | string | float | | details | <ul><li>min: 6 tokens</li><li>mean: 18.54 tokens</li><li>max: 55 tokens</li></ul> | <ul><li>min: 3 tokens</li><li>mean: 10.78 tokens</li><li>max: 37 tokens</li></ul> | <ul><li>min: 0.0</li><li>mean: 0.5</li><li>max: 1.0</li></ul> | * Samples: | sentence1 | sentence2 | score | |:--------------------------------------------------------------------|:---------------------------------------------------------------|:-----------------| | <code>A person on a horse jumps over a broken down airplane.</code> | <code>A person is training his horse for a competition.</code> | <code>0.5</code> | | <code>A person on a horse jumps over a broken down airplane.</code> | <code>A person is at a diner, ordering an omelette.</code> | <code>0.0</code> | | <code>A person on a horse jumps over a broken down airplane.</code> | <code>A person is outdoors, on a horse.</code> | <code>1.0</code> | * Loss: [<code>CoSENTLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cosentloss) with these parameters: ```json { "scale": 20.0, "similarity_fct": "pairwise_cos_sim" } ``` #### all-nli-triplet * Dataset: [all-nli-triplet](https://huggingface.co/datasets/sentence-transformers/all-nli) at [d482672](https://huggingface.co/datasets/sentence-transformers/all-nli/tree/d482672c8e74ce18da116f430137434ba2e52fab) * Size: 10,000 training samples * Columns: <code>anchor</code>, <code>positive</code>, and <code>negative</code> * Approximate statistics based on the first 1000 samples: | | anchor | positive | negative | |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------| | type | string | string | string | | details | <ul><li>min: 6 tokens</li><li>mean: 10.37 tokens</li><li>max: 50 tokens</li></ul> | <ul><li>min: 5 tokens</li><li>mean: 13.04 tokens</li><li>max: 41 tokens</li></ul> | <ul><li>min: 5 tokens</li><li>mean: 13.74 tokens</li><li>max: 54 tokens</li></ul> | * Samples: | anchor | positive | negative | |:---------------------------------------------------------------------------|:-------------------------------------------------|:-----------------------------------------------------------| | <code>A person on a horse jumps over a broken down airplane.</code> | <code>A person is outdoors, on a horse.</code> | <code>A person is at a diner, ordering an omelette.</code> | | <code>Children smiling and waving at camera</code> | <code>There are children present</code> | <code>The kids are frowning</code> | | <code>A boy is jumping on skateboard in the middle of a red bridge.</code> | <code>The boy does a skateboarding trick.</code> | <code>The boy skates down the sidewalk.</code> | * Loss: 
[<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters: ```json { "scale": 20.0, "similarity_fct": "cos_sim" } ``` #### stsb * Dataset: [stsb](https://huggingface.co/datasets/sentence-transformers/stsb) at [ab7a5ac](https://huggingface.co/datasets/sentence-transformers/stsb/tree/ab7a5ac0e35aa22088bdcf23e7fd99b220e53308) * Size: 5,749 training samples * Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>score</code> * Approximate statistics based on the first 1000 samples: | | sentence1 | sentence2 | score | |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:---------------------------------------------------------------| | type | string | string | float | | details | <ul><li>min: 5 tokens</li><li>mean: 10.21 tokens</li><li>max: 31 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 10.19 tokens</li><li>max: 28 tokens</li></ul> | <ul><li>min: 0.0</li><li>mean: 0.54</li><li>max: 1.0</li></ul> | * Samples: | sentence1 | sentence2 | score | |:-----------------------------------------------------------|:----------------------------------------------------------------------|:------------------| | <code>A plane is taking off.</code> | <code>An air plane is taking off.</code> | <code>1.0</code> | | <code>A man is playing a large flute.</code> | <code>A man is playing a flute.</code> | <code>0.76</code> | | <code>A man is spreading shreded cheese on a pizza.</code> | <code>A man is spreading shredded cheese on an uncooked pizza.</code> | <code>0.76</code> | * Loss: [<code>CoSENTLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cosentloss) with these parameters: ```json { "scale": 20.0, "similarity_fct": "pairwise_cos_sim" } ``` #### quora * Dataset: [quora](https://huggingface.co/datasets/sentence-transformers/quora-duplicates) at [451a485](https://huggingface.co/datasets/sentence-transformers/quora-duplicates/tree/451a4850bd141edb44ade1b5828c259abd762cdb) * Size: 10,000 training samples * Columns: <code>anchor</code> and <code>positive</code> * Approximate statistics based on the first 1000 samples: | | anchor | positive | |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------| | type | string | string | | details | <ul><li>min: 5 tokens</li><li>mean: 14.26 tokens</li><li>max: 45 tokens</li></ul> | <ul><li>min: 5 tokens</li><li>mean: 14.48 tokens</li><li>max: 49 tokens</li></ul> | * Samples: | anchor | positive | |:----------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------| | <code>Astrology: I am a Capricorn Sun Cap moon and cap rising...what does that say about me?</code> | <code>I'm a triple Capricorn (Sun, Moon and ascendant in Capricorn) What does this say about me?</code> | | <code>How can I be a good geologist?</code> | <code>What should I do to be a great geologist?</code> | | <code>How do I read and find my YouTube comments?</code> | <code>How can I see all my Youtube comments?</code> | * Loss: 
[<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters: ```json { "scale": 20.0, "similarity_fct": "cos_sim" } ``` #### natural-questions * Dataset: [natural-questions](https://huggingface.co/datasets/sentence-transformers/natural-questions) at [f9e894e](https://huggingface.co/datasets/sentence-transformers/natural-questions/tree/f9e894e1081e206e577b4eaa9ee6de2b06ae6f17) * Size: 10,000 training samples * Columns: <code>query</code> and <code>answer</code> * Approximate statistics based on the first 1000 samples: | | query | answer | |:--------|:----------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------| | type | string | string | | details | <ul><li>min: 9 tokens</li><li>mean: 12.46 tokens</li><li>max: 25 tokens</li></ul> | <ul><li>min: 18 tokens</li><li>mean: 160.85 tokens</li><li>max: 611 tokens</li></ul> | * Samples: | query | answer | |:----------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | <code>when did richmond last play in a preliminary final</code> | <code>Richmond Football Club Richmond began 2017 with 5 straight wins, a feat it had not achieved since 1995. A series of close losses hampered the Tigers throughout the middle of the season, including a 5-point loss to the Western Bulldogs, 2-point loss to Fremantle, and a 3-point loss to the Giants. Richmond ended the season strongly with convincing victories over Fremantle and St Kilda in the final two rounds, elevating the club to 3rd on the ladder. Richmond's first final of the season against the Cats at the MCG attracted a record qualifying final crowd of 95,028; the Tigers won by 51 points. 
Having advanced to the first preliminary finals for the first time since 2001, Richmond defeated Greater Western Sydney by 36 points in front of a crowd of 94,258 to progress to the Grand Final against Adelaide, their first Grand Final appearance since 1982. The attendance was 100,021, the largest crowd to a grand final since 1986. The Crows led at quarter time and led by as many as 13, but the Tigers took over the game as it progressed and scored seven straight goals at one point. They eventually would win by 48 points – 16.12 (108) to Adelaide's 8.12 (60) – to end their 37-year flag drought.[22] Dustin Martin also became the first player to win a Premiership medal, the Brownlow Medal and the Norm Smith Medal in the same season, while Damien Hardwick was named AFL Coaches Association Coach of the Year. Richmond's jump from 13th to premiers also marked the biggest jump from one AFL season to the next.</code> | | <code>who sang what in the world's come over you</code> | <code>Jack Scott (singer) At the beginning of 1960, Scott again changed record labels, this time to Top Rank Records.[1] He then recorded four Billboard Hot 100 hits – "What in the World's Come Over You" (#5), "Burning Bridges" (#3) b/w "Oh Little One" (#34), and "It Only Happened Yesterday" (#38).[1] "What in the World's Come Over You" was Scott's second gold disc winner.[6] Scott continued to record and perform during the 1960s and 1970s.[1] His song "You're Just Gettin' Better" reached the country charts in 1974.[1] In May 1977, Scott recorded a Peel session for BBC Radio 1 disc jockey, John Peel.</code> | | <code>who produces the most wool in the world</code> | <code>Wool Global wool production is about 2 million tonnes per year, of which 60% goes into apparel. Wool comprises ca 3% of the global textile market, but its value is higher owing to dying and other modifications of the material.[1] Australia is a leading producer of wool which is mostly from Merino sheep but has been eclipsed by China in terms of total weight.[30] New Zealand (2016) is the third-largest producer of wool, and the largest producer of crossbred wool. 
Breeds such as Lincoln, Romney, Drysdale, and Elliotdale produce coarser fibers, and wool from these sheep is usually used for making carpets.</code> | * Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters: ```json { "scale": 20.0, "similarity_fct": "cos_sim" } ``` ### Evaluation Datasets #### all-nli-triplet * Dataset: [all-nli-triplet](https://huggingface.co/datasets/sentence-transformers/all-nli) at [d482672](https://huggingface.co/datasets/sentence-transformers/all-nli/tree/d482672c8e74ce18da116f430137434ba2e52fab) * Size: 6,584 evaluation samples * Columns: <code>anchor</code>, <code>positive</code>, and <code>negative</code> * Approximate statistics based on the first 1000 samples: | | anchor | positive | negative | |:--------|:----------------------------------------------------------------------------------|:---------------------------------------------------------------------------------|:----------------------------------------------------------------------------------| | type | string | string | string | | details | <ul><li>min: 5 tokens</li><li>mean: 19.38 tokens</li><li>max: 89 tokens</li></ul> | <ul><li>min: 4 tokens</li><li>mean: 9.77 tokens</li><li>max: 35 tokens</li></ul> | <ul><li>min: 4 tokens</li><li>mean: 10.49 tokens</li><li>max: 30 tokens</li></ul> | * Samples: | anchor | positive | negative | |:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------|:--------------------------------------------------------| | <code>Two women are embracing while holding to go packages.</code> | <code>Two woman are holding packages.</code> | <code>The men are fighting outside a deli.</code> | | <code>Two young children in blue jerseys, one with the number 9 and one with the number 2 are standing on wooden steps in a bathroom and washing their hands in a sink.</code> | <code>Two kids in numbered jerseys wash their hands.</code> | <code>Two kids in jackets walk to school.</code> | | <code>A man selling donuts to a customer during a world exhibition event held in the city of Angeles</code> | <code>A man selling donuts to a customer.</code> | <code>A woman drinks her coffee in a small cafe.</code> | * Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters: ```json { "scale": 20.0, "similarity_fct": "cos_sim" } ``` #### stsb * Dataset: [stsb](https://huggingface.co/datasets/sentence-transformers/stsb) at [ab7a5ac](https://huggingface.co/datasets/sentence-transformers/stsb/tree/ab7a5ac0e35aa22088bdcf23e7fd99b220e53308) * Size: 1,500 evaluation samples * Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>score</code> * Approximate statistics based on the first 1000 samples: | | sentence1 | sentence2 | score | |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:---------------------------------------------------------------| | type | string | string | float | | details | <ul><li>min: 4 tokens</li><li>mean: 15.54 tokens</li><li>max: 49 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 15.46 tokens</li><li>max: 54 
tokens</li></ul> | <ul><li>min: 0.0</li><li>mean: 0.47</li><li>max: 1.0</li></ul> | * Samples: | sentence1 | sentence2 | score | |:--------------------------------------------------|:------------------------------------------------------|:------------------| | <code>A man with a hard hat is dancing.</code> | <code>A man wearing a hard hat is dancing.</code> | <code>1.0</code> | | <code>A young child is riding a horse.</code> | <code>A child is riding a horse.</code> | <code>0.95</code> | | <code>A man is feeding a mouse to a snake.</code> | <code>The man is feeding a mouse to the snake.</code> | <code>1.0</code> | * Loss: [<code>CoSENTLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cosentloss) with these parameters: ```json { "scale": 20.0, "similarity_fct": "pairwise_cos_sim" } ``` #### quora * Dataset: [quora](https://huggingface.co/datasets/sentence-transformers/quora-duplicates) at [451a485](https://huggingface.co/datasets/sentence-transformers/quora-duplicates/tree/451a4850bd141edb44ade1b5828c259abd762cdb) * Size: 1,000 evaluation samples * Columns: <code>anchor</code> and <code>positive</code> * Approximate statistics based on the first 1000 samples: | | anchor | positive | |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------| | type | string | string | | details | <ul><li>min: 6 tokens</li><li>mean: 14.43 tokens</li><li>max: 68 tokens</li></ul> | <ul><li>min: 5 tokens</li><li>mean: 14.47 tokens</li><li>max: 55 tokens</li></ul> | * Samples: | anchor | positive | |:----------------------------------------------------------------------------|:--------------------------------------------------------------------------------| | <code>What is your New Year resolution?</code> | <code>What can be my new year resolution for 2017?</code> | | <code>Should I buy the IPhone 6s or Samsung Galaxy s7?</code> | <code>Which is better: the iPhone 6S Plus or the Samsung Galaxy S7 Edge?</code> | | <code>What are the differences between transgression and regression?</code> | <code>What is the difference between transgression and regression?</code> | * Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters: ```json { "scale": 20.0, "similarity_fct": "cos_sim" } ``` #### natural-questions * Dataset: [natural-questions](https://huggingface.co/datasets/sentence-transformers/natural-questions) at [f9e894e](https://huggingface.co/datasets/sentence-transformers/natural-questions/tree/f9e894e1081e206e577b4eaa9ee6de2b06ae6f17) * Size: 1,000 evaluation samples * Columns: <code>query</code> and <code>answer</code> * Approximate statistics based on the first 1000 samples: | | query | answer | |:--------|:---------------------------------------------------------------------------------|:------------------------------------------------------------------------------------| | type | string | string | | details | <ul><li>min: 9 tokens</li><li>mean: 12.5 tokens</li><li>max: 26 tokens</li></ul> | <ul><li>min: 24 tokens</li><li>mean: 164.3 tokens</li><li>max: 708 tokens</li></ul> | * Samples: | query | answer | 
|:--------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | <code>where does the waikato river begin and end</code> | <code>Waikato River The Waikato River is the longest river in New Zealand, running for 425 kilometres (264 mi) through the North Island. It rises in the eastern slopes of Mount Ruapehu, joining the Tongariro River system and flowing through Lake Taupo, New Zealand's largest lake. It then drains Taupo at the lake's northeastern edge, creates the Huka Falls, and flows northwest through the Waikato Plains. It empties into the Tasman Sea south of Auckland, at Port Waikato. It gives its name to the Waikato Region that surrounds the Waikato Plains. The present course of the river was largely formed about 17,000 years ago. Contributing factors were climate warming, forest being reestablished in the river headwaters and the deepening, rather than widening, of the existing river channel. The channel was gradually eroded as far up river as Piarere, leaving the old Hinuera channel high and dry.[2] The remains of the old river path can be clearly seen at Hinuera where the cliffs mark the ancient river edges. The river's main tributary is the Waipa River, which has its confluence with the Waikato at Ngaruawahia.</code> | | <code>what type of gas is produced during fermentation</code> | <code>Fermentation Fermentation reacts NADH with an endogenous, organic electron acceptor.[1] Usually this is pyruvate formed from sugar through glycolysis. The reaction produces NAD+ and an organic product, typical examples being ethanol, lactic acid, carbon dioxide, and hydrogen gas (H2). However, more exotic compounds can be produced by fermentation, such as butyric acid and acetone. Fermentation products contain chemical energy (they are not fully oxidized), but are considered waste products, since they cannot be metabolized further without the use of oxygen.</code> | | <code>why was star wars episode iv released first</code> | <code>Star Wars (film) Star Wars (later retitled Star Wars: Episode IV – A New Hope) is a 1977 American epic space opera film written and directed by George Lucas. It is the first film in the original Star Wars trilogy and the beginning of the Star Wars franchise. 
Starring Mark Hamill, Harrison Ford, Carrie Fisher, Peter Cushing, Alec Guinness, David Prowse, James Earl Jones, Anthony Daniels, Kenny Baker, and Peter Mayhew, the film's plot focuses on the Rebel Alliance, led by Princess Leia (Fisher), and its attempt to destroy the Galactic Empire's space station, the Death Star. This conflict disrupts the isolated life of farmhand Luke Skywalker (Hamill), who inadvertently acquires two droids that possess stolen architectural plans for the Death Star. When the Empire begins a destructive search for the missing droids, Skywalker accompanies Jedi Master Obi-Wan Kenobi (Guinness) on a mission to return the plans to the Rebel Alliance and rescue Leia from her imprisonment by the Empire.</code> | * Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters: ```json { "scale": 20.0, "similarity_fct": "cos_sim" } ``` ### Training Hyperparameters #### Non-Default Hyperparameters - `per_device_train_batch_size`: 6 - `per_device_eval_batch_size`: 6 #### All Hyperparameters <details><summary>Click to expand</summary> - `overwrite_output_dir`: False - `do_predict`: False - `eval_strategy`: no - `prediction_loss_only`: True - `per_device_train_batch_size`: 6 - `per_device_eval_batch_size`: 6 - `per_gpu_train_batch_size`: None - `per_gpu_eval_batch_size`: None - `gradient_accumulation_steps`: 1 - `eval_accumulation_steps`: None - `learning_rate`: 5e-05 - `weight_decay`: 0.0 - `adam_beta1`: 0.9 - `adam_beta2`: 0.999 - `adam_epsilon`: 1e-08 - `max_grad_norm`: 1.0 - `num_train_epochs`: 3.0 - `max_steps`: -1 - `lr_scheduler_type`: linear - `lr_scheduler_kwargs`: {} - `warmup_ratio`: 0.0 - `warmup_steps`: 0 - `log_level`: passive - `log_level_replica`: warning - `log_on_each_node`: True - `logging_nan_inf_filter`: True - `save_safetensors`: True - `save_on_each_node`: False - `save_only_model`: False - `restore_callback_states_from_checkpoint`: False - `no_cuda`: False - `use_cpu`: False - `use_mps_device`: False - `seed`: 42 - `data_seed`: None - `jit_mode_eval`: False - `use_ipex`: False - `bf16`: False - `fp16`: False - `fp16_opt_level`: O1 - `half_precision_backend`: auto - `bf16_full_eval`: False - `fp16_full_eval`: False - `tf32`: None - `local_rank`: 0 - `ddp_backend`: None - `tpu_num_cores`: None - `tpu_metrics_debug`: False - `debug`: [] - `dataloader_drop_last`: False - `dataloader_num_workers`: 0 - `dataloader_prefetch_factor`: None - `past_index`: -1 - `disable_tqdm`: False - `remove_unused_columns`: True - `label_names`: None - `load_best_model_at_end`: False - `ignore_data_skip`: False - `fsdp`: [] - `fsdp_min_num_params`: 0 - `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False} - `fsdp_transformer_layer_cls_to_wrap`: None - `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None} - `deepspeed`: None - `label_smoothing_factor`: 0.0 - `optim`: adamw_torch - `optim_args`: None - `adafactor`: False - `group_by_length`: False - `length_column_name`: length - `ddp_find_unused_parameters`: None - `ddp_bucket_cap_mb`: None - `ddp_broadcast_buffers`: False - `dataloader_pin_memory`: True - `dataloader_persistent_workers`: False - `skip_memory_metrics`: True - `use_legacy_prediction_loop`: False - `push_to_hub`: False - `resume_from_checkpoint`: None - `hub_model_id`: None - 
`hub_strategy`: every_save - `hub_private_repo`: False - `hub_always_push`: False - `gradient_checkpointing`: False - `gradient_checkpointing_kwargs`: None - `include_inputs_for_metrics`: False - `eval_do_concat_batches`: True - `fp16_backend`: auto - `push_to_hub_model_id`: None - `push_to_hub_organization`: None - `mp_parameters`: - `auto_find_batch_size`: False - `full_determinism`: False - `torchdynamo`: None - `ray_scope`: last - `ddp_timeout`: 1800 - `torch_compile`: False - `torch_compile_backend`: None - `torch_compile_mode`: None - `dispatch_batches`: None - `split_batches`: None - `include_tokens_per_second`: False - `include_num_input_tokens_seen`: False - `neftune_noise_alpha`: None - `optim_target_modules`: None - `batch_eval_metrics`: False - `batch_sampler`: batch_sampler - `multi_dataset_batch_sampler`: proportional </details> ### Training Logs | Epoch | Step | Training Loss | |:------:|:-----:|:-------------:| | 0.0456 | 500 | 1.3352 | | 0.0912 | 1000 | 1.1358 | | 0.1368 | 1500 | 1.093 | | 0.1825 | 2000 | 0.9637 | | 0.2281 | 2500 | 1.1237 | | 0.2737 | 3000 | 0.9959 | | 0.3193 | 3500 | 1.0079 | | 0.3649 | 4000 | 0.9979 | | 0.4105 | 4500 | 0.9099 | | 0.4562 | 5000 | 0.9126 | | 0.5018 | 5500 | 0.9893 | | 0.5474 | 6000 | 1.0078 | | 0.5930 | 6500 | 1.0522 | | 0.6386 | 7000 | 0.8661 | | 0.6842 | 7500 | 0.9543 | | 0.7299 | 8000 | 0.8853 | | 0.7755 | 8500 | 0.9813 | | 0.8211 | 9000 | 0.852 | | 0.8667 | 9500 | 0.8897 | | 0.9123 | 10000 | 0.9234 | | 0.9579 | 10500 | 0.8947 | | 1.0036 | 11000 | 0.8693 | | 1.0492 | 11500 | 0.7357 | | 1.0948 | 12000 | 0.6246 | | 1.1404 | 12500 | 0.6771 | | 1.1860 | 13000 | 0.5807 | | 1.2316 | 13500 | 0.7376 | | 1.2773 | 14000 | 0.6177 | | 1.3229 | 14500 | 0.5667 | | 1.3685 | 15000 | 0.5701 | | 1.4141 | 15500 | 0.5119 | | 1.4597 | 16000 | 0.517 | | 1.5053 | 16500 | 0.6041 | | 1.5510 | 17000 | 0.5872 | | 1.5966 | 17500 | 0.5719 | | 1.6422 | 18000 | 0.4646 | | 1.6878 | 18500 | 0.5375 | | 1.7334 | 19000 | 0.5235 | | 1.7790 | 19500 | 0.5432 | | 1.8247 | 20000 | 0.5648 | | 1.8703 | 20500 | 0.4776 | | 1.9159 | 21000 | 0.5475 | | 1.9615 | 21500 | 0.4902 | | 2.0071 | 22000 | 0.4883 | | 2.0527 | 22500 | 0.4473 | | 2.0983 | 23000 | 0.3735 | | 2.1440 | 23500 | 0.4526 | | 2.1896 | 24000 | 0.3509 | | 2.2352 | 24500 | 0.4658 | | 2.2808 | 25000 | 0.3529 | | 2.3264 | 25500 | 0.3723 | | 2.3720 | 26000 | 0.4281 | | 2.4177 | 26500 | 0.318 | | 2.4633 | 27000 | 0.3073 | | 2.5089 | 27500 | 0.3907 | | 2.5545 | 28000 | 0.4327 | | 2.6001 | 28500 | 0.3484 | | 2.6457 | 29000 | 0.3073 | | 2.6914 | 29500 | 0.2621 | | 2.7370 | 30000 | 0.3265 | | 2.7826 | 30500 | 0.3043 | | 2.8282 | 31000 | 0.3637 | | 2.8738 | 31500 | 0.3331 | | 2.9194 | 32000 | 0.3693 | | 2.9651 | 32500 | 0.2686 | ### Framework Versions - Python: 3.10.14 - Sentence Transformers: 3.2.1 - Transformers: 4.41.2 - PyTorch: 2.1.0+cu121 - Accelerate: 1.1.1 - Datasets: 3.1.0 - Tokenizers: 0.19.1 ## Citation ### BibTeX #### Sentence Transformers and SoftmaxLoss ```bibtex @inproceedings{reimers-2019-sentence-bert, title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks", author = "Reimers, Nils and Gurevych, Iryna", booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing", month = "11", year = "2019", publisher = "Association for Computational Linguistics", url = "https://arxiv.org/abs/1908.10084", } ``` #### MultipleNegativesRankingLoss ```bibtex @misc{henderson2017efficient, title={Efficient Natural Language Response Suggestion for Smart Reply}, author={Matthew 
Henderson and Rami Al-Rfou and Brian Strope and Yun-hsuan Sung and Laszlo Lukacs and Ruiqi Guo and Sanjiv Kumar and Balint Miklos and Ray Kurzweil}, year={2017}, eprint={1705.00652}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` #### CoSENTLoss ```bibtex @online{kexuefm-8847, title={CoSENT: A more efficient sentence vector scheme than Sentence-BERT}, author={Su Jianlin}, year={2022}, month={Jan}, url={https://kexue.fm/archives/8847}, } ``` <!-- ## Glossary *Clearly define terms in order to be accessible across audiences.* --> <!-- ## Model Card Authors *Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.* --> <!-- ## Model Card Contact *Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.* -->
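For readers who want to set up something similar to the multi-dataset, multi-loss recipe documented above, the following is a minimal sketch using the Sentence Transformers 3.x trainer, where each dataset is paired with its own loss. The subset names, slice sizes, and hyperparameters are simplified assumptions for illustration and do not reproduce the exact configuration behind this model.

```python
from datasets import load_dataset
from sentence_transformers import (
    SentenceTransformer,
    SentenceTransformerTrainer,
    SentenceTransformerTrainingArguments,
    losses,
)

# Base model from the card; without a sentence-transformers config,
# a Transformer + mean-pooling module is created automatically.
model = SentenceTransformer("keeeeenw/MicroLlama")

# Two of the subsets used above (assumed config names and a small slice for illustration).
all_nli = load_dataset("sentence-transformers/all-nli", "triplet", split="train[:10000]")
stsb = load_dataset("sentence-transformers/stsb", split="train")

# One loss per dataset, mirroring the pairing described in the training details.
train_loss = {
    "all-nli": losses.MultipleNegativesRankingLoss(model),
    "stsb": losses.CoSENTLoss(model),
}

args = SentenceTransformerTrainingArguments(
    output_dir="microllama-text-embedding",
    per_device_train_batch_size=6,
    num_train_epochs=3,
)

trainer = SentenceTransformerTrainer(
    model=model,
    args=args,
    train_dataset={"all-nli": all_nli, "stsb": stsb},
    loss=train_loss,
)
trainer.train()
```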
[ "TEXT_CLASSIFICATION" ]
[ "MEDAL" ]
StivenLancheros/Biobert-base-cased-v1.2-finetuned-ner-CRAFT
StivenLancheros
token-classification
[ "transformers", "pytorch", "tensorboard", "bert", "token-classification", "generated_from_trainer", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2022-03-11T19:17:16
2022-03-12T11:49:50
119
1
--- metrics: - precision - recall - f1 - accuracy tags: - generated_from_trainer model-index: - name: Biobert-base-cased-v1.2-finetuned-ner-CRAFT results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # Biobert-base-cased-v1.2-finetuned-ner-CRAFT This model is a fine-tuned version of [dmis-lab/biobert-base-cased-v1.2](https://huggingface.co/dmis-lab/biobert-base-cased-v1.2) on the CRAFT corpus. It achieves the following results on the evaluation set: - Loss: 0.1878 - Precision: 0.8397 - Recall: 0.8366 - F1: 0.8382 - Accuracy: 0.9683 ## Model description This model performs Named Entity Recognition for 6 entity tags: Sequence, Cell, Protein, Gene, Taxon, and Chemical from the [CRAFT](https://github.com/UCDenver-ccp/CRAFT/releases) (Colorado Richly Annotated Full Text) Corpus in English. Entity tags have been normalized: the original three-letter codes were replaced with full names, e.g. B-Protein, I-Chemical. ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 3e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 ### Training results | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:---------:|:------:|:------:|:--------:| | 0.11 | 1.0 | 1360 | 0.1668 | 0.7952 | 0.7917 | 0.7934 | 0.9611 | | 0.0484 | 2.0 | 2720 | 0.1640 | 0.8224 | 0.8371 | 0.8297 | 0.9661 | | 0.0261 | 3.0 | 4080 | 0.1812 | 0.8143 | 0.8447 | 0.8292 | 0.9662 | | 0.0112 | 4.0 | 5440 | 0.1878 | 0.8397 | 0.8366 | 0.8382 | 0.9683 | ### Framework versions - Transformers 4.17.0 - Pytorch 1.10.0+cu111 - Datasets 1.18.4 - Tokenizers 0.11.6
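Because the usage section of this card is still a placeholder, here is a minimal inference sketch with the 🤗 Transformers token-classification pipeline. The example sentence and the aggregation setting are illustrative assumptions; the returned labels follow the normalized entity names described above.

```python
from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

model_id = "StivenLancheros/Biobert-base-cased-v1.2-finetuned-ner-CRAFT"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)

# "simple" aggregation merges word pieces back into whole entity spans.
ner = pipeline("ner", model=model, tokenizer=tokenizer, aggregation_strategy="simple")

# Illustrative biomedical sentence (not taken from the CRAFT corpus).
print(ner("The BRCA1 protein is expressed in Mus musculus liver cells."))
```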
[ "NAMED_ENTITY_RECOGNITION" ]
[ "CRAFT" ]
Mahalingam/med-summary
Mahalingam
text2text-generation
[ "transformers", "safetensors", "t5", "text2text-generation", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
2023-12-15T07:02:09
2023-12-15T11:51:54
119
0
--- dataset: - medical_data task: - summarization --- # Medical Summary Generation with T5-Small This project involves a T5-Small model for generating medical summaries from input text. The model is trained to understand medical data and produce concise and informative summaries. ## Table of Contents - [Introduction](#introduction) - [Usage](#usage) - [Model Details](#model-details) - [Contact](#contact) ## Introduction The T5-Small Medical Summary Generator is built using the Hugging Face Transformers library and is designed to generate medical summaries from input text. This README provides information on how to use the model, details about the architecture, and where to find downloads. ## Usage To use the model for medical summary generation, first install the required dependencies: - pip install transformers - pip install torch - pip install datasets - pip install sentencepiece Then run inference; a minimal example is shown at the end of this card. ## Model Details Model Name: T5-Small Medical Summary Generator Task: Medical Summary Generation Architecture: T5-Small Training Data: Details about the medical dataset used for training Training Duration: Number of training steps, training time, etc. ## Contact For any inquiries or support related to this model, feel free to contact: Name: Mahalingam Balasubramanian Email: [email protected]
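The sketch below completes the usage steps above with a basic inference call through the 🤗 Transformers seq2seq classes. The `summarize:` prefix, the sample clinical note, and the generation settings are assumptions for illustration; the card does not document the exact prompt format used during fine-tuning.

```python
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

model_id = "Mahalingam/med-summary"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSeq2SeqLM.from_pretrained(model_id)

# T5-style models are usually prompted with a task prefix; "summarize:" is assumed here.
text = (
    "summarize: Patient is a 58-year-old male presenting with a three-day history of "
    "fever, productive cough, and shortness of breath. Chest X-ray shows right lower "
    "lobe consolidation consistent with pneumonia. Started on IV antibiotics."
)
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)
summary_ids = model.generate(**inputs, max_length=128, num_beams=4, early_stopping=True)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))
```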
[ "SUMMARIZATION" ]
[ "MEDICAL DATA" ]
starsy/gte-Qwen2-7B-instruct
starsy
sentence-similarity
[ "sentence-transformers", "safetensors", "qwen2", "text-generation", "mteb", "transformers", "Qwen2", "sentence-similarity", "custom_code", "arxiv:2308.03281", "license:apache-2.0", "model-index", "autotrain_compatible", "text-generation-inference", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
2024-12-20T08:30:04
2025-03-05T16:45:50
119
0
--- license: apache-2.0 tags: - mteb - sentence-transformers - transformers - Qwen2 - sentence-similarity model-index: - name: gte-qwen2-7B-instruct results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 91.31343283582089 - type: ap value: 67.64251402604096 - type: f1 value: 87.53372530755692 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 97.497825 - type: ap value: 96.30329547047529 - type: f1 value: 97.49769793778039 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 62.564 - type: f1 value: 60.975777935041066 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: map_at_1 value: 36.486000000000004 - type: map_at_10 value: 54.842 - type: map_at_100 value: 55.206999999999994 - type: map_at_1000 value: 55.206999999999994 - type: map_at_3 value: 49.893 - type: map_at_5 value: 53.105000000000004 - type: mrr_at_1 value: 37.34 - type: mrr_at_10 value: 55.143 - type: mrr_at_100 value: 55.509 - type: mrr_at_1000 value: 55.509 - type: mrr_at_3 value: 50.212999999999994 - type: mrr_at_5 value: 53.432 - type: ndcg_at_1 value: 36.486000000000004 - type: ndcg_at_10 value: 64.273 - type: ndcg_at_100 value: 65.66199999999999 - type: ndcg_at_1000 value: 65.66199999999999 - type: ndcg_at_3 value: 54.352999999999994 - type: ndcg_at_5 value: 60.131 - type: precision_at_1 value: 36.486000000000004 - type: precision_at_10 value: 9.395000000000001 - type: precision_at_100 value: 0.996 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 22.428 - type: precision_at_5 value: 16.259 - type: recall_at_1 value: 36.486000000000004 - type: recall_at_10 value: 93.95400000000001 - type: recall_at_100 value: 99.644 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 67.283 - type: recall_at_5 value: 81.294 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 56.461169803700564 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 51.73600434466286 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 67.57827065898053 - type: mrr value: 79.08136569493911 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 83.53324575999243 - type: cos_sim_spearman value: 81.37173362822374 - type: euclidean_pearson value: 82.19243335103444 - type: euclidean_spearman value: 81.33679307304334 - type: manhattan_pearson value: 82.38752665975699 - type: manhattan_spearman value: 81.31510583189689 
- task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 87.56818181818181 - type: f1 value: 87.25826722019875 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 50.09239610327673 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 46.64733054606282 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: map_at_1 value: 33.997 - type: map_at_10 value: 48.176 - type: map_at_100 value: 49.82 - type: map_at_1000 value: 49.924 - type: map_at_3 value: 43.626 - type: map_at_5 value: 46.275 - type: mrr_at_1 value: 42.059999999999995 - type: mrr_at_10 value: 53.726 - type: mrr_at_100 value: 54.398 - type: mrr_at_1000 value: 54.416 - type: mrr_at_3 value: 50.714999999999996 - type: mrr_at_5 value: 52.639 - type: ndcg_at_1 value: 42.059999999999995 - type: ndcg_at_10 value: 55.574999999999996 - type: ndcg_at_100 value: 60.744 - type: ndcg_at_1000 value: 61.85699999999999 - type: ndcg_at_3 value: 49.363 - type: ndcg_at_5 value: 52.44 - type: precision_at_1 value: 42.059999999999995 - type: precision_at_10 value: 11.101999999999999 - type: precision_at_100 value: 1.73 - type: precision_at_1000 value: 0.218 - type: precision_at_3 value: 24.464 - type: precision_at_5 value: 18.026 - type: recall_at_1 value: 33.997 - type: recall_at_10 value: 70.35900000000001 - type: recall_at_100 value: 91.642 - type: recall_at_1000 value: 97.977 - type: recall_at_3 value: 52.76 - type: recall_at_5 value: 61.148 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval type: BeIR/cqadupstack config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: map_at_1 value: 35.884 - type: map_at_10 value: 48.14 - type: map_at_100 value: 49.5 - type: map_at_1000 value: 49.63 - type: map_at_3 value: 44.646 - type: map_at_5 value: 46.617999999999995 - type: mrr_at_1 value: 44.458999999999996 - type: mrr_at_10 value: 53.751000000000005 - type: mrr_at_100 value: 54.37800000000001 - type: mrr_at_1000 value: 54.415 - type: mrr_at_3 value: 51.815 - type: mrr_at_5 value: 52.882 - type: ndcg_at_1 value: 44.458999999999996 - type: ndcg_at_10 value: 54.157 - type: ndcg_at_100 value: 58.362 - type: ndcg_at_1000 value: 60.178 - type: ndcg_at_3 value: 49.661 - type: ndcg_at_5 value: 51.74999999999999 - type: precision_at_1 value: 44.458999999999996 - type: precision_at_10 value: 10.248 - type: precision_at_100 value: 1.5890000000000002 - type: precision_at_1000 value: 0.207 - type: precision_at_3 value: 23.928 - type: precision_at_5 value: 16.878999999999998 - type: recall_at_1 value: 35.884 - type: recall_at_10 value: 64.798 - type: recall_at_100 value: 82.345 - type: recall_at_1000 value: 93.267 - type: recall_at_3 value: 51.847 - type: recall_at_5 value: 57.601 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval type: BeIR/cqadupstack config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: map_at_1 value: 39.383 - type: map_at_10 value: 
53.714 - type: map_at_100 value: 54.838 - type: map_at_1000 value: 54.87800000000001 - type: map_at_3 value: 50.114999999999995 - type: map_at_5 value: 52.153000000000006 - type: mrr_at_1 value: 45.016 - type: mrr_at_10 value: 56.732000000000006 - type: mrr_at_100 value: 57.411 - type: mrr_at_1000 value: 57.431 - type: mrr_at_3 value: 54.044000000000004 - type: mrr_at_5 value: 55.639 - type: ndcg_at_1 value: 45.016 - type: ndcg_at_10 value: 60.228 - type: ndcg_at_100 value: 64.277 - type: ndcg_at_1000 value: 65.07 - type: ndcg_at_3 value: 54.124 - type: ndcg_at_5 value: 57.147000000000006 - type: precision_at_1 value: 45.016 - type: precision_at_10 value: 9.937 - type: precision_at_100 value: 1.288 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 24.471999999999998 - type: precision_at_5 value: 16.991 - type: recall_at_1 value: 39.383 - type: recall_at_10 value: 76.175 - type: recall_at_100 value: 93.02 - type: recall_at_1000 value: 98.60900000000001 - type: recall_at_3 value: 60.265 - type: recall_at_5 value: 67.46600000000001 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval type: BeIR/cqadupstack config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: map_at_1 value: 27.426000000000002 - type: map_at_10 value: 37.397000000000006 - type: map_at_100 value: 38.61 - type: map_at_1000 value: 38.678000000000004 - type: map_at_3 value: 34.150999999999996 - type: map_at_5 value: 36.137 - type: mrr_at_1 value: 29.944 - type: mrr_at_10 value: 39.654 - type: mrr_at_100 value: 40.638000000000005 - type: mrr_at_1000 value: 40.691 - type: mrr_at_3 value: 36.817 - type: mrr_at_5 value: 38.524 - type: ndcg_at_1 value: 29.944 - type: ndcg_at_10 value: 43.094 - type: ndcg_at_100 value: 48.789 - type: ndcg_at_1000 value: 50.339999999999996 - type: ndcg_at_3 value: 36.984 - type: ndcg_at_5 value: 40.248 - type: precision_at_1 value: 29.944 - type: precision_at_10 value: 6.78 - type: precision_at_100 value: 1.024 - type: precision_at_1000 value: 0.11800000000000001 - type: precision_at_3 value: 15.895000000000001 - type: precision_at_5 value: 11.39 - type: recall_at_1 value: 27.426000000000002 - type: recall_at_10 value: 58.464000000000006 - type: recall_at_100 value: 84.193 - type: recall_at_1000 value: 95.52000000000001 - type: recall_at_3 value: 42.172 - type: recall_at_5 value: 50.101 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval type: BeIR/cqadupstack config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: map_at_1 value: 19.721 - type: map_at_10 value: 31.604 - type: map_at_100 value: 32.972 - type: map_at_1000 value: 33.077 - type: map_at_3 value: 27.218999999999998 - type: map_at_5 value: 29.53 - type: mrr_at_1 value: 25.0 - type: mrr_at_10 value: 35.843 - type: mrr_at_100 value: 36.785000000000004 - type: mrr_at_1000 value: 36.842000000000006 - type: mrr_at_3 value: 32.193 - type: mrr_at_5 value: 34.264 - type: ndcg_at_1 value: 25.0 - type: ndcg_at_10 value: 38.606 - type: ndcg_at_100 value: 44.272 - type: ndcg_at_1000 value: 46.527 - type: ndcg_at_3 value: 30.985000000000003 - type: ndcg_at_5 value: 34.43 - type: precision_at_1 value: 25.0 - type: precision_at_10 value: 7.811 - type: precision_at_100 value: 1.203 - type: precision_at_1000 value: 0.15 - type: precision_at_3 value: 15.423 - type: precision_at_5 value: 11.791 - type: recall_at_1 value: 19.721 - type: recall_at_10 value: 55.625 - type: recall_at_100 value: 79.34400000000001 - 
type: recall_at_1000 value: 95.208 - type: recall_at_3 value: 35.19 - type: recall_at_5 value: 43.626 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval type: BeIR/cqadupstack config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: map_at_1 value: 33.784 - type: map_at_10 value: 47.522 - type: map_at_100 value: 48.949999999999996 - type: map_at_1000 value: 49.038 - type: map_at_3 value: 43.284 - type: map_at_5 value: 45.629 - type: mrr_at_1 value: 41.482 - type: mrr_at_10 value: 52.830999999999996 - type: mrr_at_100 value: 53.559999999999995 - type: mrr_at_1000 value: 53.588 - type: mrr_at_3 value: 50.016000000000005 - type: mrr_at_5 value: 51.614000000000004 - type: ndcg_at_1 value: 41.482 - type: ndcg_at_10 value: 54.569 - type: ndcg_at_100 value: 59.675999999999995 - type: ndcg_at_1000 value: 60.989000000000004 - type: ndcg_at_3 value: 48.187000000000005 - type: ndcg_at_5 value: 51.183 - type: precision_at_1 value: 41.482 - type: precision_at_10 value: 10.221 - type: precision_at_100 value: 1.486 - type: precision_at_1000 value: 0.17500000000000002 - type: precision_at_3 value: 23.548 - type: precision_at_5 value: 16.805 - type: recall_at_1 value: 33.784 - type: recall_at_10 value: 69.798 - type: recall_at_100 value: 90.098 - type: recall_at_1000 value: 98.176 - type: recall_at_3 value: 52.127 - type: recall_at_5 value: 59.861 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval type: BeIR/cqadupstack config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: map_at_1 value: 28.038999999999998 - type: map_at_10 value: 41.904 - type: map_at_100 value: 43.36 - type: map_at_1000 value: 43.453 - type: map_at_3 value: 37.785999999999994 - type: map_at_5 value: 40.105000000000004 - type: mrr_at_1 value: 35.046 - type: mrr_at_10 value: 46.926 - type: mrr_at_100 value: 47.815000000000005 - type: mrr_at_1000 value: 47.849000000000004 - type: mrr_at_3 value: 44.273 - type: mrr_at_5 value: 45.774 - type: ndcg_at_1 value: 35.046 - type: ndcg_at_10 value: 48.937000000000005 - type: ndcg_at_100 value: 54.544000000000004 - type: ndcg_at_1000 value: 56.069 - type: ndcg_at_3 value: 42.858000000000004 - type: ndcg_at_5 value: 45.644 - type: precision_at_1 value: 35.046 - type: precision_at_10 value: 9.452 - type: precision_at_100 value: 1.429 - type: precision_at_1000 value: 0.173 - type: precision_at_3 value: 21.346999999999998 - type: precision_at_5 value: 15.342 - type: recall_at_1 value: 28.038999999999998 - type: recall_at_10 value: 64.59700000000001 - type: recall_at_100 value: 87.735 - type: recall_at_1000 value: 97.41300000000001 - type: recall_at_3 value: 47.368 - type: recall_at_5 value: 54.93900000000001 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: BeIR/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 28.17291666666667 - type: map_at_10 value: 40.025749999999995 - type: map_at_100 value: 41.39208333333333 - type: map_at_1000 value: 41.499249999999996 - type: map_at_3 value: 36.347 - type: map_at_5 value: 38.41391666666667 - type: mrr_at_1 value: 33.65925 - type: mrr_at_10 value: 44.085499999999996 - type: mrr_at_100 value: 44.94116666666667 - type: mrr_at_1000 value: 44.9855 - type: mrr_at_3 value: 41.2815 - type: mrr_at_5 value: 42.91491666666666 - type: ndcg_at_1 value: 33.65925 - type: ndcg_at_10 value: 46.430833333333325 - type: ndcg_at_100 value: 51.761 - type: 
ndcg_at_1000 value: 53.50899999999999 - type: ndcg_at_3 value: 40.45133333333333 - type: ndcg_at_5 value: 43.31483333333334 - type: precision_at_1 value: 33.65925 - type: precision_at_10 value: 8.4995 - type: precision_at_100 value: 1.3210000000000004 - type: precision_at_1000 value: 0.16591666666666666 - type: precision_at_3 value: 19.165083333333335 - type: precision_at_5 value: 13.81816666666667 - type: recall_at_1 value: 28.17291666666667 - type: recall_at_10 value: 61.12624999999999 - type: recall_at_100 value: 83.97266666666667 - type: recall_at_1000 value: 95.66550000000001 - type: recall_at_3 value: 44.661249999999995 - type: recall_at_5 value: 51.983333333333334 - type: map_at_1 value: 17.936 - type: map_at_10 value: 27.399 - type: map_at_100 value: 28.632 - type: map_at_1000 value: 28.738000000000003 - type: map_at_3 value: 24.456 - type: map_at_5 value: 26.06 - type: mrr_at_1 value: 19.224 - type: mrr_at_10 value: 28.998 - type: mrr_at_100 value: 30.11 - type: mrr_at_1000 value: 30.177 - type: mrr_at_3 value: 26.247999999999998 - type: mrr_at_5 value: 27.708 - type: ndcg_at_1 value: 19.224 - type: ndcg_at_10 value: 32.911 - type: ndcg_at_100 value: 38.873999999999995 - type: ndcg_at_1000 value: 41.277 - type: ndcg_at_3 value: 27.142 - type: ndcg_at_5 value: 29.755 - type: precision_at_1 value: 19.224 - type: precision_at_10 value: 5.6930000000000005 - type: precision_at_100 value: 0.9259999999999999 - type: precision_at_1000 value: 0.126 - type: precision_at_3 value: 12.138 - type: precision_at_5 value: 8.909 - type: recall_at_1 value: 17.936 - type: recall_at_10 value: 48.096 - type: recall_at_100 value: 75.389 - type: recall_at_1000 value: 92.803 - type: recall_at_3 value: 32.812999999999995 - type: recall_at_5 value: 38.851 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval type: BeIR/cqadupstack config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: map_at_1 value: 24.681 - type: map_at_10 value: 34.892 - type: map_at_100 value: 35.996 - type: map_at_1000 value: 36.083 - type: map_at_3 value: 31.491999999999997 - type: map_at_5 value: 33.632 - type: mrr_at_1 value: 28.528 - type: mrr_at_10 value: 37.694 - type: mrr_at_100 value: 38.613 - type: mrr_at_1000 value: 38.668 - type: mrr_at_3 value: 34.714 - type: mrr_at_5 value: 36.616 - type: ndcg_at_1 value: 28.528 - type: ndcg_at_10 value: 40.703 - type: ndcg_at_100 value: 45.993 - type: ndcg_at_1000 value: 47.847 - type: ndcg_at_3 value: 34.622 - type: ndcg_at_5 value: 38.035999999999994 - type: precision_at_1 value: 28.528 - type: precision_at_10 value: 6.902 - type: precision_at_100 value: 1.0370000000000001 - type: precision_at_1000 value: 0.126 - type: precision_at_3 value: 15.798000000000002 - type: precision_at_5 value: 11.655999999999999 - type: recall_at_1 value: 24.681 - type: recall_at_10 value: 55.81 - type: recall_at_100 value: 79.785 - type: recall_at_1000 value: 92.959 - type: recall_at_3 value: 39.074 - type: recall_at_5 value: 47.568 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval type: BeIR/cqadupstack config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: map_at_1 value: 18.627 - type: map_at_10 value: 27.872000000000003 - type: map_at_100 value: 29.237999999999996 - type: map_at_1000 value: 29.363 - type: map_at_3 value: 24.751 - type: map_at_5 value: 26.521 - type: mrr_at_1 value: 23.021 - type: mrr_at_10 value: 31.924000000000003 - type: mrr_at_100 value: 32.922000000000004 - type: 
mrr_at_1000 value: 32.988 - type: mrr_at_3 value: 29.192 - type: mrr_at_5 value: 30.798 - type: ndcg_at_1 value: 23.021 - type: ndcg_at_10 value: 33.535 - type: ndcg_at_100 value: 39.732 - type: ndcg_at_1000 value: 42.201 - type: ndcg_at_3 value: 28.153 - type: ndcg_at_5 value: 30.746000000000002 - type: precision_at_1 value: 23.021 - type: precision_at_10 value: 6.459 - type: precision_at_100 value: 1.1320000000000001 - type: precision_at_1000 value: 0.153 - type: precision_at_3 value: 13.719000000000001 - type: precision_at_5 value: 10.193000000000001 - type: recall_at_1 value: 18.627 - type: recall_at_10 value: 46.463 - type: recall_at_100 value: 74.226 - type: recall_at_1000 value: 91.28500000000001 - type: recall_at_3 value: 31.357000000000003 - type: recall_at_5 value: 38.067 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval type: BeIR/cqadupstack config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: map_at_1 value: 31.457 - type: map_at_10 value: 42.888 - type: map_at_100 value: 44.24 - type: map_at_1000 value: 44.327 - type: map_at_3 value: 39.588 - type: map_at_5 value: 41.423 - type: mrr_at_1 value: 37.126999999999995 - type: mrr_at_10 value: 47.083000000000006 - type: mrr_at_100 value: 47.997 - type: mrr_at_1000 value: 48.044 - type: mrr_at_3 value: 44.574000000000005 - type: mrr_at_5 value: 46.202 - type: ndcg_at_1 value: 37.126999999999995 - type: ndcg_at_10 value: 48.833 - type: ndcg_at_100 value: 54.327000000000005 - type: ndcg_at_1000 value: 56.011 - type: ndcg_at_3 value: 43.541999999999994 - type: ndcg_at_5 value: 46.127 - type: precision_at_1 value: 37.126999999999995 - type: precision_at_10 value: 8.376999999999999 - type: precision_at_100 value: 1.2309999999999999 - type: precision_at_1000 value: 0.146 - type: precision_at_3 value: 20.211000000000002 - type: precision_at_5 value: 14.16 - type: recall_at_1 value: 31.457 - type: recall_at_10 value: 62.369 - type: recall_at_100 value: 85.444 - type: recall_at_1000 value: 96.65599999999999 - type: recall_at_3 value: 47.961 - type: recall_at_5 value: 54.676 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval type: BeIR/cqadupstack config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: map_at_1 value: 27.139999999999997 - type: map_at_10 value: 38.801 - type: map_at_100 value: 40.549 - type: map_at_1000 value: 40.802 - type: map_at_3 value: 35.05 - type: map_at_5 value: 36.884 - type: mrr_at_1 value: 33.004 - type: mrr_at_10 value: 43.864 - type: mrr_at_100 value: 44.667 - type: mrr_at_1000 value: 44.717 - type: mrr_at_3 value: 40.777 - type: mrr_at_5 value: 42.319 - type: ndcg_at_1 value: 33.004 - type: ndcg_at_10 value: 46.022 - type: ndcg_at_100 value: 51.542 - type: ndcg_at_1000 value: 53.742000000000004 - type: ndcg_at_3 value: 39.795 - type: ndcg_at_5 value: 42.272 - type: precision_at_1 value: 33.004 - type: precision_at_10 value: 9.012 - type: precision_at_100 value: 1.7770000000000001 - type: precision_at_1000 value: 0.26 - type: precision_at_3 value: 19.038 - type: precision_at_5 value: 13.675999999999998 - type: recall_at_1 value: 27.139999999999997 - type: recall_at_10 value: 60.961 - type: recall_at_100 value: 84.451 - type: recall_at_1000 value: 98.113 - type: recall_at_3 value: 43.001 - type: recall_at_5 value: 49.896 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - 
type: map_at_1 value: 22.076999999999998 - type: map_at_10 value: 35.44 - type: map_at_100 value: 37.651 - type: map_at_1000 value: 37.824999999999996 - type: map_at_3 value: 30.764999999999997 - type: map_at_5 value: 33.26 - type: mrr_at_1 value: 50.163000000000004 - type: mrr_at_10 value: 61.207 - type: mrr_at_100 value: 61.675000000000004 - type: mrr_at_1000 value: 61.692 - type: mrr_at_3 value: 58.60999999999999 - type: mrr_at_5 value: 60.307 - type: ndcg_at_1 value: 50.163000000000004 - type: ndcg_at_10 value: 45.882 - type: ndcg_at_100 value: 53.239999999999995 - type: ndcg_at_1000 value: 55.852000000000004 - type: ndcg_at_3 value: 40.514 - type: ndcg_at_5 value: 42.038 - type: precision_at_1 value: 50.163000000000004 - type: precision_at_10 value: 13.466000000000001 - type: precision_at_100 value: 2.164 - type: precision_at_1000 value: 0.266 - type: precision_at_3 value: 29.707 - type: precision_at_5 value: 21.694 - type: recall_at_1 value: 22.076999999999998 - type: recall_at_10 value: 50.193 - type: recall_at_100 value: 74.993 - type: recall_at_1000 value: 89.131 - type: recall_at_3 value: 35.472 - type: recall_at_5 value: 41.814 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: map_at_1 value: 9.953 - type: map_at_10 value: 24.515 - type: map_at_100 value: 36.173 - type: map_at_1000 value: 38.351 - type: map_at_3 value: 16.592000000000002 - type: map_at_5 value: 20.036 - type: mrr_at_1 value: 74.25 - type: mrr_at_10 value: 81.813 - type: mrr_at_100 value: 82.006 - type: mrr_at_1000 value: 82.011 - type: mrr_at_3 value: 80.875 - type: mrr_at_5 value: 81.362 - type: ndcg_at_1 value: 62.5 - type: ndcg_at_10 value: 52.42 - type: ndcg_at_100 value: 56.808 - type: ndcg_at_1000 value: 63.532999999999994 - type: ndcg_at_3 value: 56.654 - type: ndcg_at_5 value: 54.18300000000001 - type: precision_at_1 value: 74.25 - type: precision_at_10 value: 42.699999999999996 - type: precision_at_100 value: 13.675 - type: precision_at_1000 value: 2.664 - type: precision_at_3 value: 60.5 - type: precision_at_5 value: 52.800000000000004 - type: recall_at_1 value: 9.953 - type: recall_at_10 value: 30.253999999999998 - type: recall_at_100 value: 62.516000000000005 - type: recall_at_1000 value: 84.163 - type: recall_at_3 value: 18.13 - type: recall_at_5 value: 22.771 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 79.455 - type: f1 value: 74.16798697647569 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: map_at_1 value: 87.531 - type: map_at_10 value: 93.16799999999999 - type: map_at_100 value: 93.341 - type: map_at_1000 value: 93.349 - type: map_at_3 value: 92.444 - type: map_at_5 value: 92.865 - type: mrr_at_1 value: 94.014 - type: mrr_at_10 value: 96.761 - type: mrr_at_100 value: 96.762 - type: mrr_at_1000 value: 96.762 - type: mrr_at_3 value: 96.672 - type: mrr_at_5 value: 96.736 - type: ndcg_at_1 value: 94.014 - type: ndcg_at_10 value: 95.112 - type: ndcg_at_100 value: 95.578 - type: ndcg_at_1000 value: 95.68900000000001 - type: ndcg_at_3 value: 94.392 - type: ndcg_at_5 value: 94.72500000000001 - type: precision_at_1 value: 94.014 - type: precision_at_10 value: 11.065 - type: precision_at_100 value: 1.157 - type: 
precision_at_1000 value: 0.11800000000000001 - type: precision_at_3 value: 35.259 - type: precision_at_5 value: 21.599 - type: recall_at_1 value: 87.531 - type: recall_at_10 value: 97.356 - type: recall_at_100 value: 98.965 - type: recall_at_1000 value: 99.607 - type: recall_at_3 value: 95.312 - type: recall_at_5 value: 96.295 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: map_at_1 value: 32.055 - type: map_at_10 value: 53.114 - type: map_at_100 value: 55.235 - type: map_at_1000 value: 55.345 - type: map_at_3 value: 45.854 - type: map_at_5 value: 50.025 - type: mrr_at_1 value: 60.34 - type: mrr_at_10 value: 68.804 - type: mrr_at_100 value: 69.309 - type: mrr_at_1000 value: 69.32199999999999 - type: mrr_at_3 value: 66.40899999999999 - type: mrr_at_5 value: 67.976 - type: ndcg_at_1 value: 60.34 - type: ndcg_at_10 value: 62.031000000000006 - type: ndcg_at_100 value: 68.00500000000001 - type: ndcg_at_1000 value: 69.286 - type: ndcg_at_3 value: 56.355999999999995 - type: ndcg_at_5 value: 58.687 - type: precision_at_1 value: 60.34 - type: precision_at_10 value: 17.176 - type: precision_at_100 value: 2.36 - type: precision_at_1000 value: 0.259 - type: precision_at_3 value: 37.14 - type: precision_at_5 value: 27.809 - type: recall_at_1 value: 32.055 - type: recall_at_10 value: 70.91 - type: recall_at_100 value: 91.83 - type: recall_at_1000 value: 98.871 - type: recall_at_3 value: 51.202999999999996 - type: recall_at_5 value: 60.563 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: map_at_1 value: 43.68 - type: map_at_10 value: 64.389 - type: map_at_100 value: 65.24 - type: map_at_1000 value: 65.303 - type: map_at_3 value: 61.309000000000005 - type: map_at_5 value: 63.275999999999996 - type: mrr_at_1 value: 87.36 - type: mrr_at_10 value: 91.12 - type: mrr_at_100 value: 91.227 - type: mrr_at_1000 value: 91.229 - type: mrr_at_3 value: 90.57600000000001 - type: mrr_at_5 value: 90.912 - type: ndcg_at_1 value: 87.36 - type: ndcg_at_10 value: 73.076 - type: ndcg_at_100 value: 75.895 - type: ndcg_at_1000 value: 77.049 - type: ndcg_at_3 value: 68.929 - type: ndcg_at_5 value: 71.28 - type: precision_at_1 value: 87.36 - type: precision_at_10 value: 14.741000000000001 - type: precision_at_100 value: 1.694 - type: precision_at_1000 value: 0.185 - type: precision_at_3 value: 43.043 - type: precision_at_5 value: 27.681 - type: recall_at_1 value: 43.68 - type: recall_at_10 value: 73.707 - type: recall_at_100 value: 84.7 - type: recall_at_1000 value: 92.309 - type: recall_at_3 value: 64.564 - type: recall_at_5 value: 69.203 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 96.75399999999999 - type: ap value: 95.29389839242187 - type: f1 value: 96.75348377433475 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: map_at_1 value: 25.176 - type: map_at_10 value: 38.598 - type: map_at_100 value: 39.707 - type: map_at_1000 value: 39.744 - type: map_at_3 value: 34.566 - type: map_at_5 value: 36.863 - type: mrr_at_1 value: 25.874000000000002 - type: mrr_at_10 value: 39.214 - type: mrr_at_100 value: 40.251 - type: mrr_at_1000 value: 
40.281 - type: mrr_at_3 value: 35.291 - type: mrr_at_5 value: 37.545 - type: ndcg_at_1 value: 25.874000000000002 - type: ndcg_at_10 value: 45.98 - type: ndcg_at_100 value: 51.197 - type: ndcg_at_1000 value: 52.073 - type: ndcg_at_3 value: 37.785999999999994 - type: ndcg_at_5 value: 41.870000000000005 - type: precision_at_1 value: 25.874000000000002 - type: precision_at_10 value: 7.181 - type: precision_at_100 value: 0.979 - type: precision_at_1000 value: 0.106 - type: precision_at_3 value: 16.051000000000002 - type: precision_at_5 value: 11.713 - type: recall_at_1 value: 25.176 - type: recall_at_10 value: 68.67699999999999 - type: recall_at_100 value: 92.55 - type: recall_at_1000 value: 99.164 - type: recall_at_3 value: 46.372 - type: recall_at_5 value: 56.16 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 99.03784769721841 - type: f1 value: 98.97791641821495 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 91.88326493388054 - type: f1 value: 73.74809928034335 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 85.41358439811701 - type: f1 value: 83.503679460639 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 89.77135171486215 - type: f1 value: 88.89843747468366 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 46.22695362087359 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 44.132372165849425 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 33.35680810650402 - type: mrr value: 34.72625715637218 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: map_at_1 value: 7.165000000000001 - type: map_at_10 value: 15.424 - type: map_at_100 value: 20.28 - type: map_at_1000 value: 22.065 - type: map_at_3 value: 11.236 - type: map_at_5 value: 13.025999999999998 - type: mrr_at_1 value: 51.702999999999996 - type: mrr_at_10 value: 59.965 - type: mrr_at_100 value: 60.667 - type: mrr_at_1000 value: 60.702999999999996 - type: mrr_at_3 value: 58.772000000000006 - type: mrr_at_5 value: 59.267 - type: ndcg_at_1 value: 49.536 - type: ndcg_at_10 value: 40.6 - type: ndcg_at_100 value: 37.848 - type: ndcg_at_1000 value: 46.657 - type: ndcg_at_3 value: 46.117999999999995 - type: ndcg_at_5 value: 43.619 - type: precision_at_1 value: 51.393 - type: precision_at_10 value: 30.31 - type: precision_at_100 value: 9.972 - type: precision_at_1000 value: 2.329 - type: precision_at_3 value: 
43.137 - type: precision_at_5 value: 37.585 - type: recall_at_1 value: 7.165000000000001 - type: recall_at_10 value: 19.689999999999998 - type: recall_at_100 value: 39.237 - type: recall_at_1000 value: 71.417 - type: recall_at_3 value: 12.247 - type: recall_at_5 value: 14.902999999999999 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: map_at_1 value: 42.653999999999996 - type: map_at_10 value: 59.611999999999995 - type: map_at_100 value: 60.32300000000001 - type: map_at_1000 value: 60.336 - type: map_at_3 value: 55.584999999999994 - type: map_at_5 value: 58.19 - type: mrr_at_1 value: 47.683 - type: mrr_at_10 value: 62.06700000000001 - type: mrr_at_100 value: 62.537 - type: mrr_at_1000 value: 62.544999999999995 - type: mrr_at_3 value: 59.178 - type: mrr_at_5 value: 61.034 - type: ndcg_at_1 value: 47.654 - type: ndcg_at_10 value: 67.001 - type: ndcg_at_100 value: 69.73899999999999 - type: ndcg_at_1000 value: 69.986 - type: ndcg_at_3 value: 59.95700000000001 - type: ndcg_at_5 value: 64.025 - type: precision_at_1 value: 47.654 - type: precision_at_10 value: 10.367999999999999 - type: precision_at_100 value: 1.192 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 26.651000000000003 - type: precision_at_5 value: 18.459 - type: recall_at_1 value: 42.653999999999996 - type: recall_at_10 value: 86.619 - type: recall_at_100 value: 98.04899999999999 - type: recall_at_1000 value: 99.812 - type: recall_at_3 value: 68.987 - type: recall_at_5 value: 78.158 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: None metrics: - type: map_at_1 value: 72.538 - type: map_at_10 value: 86.702 - type: map_at_100 value: 87.31 - type: map_at_1000 value: 87.323 - type: map_at_3 value: 83.87 - type: map_at_5 value: 85.682 - type: mrr_at_1 value: 83.31 - type: mrr_at_10 value: 89.225 - type: mrr_at_100 value: 89.30399999999999 - type: mrr_at_1000 value: 89.30399999999999 - type: mrr_at_3 value: 88.44300000000001 - type: mrr_at_5 value: 89.005 - type: ndcg_at_1 value: 83.32000000000001 - type: ndcg_at_10 value: 90.095 - type: ndcg_at_100 value: 91.12 - type: ndcg_at_1000 value: 91.179 - type: ndcg_at_3 value: 87.606 - type: ndcg_at_5 value: 89.031 - type: precision_at_1 value: 83.32000000000001 - type: precision_at_10 value: 13.641 - type: precision_at_100 value: 1.541 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 38.377 - type: precision_at_5 value: 25.162000000000003 - type: recall_at_1 value: 72.538 - type: recall_at_10 value: 96.47200000000001 - type: recall_at_100 value: 99.785 - type: recall_at_1000 value: 99.99900000000001 - type: recall_at_3 value: 89.278 - type: recall_at_5 value: 93.367 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 73.55219145406065 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 74.13437105242755 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 6.873 - type: map_at_10 value: 17.944 - type: map_at_100 value: 21.171 - type: map_at_1000 value: 21.528 - type: map_at_3 value: 
12.415 - type: map_at_5 value: 15.187999999999999 - type: mrr_at_1 value: 33.800000000000004 - type: mrr_at_10 value: 46.455 - type: mrr_at_100 value: 47.378 - type: mrr_at_1000 value: 47.394999999999996 - type: mrr_at_3 value: 42.367 - type: mrr_at_5 value: 44.972 - type: ndcg_at_1 value: 33.800000000000004 - type: ndcg_at_10 value: 28.907 - type: ndcg_at_100 value: 39.695 - type: ndcg_at_1000 value: 44.582 - type: ndcg_at_3 value: 26.949 - type: ndcg_at_5 value: 23.988 - type: precision_at_1 value: 33.800000000000004 - type: precision_at_10 value: 15.079999999999998 - type: precision_at_100 value: 3.056 - type: precision_at_1000 value: 0.42100000000000004 - type: precision_at_3 value: 25.167 - type: precision_at_5 value: 21.26 - type: recall_at_1 value: 6.873 - type: recall_at_10 value: 30.568 - type: recall_at_100 value: 62.062 - type: recall_at_1000 value: 85.37700000000001 - type: recall_at_3 value: 15.312999999999999 - type: recall_at_5 value: 21.575 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 82.37009118256057 - type: cos_sim_spearman value: 79.27986395671529 - type: euclidean_pearson value: 79.18037715442115 - type: euclidean_spearman value: 79.28004791561621 - type: manhattan_pearson value: 79.34062972800541 - type: manhattan_spearman value: 79.43106695543402 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 87.48474767383833 - type: cos_sim_spearman value: 79.54505388752513 - type: euclidean_pearson value: 83.43282704179565 - type: euclidean_spearman value: 79.54579919925405 - type: manhattan_pearson value: 83.77564492427952 - type: manhattan_spearman value: 79.84558396989286 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 88.803698035802 - type: cos_sim_spearman value: 88.83451367754881 - type: euclidean_pearson value: 88.28939285711628 - type: euclidean_spearman value: 88.83528996073112 - type: manhattan_pearson value: 88.28017412671795 - type: manhattan_spearman value: 88.9228828016344 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 85.27469288153428 - type: cos_sim_spearman value: 83.87477064876288 - type: euclidean_pearson value: 84.2601737035379 - type: euclidean_spearman value: 83.87431082479074 - type: manhattan_pearson value: 84.3621547772745 - type: manhattan_spearman value: 84.12094375000423 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 88.12749863201587 - type: cos_sim_spearman value: 88.54287568368565 - type: euclidean_pearson value: 87.90429700607999 - type: euclidean_spearman value: 88.5437689576261 - type: manhattan_pearson value: 88.19276653356833 - type: manhattan_spearman value: 88.99995393814679 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 85.68398747560902 - type: cos_sim_spearman value: 86.48815303460574 - type: euclidean_pearson value: 
85.52356631237954 - type: euclidean_spearman value: 86.486391949551 - type: manhattan_pearson value: 85.67267981761788 - type: manhattan_spearman value: 86.7073696332485 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 88.9057107443124 - type: cos_sim_spearman value: 88.7312168757697 - type: euclidean_pearson value: 88.72810439714794 - type: euclidean_spearman value: 88.71976185854771 - type: manhattan_pearson value: 88.50433745949111 - type: manhattan_spearman value: 88.51726175544195 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 67.59391795109886 - type: cos_sim_spearman value: 66.87613008631367 - type: euclidean_pearson value: 69.23198488262217 - type: euclidean_spearman value: 66.85427723013692 - type: manhattan_pearson value: 69.50730124841084 - type: manhattan_spearman value: 67.10404669820792 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 87.0820605344619 - type: cos_sim_spearman value: 86.8518089863434 - type: euclidean_pearson value: 86.31087134689284 - type: euclidean_spearman value: 86.8518520517941 - type: manhattan_pearson value: 86.47203796160612 - type: manhattan_spearman value: 87.1080149734421 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 89.09255369305481 - type: mrr value: 97.10323445617563 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: map_at_1 value: 61.260999999999996 - type: map_at_10 value: 74.043 - type: map_at_100 value: 74.37700000000001 - type: map_at_1000 value: 74.384 - type: map_at_3 value: 71.222 - type: map_at_5 value: 72.875 - type: mrr_at_1 value: 64.333 - type: mrr_at_10 value: 74.984 - type: mrr_at_100 value: 75.247 - type: mrr_at_1000 value: 75.25500000000001 - type: mrr_at_3 value: 73.167 - type: mrr_at_5 value: 74.35000000000001 - type: ndcg_at_1 value: 64.333 - type: ndcg_at_10 value: 79.06 - type: ndcg_at_100 value: 80.416 - type: ndcg_at_1000 value: 80.55600000000001 - type: ndcg_at_3 value: 74.753 - type: ndcg_at_5 value: 76.97500000000001 - type: precision_at_1 value: 64.333 - type: precision_at_10 value: 10.567 - type: precision_at_100 value: 1.1199999999999999 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 29.889 - type: precision_at_5 value: 19.533 - type: recall_at_1 value: 61.260999999999996 - type: recall_at_10 value: 93.167 - type: recall_at_100 value: 99.0 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 81.667 - type: recall_at_5 value: 87.394 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.71980198019801 - type: cos_sim_ap value: 92.81616007802704 - type: cos_sim_f1 value: 85.17548454688318 - type: cos_sim_precision value: 89.43894389438944 - type: cos_sim_recall value: 81.3 - 
type: dot_accuracy value: 99.71980198019801 - type: dot_ap value: 92.81398760591358 - type: dot_f1 value: 85.17548454688318 - type: dot_precision value: 89.43894389438944 - type: dot_recall value: 81.3 - type: euclidean_accuracy value: 99.71980198019801 - type: euclidean_ap value: 92.81560637245072 - type: euclidean_f1 value: 85.17548454688318 - type: euclidean_precision value: 89.43894389438944 - type: euclidean_recall value: 81.3 - type: manhattan_accuracy value: 99.73069306930694 - type: manhattan_ap value: 93.14005487480794 - type: manhattan_f1 value: 85.56263269639068 - type: manhattan_precision value: 91.17647058823529 - type: manhattan_recall value: 80.60000000000001 - type: max_accuracy value: 99.73069306930694 - type: max_ap value: 93.14005487480794 - type: max_f1 value: 85.56263269639068 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 79.86443362395185 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 49.40897096662564 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 55.66040806627947 - type: mrr value: 56.58670475766064 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 31.51015090598575 - type: cos_sim_spearman value: 31.35016454939226 - type: dot_pearson value: 31.5150068731 - type: dot_spearman value: 31.34790869023487 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.254 - type: map_at_10 value: 2.064 - type: map_at_100 value: 12.909 - type: map_at_1000 value: 31.761 - type: map_at_3 value: 0.738 - type: map_at_5 value: 1.155 - type: mrr_at_1 value: 96.0 - type: mrr_at_10 value: 98.0 - type: mrr_at_100 value: 98.0 - type: mrr_at_1000 value: 98.0 - type: mrr_at_3 value: 98.0 - type: mrr_at_5 value: 98.0 - type: ndcg_at_1 value: 93.0 - type: ndcg_at_10 value: 82.258 - type: ndcg_at_100 value: 64.34 - type: ndcg_at_1000 value: 57.912 - type: ndcg_at_3 value: 90.827 - type: ndcg_at_5 value: 86.79 - type: precision_at_1 value: 96.0 - type: precision_at_10 value: 84.8 - type: precision_at_100 value: 66.0 - type: precision_at_1000 value: 25.356 - type: precision_at_3 value: 94.667 - type: precision_at_5 value: 90.4 - type: recall_at_1 value: 0.254 - type: recall_at_10 value: 2.1950000000000003 - type: recall_at_100 value: 16.088 - type: recall_at_1000 value: 54.559000000000005 - type: recall_at_3 value: 0.75 - type: recall_at_5 value: 1.191 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: map_at_1 value: 2.976 - type: map_at_10 value: 11.389000000000001 - type: map_at_100 value: 18.429000000000002 - type: map_at_1000 value: 20.113 - type: map_at_3 value: 6.483 - type: map_at_5 value: 8.770999999999999 - type: mrr_at_1 value: 40.816 - type: mrr_at_10 value: 58.118 - type: mrr_at_100 value: 
58.489999999999995 - type: mrr_at_1000 value: 58.489999999999995 - type: mrr_at_3 value: 53.061 - type: mrr_at_5 value: 57.041 - type: ndcg_at_1 value: 40.816 - type: ndcg_at_10 value: 30.567 - type: ndcg_at_100 value: 42.44 - type: ndcg_at_1000 value: 53.480000000000004 - type: ndcg_at_3 value: 36.016 - type: ndcg_at_5 value: 34.257 - type: precision_at_1 value: 42.857 - type: precision_at_10 value: 25.714 - type: precision_at_100 value: 8.429 - type: precision_at_1000 value: 1.5939999999999999 - type: precision_at_3 value: 36.735 - type: precision_at_5 value: 33.878 - type: recall_at_1 value: 2.976 - type: recall_at_10 value: 17.854999999999997 - type: recall_at_100 value: 51.833 - type: recall_at_1000 value: 86.223 - type: recall_at_3 value: 7.887 - type: recall_at_5 value: 12.026 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 85.1174 - type: ap value: 30.169441069345748 - type: f1 value: 69.79254701873245 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 72.58347481607245 - type: f1 value: 72.74877295564937 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 53.90586138221305 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 87.35769207844072 - type: cos_sim_ap value: 77.9645072410354 - type: cos_sim_f1 value: 71.32352941176471 - type: cos_sim_precision value: 66.5903890160183 - type: cos_sim_recall value: 76.78100263852242 - type: dot_accuracy value: 87.37557370209214 - type: dot_ap value: 77.96250046429908 - type: dot_f1 value: 71.28932757557064 - type: dot_precision value: 66.95249130938586 - type: dot_recall value: 76.22691292875989 - type: euclidean_accuracy value: 87.35173153722357 - type: euclidean_ap value: 77.96520460741593 - type: euclidean_f1 value: 71.32470733210104 - type: euclidean_precision value: 66.91329479768785 - type: euclidean_recall value: 76.35883905013192 - type: manhattan_accuracy value: 87.25636287774931 - type: manhattan_ap value: 77.77752485611796 - type: manhattan_f1 value: 71.18148599269183 - type: manhattan_precision value: 66.10859728506787 - type: manhattan_recall value: 77.0976253298153 - type: max_accuracy value: 87.37557370209214 - type: max_ap value: 77.96520460741593 - type: max_f1 value: 71.32470733210104 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.38176737687739 - type: cos_sim_ap value: 86.58811861657401 - type: cos_sim_f1 value: 79.09430644097604 - type: cos_sim_precision value: 75.45085977911366 - type: cos_sim_recall value: 83.10748383122882 - type: dot_accuracy value: 89.38370784336554 - type: dot_ap value: 86.58840606004333 - type: dot_f1 value: 79.10179860068133 - type: dot_precision value: 75.44546153308643 - type: dot_recall 
value: 83.13058207576223 - type: euclidean_accuracy value: 89.38564830985369 - type: euclidean_ap value: 86.58820721061164 - type: euclidean_f1 value: 79.09070942235888 - type: euclidean_precision value: 75.38729937194697 - type: euclidean_recall value: 83.17677856482906 - type: manhattan_accuracy value: 89.40699344122326 - type: manhattan_ap value: 86.60631843011362 - type: manhattan_f1 value: 79.14949970570925 - type: manhattan_precision value: 75.78191039729502 - type: manhattan_recall value: 82.83030489682784 - type: max_accuracy value: 89.40699344122326 - type: max_ap value: 86.60631843011362 - type: max_f1 value: 79.14949970570925 - task: type: STS dataset: name: MTEB AFQMC type: C-MTEB/AFQMC config: default split: validation revision: b44c3b011063adb25877c13823db83bb193913c4 metrics: - type: cos_sim_pearson value: 65.58442135663871 - type: cos_sim_spearman value: 72.2538631361313 - type: euclidean_pearson value: 70.97255486607429 - type: euclidean_spearman value: 72.25374250228647 - type: manhattan_pearson value: 70.83250199989911 - type: manhattan_spearman value: 72.14819496536272 - task: type: STS dataset: name: MTEB ATEC type: C-MTEB/ATEC config: default split: test revision: 0f319b1142f28d00e055a6770f3f726ae9b7d865 metrics: - type: cos_sim_pearson value: 59.99478404929932 - type: cos_sim_spearman value: 62.61836216999812 - type: euclidean_pearson value: 66.86429811933593 - type: euclidean_spearman value: 62.6183520374191 - type: manhattan_pearson value: 66.8063778911633 - type: manhattan_spearman value: 62.569607573241115 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 53.98400000000001 - type: f1 value: 51.21447361350723 - task: type: STS dataset: name: MTEB BQ type: C-MTEB/BQ config: default split: test revision: e3dda5e115e487b39ec7e618c0c6a29137052a55 metrics: - type: cos_sim_pearson value: 79.11941660686553 - type: cos_sim_spearman value: 81.25029594540435 - type: euclidean_pearson value: 82.06973504238826 - type: euclidean_spearman value: 81.2501989488524 - type: manhattan_pearson value: 82.10094630392753 - type: manhattan_spearman value: 81.27987244392389 - task: type: Clustering dataset: name: MTEB CLSClusteringP2P type: C-MTEB/CLSClusteringP2P config: default split: test revision: 4b6227591c6c1a73bc76b1055f3b7f3588e72476 metrics: - type: v_measure value: 47.07270168705156 - task: type: Clustering dataset: name: MTEB CLSClusteringS2S type: C-MTEB/CLSClusteringS2S config: default split: test revision: e458b3f5414b62b7f9f83499ac1f5497ae2e869f metrics: - type: v_measure value: 45.98511703185043 - task: type: Reranking dataset: name: MTEB CMedQAv1 type: C-MTEB/CMedQAv1-reranking config: default split: test revision: 8d7f1e942507dac42dc58017c1a001c3717da7df metrics: - type: map value: 88.19895157194931 - type: mrr value: 90.21424603174603 - task: type: Reranking dataset: name: MTEB CMedQAv2 type: C-MTEB/CMedQAv2-reranking config: default split: test revision: 23d186750531a14a0357ca22cd92d712fd512ea0 metrics: - type: map value: 88.03317320980119 - type: mrr value: 89.9461507936508 - task: type: Retrieval dataset: name: MTEB CmedqaRetrieval type: C-MTEB/CmedqaRetrieval config: default split: dev revision: cd540c506dae1cf9e9a59c3e06f42030d54e7301 metrics: - type: map_at_1 value: 29.037000000000003 - type: map_at_10 value: 42.001 - type: map_at_100 value: 43.773 - type: map_at_1000 value: 43.878 - type: 
map_at_3 value: 37.637 - type: map_at_5 value: 40.034 - type: mrr_at_1 value: 43.136 - type: mrr_at_10 value: 51.158 - type: mrr_at_100 value: 52.083 - type: mrr_at_1000 value: 52.12 - type: mrr_at_3 value: 48.733 - type: mrr_at_5 value: 50.025 - type: ndcg_at_1 value: 43.136 - type: ndcg_at_10 value: 48.685 - type: ndcg_at_100 value: 55.513 - type: ndcg_at_1000 value: 57.242000000000004 - type: ndcg_at_3 value: 43.329 - type: ndcg_at_5 value: 45.438 - type: precision_at_1 value: 43.136 - type: precision_at_10 value: 10.56 - type: precision_at_100 value: 1.6129999999999998 - type: precision_at_1000 value: 0.184 - type: precision_at_3 value: 24.064 - type: precision_at_5 value: 17.269000000000002 - type: recall_at_1 value: 29.037000000000003 - type: recall_at_10 value: 59.245000000000005 - type: recall_at_100 value: 87.355 - type: recall_at_1000 value: 98.74000000000001 - type: recall_at_3 value: 42.99 - type: recall_at_5 value: 49.681999999999995 - task: type: PairClassification dataset: name: MTEB Cmnli type: C-MTEB/CMNLI config: default split: validation revision: 41bc36f332156f7adc9e38f53777c959b2ae9766 metrics: - type: cos_sim_accuracy value: 82.68190018039687 - type: cos_sim_ap value: 90.18017125327886 - type: cos_sim_f1 value: 83.64080906868193 - type: cos_sim_precision value: 79.7076890489303 - type: cos_sim_recall value: 87.98223053542202 - type: dot_accuracy value: 82.68190018039687 - type: dot_ap value: 90.18782350103646 - type: dot_f1 value: 83.64242087729039 - type: dot_precision value: 79.65313028764805 - type: dot_recall value: 88.05237315875614 - type: euclidean_accuracy value: 82.68190018039687 - type: euclidean_ap value: 90.1801957900632 - type: euclidean_f1 value: 83.63636363636364 - type: euclidean_precision value: 79.52772506852203 - type: euclidean_recall value: 88.19265840542437 - type: manhattan_accuracy value: 82.14070956103427 - type: manhattan_ap value: 89.96178420101427 - type: manhattan_f1 value: 83.21087838578791 - type: manhattan_precision value: 78.35605121850475 - type: manhattan_recall value: 88.70703764320785 - type: max_accuracy value: 82.68190018039687 - type: max_ap value: 90.18782350103646 - type: max_f1 value: 83.64242087729039 - task: type: Retrieval dataset: name: MTEB CovidRetrieval type: C-MTEB/CovidRetrieval config: default split: dev revision: 1271c7809071a13532e05f25fb53511ffce77117 metrics: - type: map_at_1 value: 72.234 - type: map_at_10 value: 80.10000000000001 - type: map_at_100 value: 80.36 - type: map_at_1000 value: 80.363 - type: map_at_3 value: 78.315 - type: map_at_5 value: 79.607 - type: mrr_at_1 value: 72.392 - type: mrr_at_10 value: 80.117 - type: mrr_at_100 value: 80.36999999999999 - type: mrr_at_1000 value: 80.373 - type: mrr_at_3 value: 78.469 - type: mrr_at_5 value: 79.633 - type: ndcg_at_1 value: 72.392 - type: ndcg_at_10 value: 83.651 - type: ndcg_at_100 value: 84.749 - type: ndcg_at_1000 value: 84.83000000000001 - type: ndcg_at_3 value: 80.253 - type: ndcg_at_5 value: 82.485 - type: precision_at_1 value: 72.392 - type: precision_at_10 value: 9.557 - type: precision_at_100 value: 1.004 - type: precision_at_1000 value: 0.101 - type: precision_at_3 value: 28.732000000000003 - type: precision_at_5 value: 18.377 - type: recall_at_1 value: 72.234 - type: recall_at_10 value: 94.573 - type: recall_at_100 value: 99.368 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 85.669 - type: recall_at_5 value: 91.01700000000001 - task: type: Retrieval dataset: name: MTEB DuRetrieval type: C-MTEB/DuRetrieval config: default split: 
dev revision: a1a333e290fe30b10f3f56498e3a0d911a693ced metrics: - type: map_at_1 value: 26.173999999999996 - type: map_at_10 value: 80.04 - type: map_at_100 value: 82.94500000000001 - type: map_at_1000 value: 82.98100000000001 - type: map_at_3 value: 55.562999999999995 - type: map_at_5 value: 69.89800000000001 - type: mrr_at_1 value: 89.5 - type: mrr_at_10 value: 92.996 - type: mrr_at_100 value: 93.06400000000001 - type: mrr_at_1000 value: 93.065 - type: mrr_at_3 value: 92.658 - type: mrr_at_5 value: 92.84599999999999 - type: ndcg_at_1 value: 89.5 - type: ndcg_at_10 value: 87.443 - type: ndcg_at_100 value: 90.253 - type: ndcg_at_1000 value: 90.549 - type: ndcg_at_3 value: 85.874 - type: ndcg_at_5 value: 84.842 - type: precision_at_1 value: 89.5 - type: precision_at_10 value: 41.805 - type: precision_at_100 value: 4.827 - type: precision_at_1000 value: 0.49 - type: precision_at_3 value: 76.85 - type: precision_at_5 value: 64.8 - type: recall_at_1 value: 26.173999999999996 - type: recall_at_10 value: 89.101 - type: recall_at_100 value: 98.08099999999999 - type: recall_at_1000 value: 99.529 - type: recall_at_3 value: 57.902 - type: recall_at_5 value: 74.602 - task: type: Retrieval dataset: name: MTEB EcomRetrieval type: C-MTEB/EcomRetrieval config: default split: dev revision: 687de13dc7294d6fd9be10c6945f9e8fec8166b9 metrics: - type: map_at_1 value: 56.10000000000001 - type: map_at_10 value: 66.15299999999999 - type: map_at_100 value: 66.625 - type: map_at_1000 value: 66.636 - type: map_at_3 value: 63.632999999999996 - type: map_at_5 value: 65.293 - type: mrr_at_1 value: 56.10000000000001 - type: mrr_at_10 value: 66.15299999999999 - type: mrr_at_100 value: 66.625 - type: mrr_at_1000 value: 66.636 - type: mrr_at_3 value: 63.632999999999996 - type: mrr_at_5 value: 65.293 - type: ndcg_at_1 value: 56.10000000000001 - type: ndcg_at_10 value: 71.146 - type: ndcg_at_100 value: 73.27799999999999 - type: ndcg_at_1000 value: 73.529 - type: ndcg_at_3 value: 66.09 - type: ndcg_at_5 value: 69.08999999999999 - type: precision_at_1 value: 56.10000000000001 - type: precision_at_10 value: 8.68 - type: precision_at_100 value: 0.964 - type: precision_at_1000 value: 0.098 - type: precision_at_3 value: 24.4 - type: precision_at_5 value: 16.1 - type: recall_at_1 value: 56.10000000000001 - type: recall_at_10 value: 86.8 - type: recall_at_100 value: 96.39999999999999 - type: recall_at_1000 value: 98.3 - type: recall_at_3 value: 73.2 - type: recall_at_5 value: 80.5 - task: type: Classification dataset: name: MTEB IFlyTek type: C-MTEB/IFlyTek-classification config: default split: validation revision: 421605374b29664c5fc098418fe20ada9bd55f8a metrics: - type: accuracy value: 54.52096960369373 - type: f1 value: 40.930845295808695 - task: type: Classification dataset: name: MTEB JDReview type: C-MTEB/JDReview-classification config: default split: test revision: b7c64bd89eb87f8ded463478346f76731f07bf8b metrics: - type: accuracy value: 86.51031894934334 - type: ap value: 55.9516014323483 - type: f1 value: 81.54813679326381 - task: type: STS dataset: name: MTEB LCQMC type: C-MTEB/LCQMC config: default split: test revision: 17f9b096f80380fce5ed12a9be8be7784b337daf metrics: - type: cos_sim_pearson value: 69.67437838574276 - type: cos_sim_spearman value: 73.81314174653045 - type: euclidean_pearson value: 72.63430276680275 - type: euclidean_spearman value: 73.81358736777001 - type: manhattan_pearson value: 72.58743833842829 - type: manhattan_spearman value: 73.7590419009179 - task: type: Reranking dataset: name: MTEB 
MMarcoReranking type: C-MTEB/Mmarco-reranking config: default split: dev revision: None metrics: - type: map value: 31.648613483640254 - type: mrr value: 30.37420634920635 - task: type: Retrieval dataset: name: MTEB MMarcoRetrieval type: C-MTEB/MMarcoRetrieval config: default split: dev revision: 539bbde593d947e2a124ba72651aafc09eb33fc2 metrics: - type: map_at_1 value: 73.28099999999999 - type: map_at_10 value: 81.977 - type: map_at_100 value: 82.222 - type: map_at_1000 value: 82.22699999999999 - type: map_at_3 value: 80.441 - type: map_at_5 value: 81.46600000000001 - type: mrr_at_1 value: 75.673 - type: mrr_at_10 value: 82.41000000000001 - type: mrr_at_100 value: 82.616 - type: mrr_at_1000 value: 82.621 - type: mrr_at_3 value: 81.094 - type: mrr_at_5 value: 81.962 - type: ndcg_at_1 value: 75.673 - type: ndcg_at_10 value: 85.15599999999999 - type: ndcg_at_100 value: 86.151 - type: ndcg_at_1000 value: 86.26899999999999 - type: ndcg_at_3 value: 82.304 - type: ndcg_at_5 value: 84.009 - type: precision_at_1 value: 75.673 - type: precision_at_10 value: 10.042 - type: precision_at_100 value: 1.052 - type: precision_at_1000 value: 0.106 - type: precision_at_3 value: 30.673000000000002 - type: precision_at_5 value: 19.326999999999998 - type: recall_at_1 value: 73.28099999999999 - type: recall_at_10 value: 94.446 - type: recall_at_100 value: 98.737 - type: recall_at_1000 value: 99.649 - type: recall_at_3 value: 86.984 - type: recall_at_5 value: 91.024 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 81.08607935440484 - type: f1 value: 78.24879986066307 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 86.05917955615332 - type: f1 value: 85.05279279434997 - task: type: Retrieval dataset: name: MTEB MedicalRetrieval type: C-MTEB/MedicalRetrieval config: default split: dev revision: 2039188fb5800a9803ba5048df7b76e6fb151fc6 metrics: - type: map_at_1 value: 56.2 - type: map_at_10 value: 62.57899999999999 - type: map_at_100 value: 63.154999999999994 - type: map_at_1000 value: 63.193 - type: map_at_3 value: 61.217 - type: map_at_5 value: 62.012 - type: mrr_at_1 value: 56.3 - type: mrr_at_10 value: 62.629000000000005 - type: mrr_at_100 value: 63.205999999999996 - type: mrr_at_1000 value: 63.244 - type: mrr_at_3 value: 61.267 - type: mrr_at_5 value: 62.062 - type: ndcg_at_1 value: 56.2 - type: ndcg_at_10 value: 65.592 - type: ndcg_at_100 value: 68.657 - type: ndcg_at_1000 value: 69.671 - type: ndcg_at_3 value: 62.808 - type: ndcg_at_5 value: 64.24499999999999 - type: precision_at_1 value: 56.2 - type: precision_at_10 value: 7.5 - type: precision_at_100 value: 0.899 - type: precision_at_1000 value: 0.098 - type: precision_at_3 value: 22.467000000000002 - type: precision_at_5 value: 14.180000000000001 - type: recall_at_1 value: 56.2 - type: recall_at_10 value: 75.0 - type: recall_at_100 value: 89.9 - type: recall_at_1000 value: 97.89999999999999 - type: recall_at_3 value: 67.4 - type: recall_at_5 value: 70.89999999999999 - task: type: Classification dataset: name: MTEB MultilingualSentiment type: C-MTEB/MultilingualSentiment-classification config: default split: validation revision: 46958b007a63fdbf239b7672c25d0bea67b5ea1a metrics: - type: accuracy value: 
76.87666666666667 - type: f1 value: 76.7317686219665 - task: type: PairClassification dataset: name: MTEB Ocnli type: C-MTEB/OCNLI config: default split: validation revision: 66e76a618a34d6d565d5538088562851e6daa7ec metrics: - type: cos_sim_accuracy value: 79.64266377910124 - type: cos_sim_ap value: 84.78274442344829 - type: cos_sim_f1 value: 81.16947472745292 - type: cos_sim_precision value: 76.47058823529412 - type: cos_sim_recall value: 86.48363252375924 - type: dot_accuracy value: 79.64266377910124 - type: dot_ap value: 84.7851404063692 - type: dot_f1 value: 81.16947472745292 - type: dot_precision value: 76.47058823529412 - type: dot_recall value: 86.48363252375924 - type: euclidean_accuracy value: 79.64266377910124 - type: euclidean_ap value: 84.78068373762378 - type: euclidean_f1 value: 81.14794656110837 - type: euclidean_precision value: 76.35009310986965 - type: euclidean_recall value: 86.58922914466737 - type: manhattan_accuracy value: 79.48023822414727 - type: manhattan_ap value: 84.72928897427576 - type: manhattan_f1 value: 81.32084770823064 - type: manhattan_precision value: 76.24768946395564 - type: manhattan_recall value: 87.11721224920802 - type: max_accuracy value: 79.64266377910124 - type: max_ap value: 84.7851404063692 - type: max_f1 value: 81.32084770823064 - task: type: Classification dataset: name: MTEB OnlineShopping type: C-MTEB/OnlineShopping-classification config: default split: test revision: e610f2ebd179a8fda30ae534c3878750a96db120 metrics: - type: accuracy value: 94.3 - type: ap value: 92.8664032274438 - type: f1 value: 94.29311102997727 - task: type: STS dataset: name: MTEB PAWSX type: C-MTEB/PAWSX config: default split: test revision: 9c6a90e430ac22b5779fb019a23e820b11a8b5e1 metrics: - type: cos_sim_pearson value: 48.51392279882909 - type: cos_sim_spearman value: 54.06338895994974 - type: euclidean_pearson value: 52.58480559573412 - type: euclidean_spearman value: 54.06417276612201 - type: manhattan_pearson value: 52.69525121721343 - type: manhattan_spearman value: 54.048147455389675 - task: type: STS dataset: name: MTEB QBQTC type: C-MTEB/QBQTC config: default split: test revision: 790b0510dc52b1553e8c49f3d2afb48c0e5c48b7 metrics: - type: cos_sim_pearson value: 29.728387290757325 - type: cos_sim_spearman value: 31.366121633635284 - type: euclidean_pearson value: 29.14588368552961 - type: euclidean_spearman value: 31.36764411112844 - type: manhattan_pearson value: 29.63517350523121 - type: manhattan_spearman value: 31.94157020583762 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 63.64868296271406 - type: cos_sim_spearman value: 66.12800618164744 - type: euclidean_pearson value: 63.21405767340238 - type: euclidean_spearman value: 66.12786567790748 - type: manhattan_pearson value: 64.04300276525848 - type: manhattan_spearman value: 66.5066857145652 - task: type: STS dataset: name: MTEB STSB type: C-MTEB/STSB config: default split: test revision: 0cde68302b3541bb8b3c340dc0644b0b745b3dc0 metrics: - type: cos_sim_pearson value: 81.2302623912794 - type: cos_sim_spearman value: 81.16833673266562 - type: euclidean_pearson value: 79.47647843876024 - type: euclidean_spearman value: 81.16944349524972 - type: manhattan_pearson value: 79.84947238492208 - type: manhattan_spearman value: 81.64626599410026 - task: type: Reranking dataset: name: MTEB T2Reranking type: C-MTEB/T2Reranking config: default split: dev revision: 
76631901a18387f85eaa53e5450019b87ad58ef9 metrics: - type: map value: 67.80129586475687 - type: mrr value: 77.77402311635554 - task: type: Retrieval dataset: name: MTEB T2Retrieval type: C-MTEB/T2Retrieval config: default split: dev revision: 8731a845f1bf500a4f111cf1070785c793d10e64 metrics: - type: map_at_1 value: 28.666999999999998 - type: map_at_10 value: 81.063 - type: map_at_100 value: 84.504 - type: map_at_1000 value: 84.552 - type: map_at_3 value: 56.897 - type: map_at_5 value: 70.073 - type: mrr_at_1 value: 92.087 - type: mrr_at_10 value: 94.132 - type: mrr_at_100 value: 94.19800000000001 - type: mrr_at_1000 value: 94.19999999999999 - type: mrr_at_3 value: 93.78999999999999 - type: mrr_at_5 value: 94.002 - type: ndcg_at_1 value: 92.087 - type: ndcg_at_10 value: 87.734 - type: ndcg_at_100 value: 90.736 - type: ndcg_at_1000 value: 91.184 - type: ndcg_at_3 value: 88.78 - type: ndcg_at_5 value: 87.676 - type: precision_at_1 value: 92.087 - type: precision_at_10 value: 43.46 - type: precision_at_100 value: 5.07 - type: precision_at_1000 value: 0.518 - type: precision_at_3 value: 77.49000000000001 - type: precision_at_5 value: 65.194 - type: recall_at_1 value: 28.666999999999998 - type: recall_at_10 value: 86.632 - type: recall_at_100 value: 96.646 - type: recall_at_1000 value: 98.917 - type: recall_at_3 value: 58.333999999999996 - type: recall_at_5 value: 72.974 - task: type: Classification dataset: name: MTEB TNews type: C-MTEB/TNews-classification config: default split: validation revision: 317f262bf1e6126357bbe89e875451e4b0938fe4 metrics: - type: accuracy value: 52.971999999999994 - type: f1 value: 50.2898280984929 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringP2P type: C-MTEB/ThuNewsClusteringP2P config: default split: test revision: 5798586b105c0434e4f0fe5e767abe619442cf93 metrics: - type: v_measure value: 86.0797948663824 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringS2S type: C-MTEB/ThuNewsClusteringS2S config: default split: test revision: 8a8b2caeda43f39e13c4bc5bea0f8a667896e10d metrics: - type: v_measure value: 85.10759092255017 - task: type: Retrieval dataset: name: MTEB VideoRetrieval type: C-MTEB/VideoRetrieval config: default split: dev revision: 58c2597a5943a2ba48f4668c3b90d796283c5639 metrics: - type: map_at_1 value: 65.60000000000001 - type: map_at_10 value: 74.773 - type: map_at_100 value: 75.128 - type: map_at_1000 value: 75.136 - type: map_at_3 value: 73.05 - type: map_at_5 value: 74.13499999999999 - type: mrr_at_1 value: 65.60000000000001 - type: mrr_at_10 value: 74.773 - type: mrr_at_100 value: 75.128 - type: mrr_at_1000 value: 75.136 - type: mrr_at_3 value: 73.05 - type: mrr_at_5 value: 74.13499999999999 - type: ndcg_at_1 value: 65.60000000000001 - type: ndcg_at_10 value: 78.84299999999999 - type: ndcg_at_100 value: 80.40899999999999 - type: ndcg_at_1000 value: 80.57 - type: ndcg_at_3 value: 75.40599999999999 - type: ndcg_at_5 value: 77.351 - type: precision_at_1 value: 65.60000000000001 - type: precision_at_10 value: 9.139999999999999 - type: precision_at_100 value: 0.984 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 27.400000000000002 - type: precision_at_5 value: 17.380000000000003 - type: recall_at_1 value: 65.60000000000001 - type: recall_at_10 value: 91.4 - type: recall_at_100 value: 98.4 - type: recall_at_1000 value: 99.6 - type: recall_at_3 value: 82.19999999999999 - type: recall_at_5 value: 86.9 - task: type: Classification dataset: name: MTEB Waimai type: C-MTEB/waimai-classification config: default 
split: test revision: 339287def212450dcaa9df8c22bf93e9980c7023 metrics: - type: accuracy value: 89.47 - type: ap value: 75.59561751845389 - type: f1 value: 87.95207751382563 - task: type: Clustering dataset: name: MTEB AlloProfClusteringP2P type: lyon-nlp/alloprof config: default split: test revision: 392ba3f5bcc8c51f578786c1fc3dae648662cb9b metrics: - type: v_measure value: 76.05592323841036 - type: v_measure value: 64.51718058866508 - task: type: Reranking dataset: name: MTEB AlloprofReranking type: lyon-nlp/mteb-fr-reranking-alloprof-s2p config: default split: test revision: 666fdacebe0291776e86f29345663dfaf80a0db9 metrics: - type: map value: 73.08278490943373 - type: mrr value: 74.66561454570449 - task: type: Retrieval dataset: name: MTEB AlloprofRetrieval type: lyon-nlp/alloprof config: default split: test revision: 392ba3f5bcc8c51f578786c1fc3dae648662cb9b metrics: - type: map_at_1 value: 38.912 - type: map_at_10 value: 52.437999999999995 - type: map_at_100 value: 53.38 - type: map_at_1000 value: 53.427 - type: map_at_3 value: 48.879 - type: map_at_5 value: 50.934000000000005 - type: mrr_at_1 value: 44.085 - type: mrr_at_10 value: 55.337 - type: mrr_at_100 value: 56.016999999999996 - type: mrr_at_1000 value: 56.043 - type: mrr_at_3 value: 52.55499999999999 - type: mrr_at_5 value: 54.20399999999999 - type: ndcg_at_1 value: 44.085 - type: ndcg_at_10 value: 58.876 - type: ndcg_at_100 value: 62.714000000000006 - type: ndcg_at_1000 value: 63.721000000000004 - type: ndcg_at_3 value: 52.444 - type: ndcg_at_5 value: 55.692 - type: precision_at_1 value: 44.085 - type: precision_at_10 value: 9.21 - type: precision_at_100 value: 1.164 - type: precision_at_1000 value: 0.128 - type: precision_at_3 value: 23.043 - type: precision_at_5 value: 15.898000000000001 - type: recall_at_1 value: 38.912 - type: recall_at_10 value: 75.577 - type: recall_at_100 value: 92.038 - type: recall_at_1000 value: 99.325 - type: recall_at_3 value: 58.592 - type: recall_at_5 value: 66.235 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 55.532000000000004 - type: f1 value: 52.5783943471605 - task: type: Retrieval dataset: name: MTEB BSARDRetrieval type: maastrichtlawtech/bsard config: default split: test revision: 5effa1b9b5fa3b0f9e12523e6e43e5f86a6e6d59 metrics: - type: map_at_1 value: 8.108 - type: map_at_10 value: 14.710999999999999 - type: map_at_100 value: 15.891 - type: map_at_1000 value: 15.983 - type: map_at_3 value: 12.237 - type: map_at_5 value: 13.679 - type: mrr_at_1 value: 8.108 - type: mrr_at_10 value: 14.710999999999999 - type: mrr_at_100 value: 15.891 - type: mrr_at_1000 value: 15.983 - type: mrr_at_3 value: 12.237 - type: mrr_at_5 value: 13.679 - type: ndcg_at_1 value: 8.108 - type: ndcg_at_10 value: 18.796 - type: ndcg_at_100 value: 25.098 - type: ndcg_at_1000 value: 27.951999999999998 - type: ndcg_at_3 value: 13.712 - type: ndcg_at_5 value: 16.309 - type: precision_at_1 value: 8.108 - type: precision_at_10 value: 3.198 - type: precision_at_100 value: 0.626 - type: precision_at_1000 value: 0.086 - type: precision_at_3 value: 6.006 - type: precision_at_5 value: 4.865 - type: recall_at_1 value: 8.108 - type: recall_at_10 value: 31.982 - type: recall_at_100 value: 62.613 - type: recall_at_1000 value: 86.036 - type: recall_at_3 value: 18.018 - type: recall_at_5 value: 24.324 - task: type: Clustering dataset: name: MTEB HALClusteringS2S 
type: lyon-nlp/clustering-hal-s2s config: default split: test revision: e06ebbbb123f8144bef1a5d18796f3dec9ae2915 metrics: - type: v_measure value: 30.833269778867116 - task: type: Clustering dataset: name: MTEB MLSUMClusteringP2P type: mlsum config: default split: test revision: b5d54f8f3b61ae17845046286940f03c6bc79bc7 metrics: - type: v_measure value: 50.0281928004713 - type: v_measure value: 43.699961510636534 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 96.68963357344191 - type: f1 value: 96.45175170820961 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 87.46946445349202 - type: f1 value: 65.79860440988624 - task: type: Classification dataset: name: MTEB MasakhaNEWSClassification (fra) type: masakhane/masakhanews config: fra split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: accuracy value: 82.60663507109005 - type: f1 value: 77.20462646604777 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (fra) type: masakhane/masakhanews config: fra split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: v_measure value: 60.19311264967803 - type: v_measure value: 63.6235764409785 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 81.65097511768661 - type: f1 value: 78.77796091490924 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 86.64425016812373 - type: f1 value: 85.4912728670017 - task: type: Retrieval dataset: name: MTEB MintakaRetrieval (fr) type: jinaai/mintakaqa config: fr split: test revision: efa78cc2f74bbcd21eff2261f9e13aebe40b814e metrics: - type: map_at_1 value: 35.913000000000004 - type: map_at_10 value: 48.147 - type: map_at_100 value: 48.91 - type: map_at_1000 value: 48.949 - type: map_at_3 value: 45.269999999999996 - type: map_at_5 value: 47.115 - type: mrr_at_1 value: 35.913000000000004 - type: mrr_at_10 value: 48.147 - type: mrr_at_100 value: 48.91 - type: mrr_at_1000 value: 48.949 - type: mrr_at_3 value: 45.269999999999996 - type: mrr_at_5 value: 47.115 - type: ndcg_at_1 value: 35.913000000000004 - type: ndcg_at_10 value: 54.03 - type: ndcg_at_100 value: 57.839 - type: ndcg_at_1000 value: 58.925000000000004 - type: ndcg_at_3 value: 48.217999999999996 - type: ndcg_at_5 value: 51.56699999999999 - type: precision_at_1 value: 35.913000000000004 - type: precision_at_10 value: 7.244000000000001 - type: precision_at_100 value: 0.9039999999999999 - type: precision_at_1000 value: 0.099 - type: precision_at_3 value: 18.905 - type: precision_at_5 value: 12.981000000000002 - type: recall_at_1 value: 35.913000000000004 - type: recall_at_10 value: 72.441 - type: recall_at_100 value: 90.41799999999999 - type: recall_at_1000 value: 99.099 - type: recall_at_3 value: 56.716 - type: recall_at_5 value: 64.90599999999999 - task: type: PairClassification dataset: name: MTEB OpusparcusPC (fr) type: GEM/opusparcus config: fr split: test revision: 
9e9b1f8ef51616073f47f306f7f47dd91663f86a metrics: - type: cos_sim_accuracy value: 99.90069513406156 - type: cos_sim_ap value: 100.0 - type: cos_sim_f1 value: 99.95032290114257 - type: cos_sim_precision value: 100.0 - type: cos_sim_recall value: 99.90069513406156 - type: dot_accuracy value: 99.90069513406156 - type: dot_ap value: 100.0 - type: dot_f1 value: 99.95032290114257 - type: dot_precision value: 100.0 - type: dot_recall value: 99.90069513406156 - type: euclidean_accuracy value: 99.90069513406156 - type: euclidean_ap value: 100.0 - type: euclidean_f1 value: 99.95032290114257 - type: euclidean_precision value: 100.0 - type: euclidean_recall value: 99.90069513406156 - type: manhattan_accuracy value: 99.90069513406156 - type: manhattan_ap value: 100.0 - type: manhattan_f1 value: 99.95032290114257 - type: manhattan_precision value: 100.0 - type: manhattan_recall value: 99.90069513406156 - type: max_accuracy value: 99.90069513406156 - type: max_ap value: 100.0 - type: max_f1 value: 99.95032290114257 - task: type: PairClassification dataset: name: MTEB PawsX (fr) type: paws-x config: fr split: test revision: 8a04d940a42cd40658986fdd8e3da561533a3646 metrics: - type: cos_sim_accuracy value: 75.25 - type: cos_sim_ap value: 80.86376001270014 - type: cos_sim_f1 value: 73.65945437441204 - type: cos_sim_precision value: 64.02289452166802 - type: cos_sim_recall value: 86.71096345514951 - type: dot_accuracy value: 75.25 - type: dot_ap value: 80.93686107633002 - type: dot_f1 value: 73.65945437441204 - type: dot_precision value: 64.02289452166802 - type: dot_recall value: 86.71096345514951 - type: euclidean_accuracy value: 75.25 - type: euclidean_ap value: 80.86379136218862 - type: euclidean_f1 value: 73.65945437441204 - type: euclidean_precision value: 64.02289452166802 - type: euclidean_recall value: 86.71096345514951 - type: manhattan_accuracy value: 75.3 - type: manhattan_ap value: 80.87826606097734 - type: manhattan_f1 value: 73.68421052631581 - type: manhattan_precision value: 64.0 - type: manhattan_recall value: 86.82170542635659 - type: max_accuracy value: 75.3 - type: max_ap value: 80.93686107633002 - type: max_f1 value: 73.68421052631581 - task: type: STS dataset: name: MTEB SICKFr type: Lajavaness/SICK-fr config: default split: test revision: e077ab4cf4774a1e36d86d593b150422fafd8e8a metrics: - type: cos_sim_pearson value: 81.42349425981143 - type: cos_sim_spearman value: 78.90454327031226 - type: euclidean_pearson value: 78.39086497435166 - type: euclidean_spearman value: 78.9046133980509 - type: manhattan_pearson value: 78.63743094286502 - type: manhattan_spearman value: 79.12136348449269 - task: type: STS dataset: name: MTEB STS22 (fr) type: mteb/sts22-crosslingual-sts config: fr split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 81.452697919749 - type: cos_sim_spearman value: 82.58116836039301 - type: euclidean_pearson value: 81.04038478932786 - type: euclidean_spearman value: 82.58116836039301 - type: manhattan_pearson value: 81.37075396187771 - type: manhattan_spearman value: 82.73678231355368 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (fr) type: stsb_multi_mt config: fr split: test revision: 93d57ef91790589e3ce9c365164337a8a78b7632 metrics: - type: cos_sim_pearson value: 85.7419764013806 - type: cos_sim_spearman value: 85.46085808849622 - type: euclidean_pearson value: 83.70449639870063 - type: euclidean_spearman value: 85.46159013076233 - type: manhattan_pearson value: 83.95259510313929 - type: 
manhattan_spearman value: 85.8029724659458 - task: type: Summarization dataset: name: MTEB SummEvalFr type: lyon-nlp/summarization-summeval-fr-p2p config: default split: test revision: b385812de6a9577b6f4d0f88c6a6e35395a94054 metrics: - type: cos_sim_pearson value: 32.61063271753325 - type: cos_sim_spearman value: 31.454589417353603 - type: dot_pearson value: 32.6106288643431 - type: dot_spearman value: 31.454589417353603 - task: type: Reranking dataset: name: MTEB SyntecReranking type: lyon-nlp/mteb-fr-reranking-syntec-s2p config: default split: test revision: b205c5084a0934ce8af14338bf03feb19499c84d metrics: - type: map value: 84.31666666666666 - type: mrr value: 84.31666666666666 - task: type: Retrieval dataset: name: MTEB SyntecRetrieval type: lyon-nlp/mteb-fr-retrieval-syntec-s2p config: default split: test revision: 77f7e271bf4a92b24fce5119f3486b583ca016ff metrics: - type: map_at_1 value: 63.0 - type: map_at_10 value: 73.471 - type: map_at_100 value: 73.87 - type: map_at_1000 value: 73.87 - type: map_at_3 value: 70.5 - type: map_at_5 value: 73.05 - type: mrr_at_1 value: 63.0 - type: mrr_at_10 value: 73.471 - type: mrr_at_100 value: 73.87 - type: mrr_at_1000 value: 73.87 - type: mrr_at_3 value: 70.5 - type: mrr_at_5 value: 73.05 - type: ndcg_at_1 value: 63.0 - type: ndcg_at_10 value: 78.255 - type: ndcg_at_100 value: 79.88 - type: ndcg_at_1000 value: 79.88 - type: ndcg_at_3 value: 72.702 - type: ndcg_at_5 value: 77.264 - type: precision_at_1 value: 63.0 - type: precision_at_10 value: 9.3 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 26.333000000000002 - type: precision_at_5 value: 18.0 - type: recall_at_1 value: 63.0 - type: recall_at_10 value: 93.0 - type: recall_at_100 value: 100.0 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 79.0 - type: recall_at_5 value: 90.0 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (fr) type: jinaai/xpqa config: fr split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: map_at_1 value: 40.338 - type: map_at_10 value: 61.927 - type: map_at_100 value: 63.361999999999995 - type: map_at_1000 value: 63.405 - type: map_at_3 value: 55.479 - type: map_at_5 value: 59.732 - type: mrr_at_1 value: 63.551 - type: mrr_at_10 value: 71.006 - type: mrr_at_100 value: 71.501 - type: mrr_at_1000 value: 71.509 - type: mrr_at_3 value: 69.07 - type: mrr_at_5 value: 70.165 - type: ndcg_at_1 value: 63.551 - type: ndcg_at_10 value: 68.297 - type: ndcg_at_100 value: 73.13199999999999 - type: ndcg_at_1000 value: 73.751 - type: ndcg_at_3 value: 62.999 - type: ndcg_at_5 value: 64.89 - type: precision_at_1 value: 63.551 - type: precision_at_10 value: 15.661 - type: precision_at_100 value: 1.9789999999999999 - type: precision_at_1000 value: 0.207 - type: precision_at_3 value: 38.273 - type: precision_at_5 value: 27.61 - type: recall_at_1 value: 40.338 - type: recall_at_10 value: 77.267 - type: recall_at_100 value: 95.892 - type: recall_at_1000 value: 99.75500000000001 - type: recall_at_3 value: 60.36 - type: recall_at_5 value: 68.825 - task: type: Clustering dataset: name: MTEB 8TagsClustering type: PL-MTEB/8tags-clustering config: default split: test revision: None metrics: - type: v_measure value: 51.36126303874126 - task: type: Classification dataset: name: MTEB AllegroReviews type: PL-MTEB/allegro-reviews config: default split: test revision: None metrics: - type: accuracy value: 67.13717693836979 - type: f1 value: 57.27609848003782 - task: type: Retrieval dataset: name: MTEB 
ArguAna-PL type: clarin-knext/arguana-pl config: default split: test revision: 63fc86750af76253e8c760fc9e534bbf24d260a2 metrics: - type: map_at_1 value: 35.276999999999994 - type: map_at_10 value: 51.086 - type: map_at_100 value: 51.788000000000004 - type: map_at_1000 value: 51.791 - type: map_at_3 value: 46.147 - type: map_at_5 value: 49.078 - type: mrr_at_1 value: 35.917 - type: mrr_at_10 value: 51.315999999999995 - type: mrr_at_100 value: 52.018 - type: mrr_at_1000 value: 52.022 - type: mrr_at_3 value: 46.349000000000004 - type: mrr_at_5 value: 49.297000000000004 - type: ndcg_at_1 value: 35.276999999999994 - type: ndcg_at_10 value: 59.870999999999995 - type: ndcg_at_100 value: 62.590999999999994 - type: ndcg_at_1000 value: 62.661 - type: ndcg_at_3 value: 49.745 - type: ndcg_at_5 value: 55.067 - type: precision_at_1 value: 35.276999999999994 - type: precision_at_10 value: 8.791 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 20.057 - type: precision_at_5 value: 14.637 - type: recall_at_1 value: 35.276999999999994 - type: recall_at_10 value: 87.909 - type: recall_at_100 value: 99.14699999999999 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 60.171 - type: recall_at_5 value: 73.18599999999999 - task: type: Classification dataset: name: MTEB CBD type: PL-MTEB/cbd config: default split: test revision: None metrics: - type: accuracy value: 78.03000000000002 - type: ap value: 29.12548553897622 - type: f1 value: 66.54857118886073 - task: type: PairClassification dataset: name: MTEB CDSC-E type: PL-MTEB/cdsce-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 89.0 - type: cos_sim_ap value: 76.75437826834582 - type: cos_sim_f1 value: 66.4850136239782 - type: cos_sim_precision value: 68.92655367231639 - type: cos_sim_recall value: 64.21052631578948 - type: dot_accuracy value: 89.0 - type: dot_ap value: 76.75437826834582 - type: dot_f1 value: 66.4850136239782 - type: dot_precision value: 68.92655367231639 - type: dot_recall value: 64.21052631578948 - type: euclidean_accuracy value: 89.0 - type: euclidean_ap value: 76.75437826834582 - type: euclidean_f1 value: 66.4850136239782 - type: euclidean_precision value: 68.92655367231639 - type: euclidean_recall value: 64.21052631578948 - type: manhattan_accuracy value: 89.0 - type: manhattan_ap value: 76.66074220647083 - type: manhattan_f1 value: 66.47058823529412 - type: manhattan_precision value: 75.33333333333333 - type: manhattan_recall value: 59.473684210526315 - type: max_accuracy value: 89.0 - type: max_ap value: 76.75437826834582 - type: max_f1 value: 66.4850136239782 - task: type: STS dataset: name: MTEB CDSC-R type: PL-MTEB/cdscr-sts config: default split: test revision: None metrics: - type: cos_sim_pearson value: 93.12903172428328 - type: cos_sim_spearman value: 92.66381487060741 - type: euclidean_pearson value: 90.37278396708922 - type: euclidean_spearman value: 92.66381487060741 - type: manhattan_pearson value: 90.32503296540962 - type: manhattan_spearman value: 92.6902938354313 - task: type: Retrieval dataset: name: MTEB DBPedia-PL type: clarin-knext/dbpedia-pl config: default split: test revision: 76afe41d9af165cc40999fcaa92312b8b012064a metrics: - type: map_at_1 value: 8.83 - type: map_at_10 value: 18.326 - type: map_at_100 value: 26.496 - type: map_at_1000 value: 28.455000000000002 - type: map_at_3 value: 12.933 - type: map_at_5 value: 15.168000000000001 - type: mrr_at_1 value: 66.0 - type: mrr_at_10 value: 
72.76700000000001 - type: mrr_at_100 value: 73.203 - type: mrr_at_1000 value: 73.219 - type: mrr_at_3 value: 71.458 - type: mrr_at_5 value: 72.246 - type: ndcg_at_1 value: 55.375 - type: ndcg_at_10 value: 41.3 - type: ndcg_at_100 value: 45.891 - type: ndcg_at_1000 value: 52.905 - type: ndcg_at_3 value: 46.472 - type: ndcg_at_5 value: 43.734 - type: precision_at_1 value: 66.0 - type: precision_at_10 value: 33.074999999999996 - type: precision_at_100 value: 11.094999999999999 - type: precision_at_1000 value: 2.374 - type: precision_at_3 value: 48.583 - type: precision_at_5 value: 42.0 - type: recall_at_1 value: 8.83 - type: recall_at_10 value: 22.587 - type: recall_at_100 value: 50.61600000000001 - type: recall_at_1000 value: 73.559 - type: recall_at_3 value: 13.688 - type: recall_at_5 value: 16.855 - task: type: Retrieval dataset: name: MTEB FiQA-PL type: clarin-knext/fiqa-pl config: default split: test revision: 2e535829717f8bf9dc829b7f911cc5bbd4e6608e metrics: - type: map_at_1 value: 20.587 - type: map_at_10 value: 33.095 - type: map_at_100 value: 35.24 - type: map_at_1000 value: 35.429 - type: map_at_3 value: 28.626 - type: map_at_5 value: 31.136999999999997 - type: mrr_at_1 value: 40.586 - type: mrr_at_10 value: 49.033 - type: mrr_at_100 value: 49.952999999999996 - type: mrr_at_1000 value: 49.992 - type: mrr_at_3 value: 46.553 - type: mrr_at_5 value: 48.035 - type: ndcg_at_1 value: 40.586 - type: ndcg_at_10 value: 41.046 - type: ndcg_at_100 value: 48.586 - type: ndcg_at_1000 value: 51.634 - type: ndcg_at_3 value: 36.773 - type: ndcg_at_5 value: 38.389 - type: precision_at_1 value: 40.586 - type: precision_at_10 value: 11.466 - type: precision_at_100 value: 1.909 - type: precision_at_1000 value: 0.245 - type: precision_at_3 value: 24.434 - type: precision_at_5 value: 18.426000000000002 - type: recall_at_1 value: 20.587 - type: recall_at_10 value: 47.986000000000004 - type: recall_at_100 value: 75.761 - type: recall_at_1000 value: 94.065 - type: recall_at_3 value: 33.339 - type: recall_at_5 value: 39.765 - task: type: Retrieval dataset: name: MTEB HotpotQA-PL type: clarin-knext/hotpotqa-pl config: default split: test revision: a0bd479ac97b4ccb5bd6ce320c415d0bb4beb907 metrics: - type: map_at_1 value: 40.878 - type: map_at_10 value: 58.775999999999996 - type: map_at_100 value: 59.632 - type: map_at_1000 value: 59.707 - type: map_at_3 value: 56.074 - type: map_at_5 value: 57.629 - type: mrr_at_1 value: 81.756 - type: mrr_at_10 value: 86.117 - type: mrr_at_100 value: 86.299 - type: mrr_at_1000 value: 86.30600000000001 - type: mrr_at_3 value: 85.345 - type: mrr_at_5 value: 85.832 - type: ndcg_at_1 value: 81.756 - type: ndcg_at_10 value: 67.608 - type: ndcg_at_100 value: 70.575 - type: ndcg_at_1000 value: 71.99600000000001 - type: ndcg_at_3 value: 63.723 - type: ndcg_at_5 value: 65.70700000000001 - type: precision_at_1 value: 81.756 - type: precision_at_10 value: 13.619 - type: precision_at_100 value: 1.5939999999999999 - type: precision_at_1000 value: 0.178 - type: precision_at_3 value: 39.604 - type: precision_at_5 value: 25.332 - type: recall_at_1 value: 40.878 - type: recall_at_10 value: 68.096 - type: recall_at_100 value: 79.696 - type: recall_at_1000 value: 89.082 - type: recall_at_3 value: 59.406000000000006 - type: recall_at_5 value: 63.329 - task: type: Retrieval dataset: name: MTEB MSMARCO-PL type: clarin-knext/msmarco-pl config: default split: test revision: 8634c07806d5cce3a6138e260e59b81760a0a640 metrics: - type: map_at_1 value: 2.1839999999999997 - type: map_at_10 value: 11.346 - 
type: map_at_100 value: 30.325000000000003 - type: map_at_1000 value: 37.806 - type: map_at_3 value: 4.842 - type: map_at_5 value: 6.891 - type: mrr_at_1 value: 86.047 - type: mrr_at_10 value: 89.14699999999999 - type: mrr_at_100 value: 89.46600000000001 - type: mrr_at_1000 value: 89.46600000000001 - type: mrr_at_3 value: 89.14699999999999 - type: mrr_at_5 value: 89.14699999999999 - type: ndcg_at_1 value: 67.829 - type: ndcg_at_10 value: 62.222 - type: ndcg_at_100 value: 55.337 - type: ndcg_at_1000 value: 64.076 - type: ndcg_at_3 value: 68.12700000000001 - type: ndcg_at_5 value: 64.987 - type: precision_at_1 value: 86.047 - type: precision_at_10 value: 69.535 - type: precision_at_100 value: 32.93 - type: precision_at_1000 value: 6.6049999999999995 - type: precision_at_3 value: 79.845 - type: precision_at_5 value: 75.349 - type: recall_at_1 value: 2.1839999999999997 - type: recall_at_10 value: 12.866 - type: recall_at_100 value: 43.505 - type: recall_at_1000 value: 72.366 - type: recall_at_3 value: 4.947 - type: recall_at_5 value: 7.192 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 80.75319435104238 - type: f1 value: 77.58961444860606 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 85.54472091459313 - type: f1 value: 84.29498563572106 - task: type: Retrieval dataset: name: MTEB NFCorpus-PL type: clarin-knext/nfcorpus-pl config: default split: test revision: 9a6f9567fda928260afed2de480d79c98bf0bec0 metrics: - type: map_at_1 value: 4.367 - type: map_at_10 value: 10.38 - type: map_at_100 value: 13.516 - type: map_at_1000 value: 14.982000000000001 - type: map_at_3 value: 7.367 - type: map_at_5 value: 8.59 - type: mrr_at_1 value: 41.486000000000004 - type: mrr_at_10 value: 48.886 - type: mrr_at_100 value: 49.657000000000004 - type: mrr_at_1000 value: 49.713 - type: mrr_at_3 value: 46.904 - type: mrr_at_5 value: 48.065000000000005 - type: ndcg_at_1 value: 40.402 - type: ndcg_at_10 value: 30.885 - type: ndcg_at_100 value: 28.393 - type: ndcg_at_1000 value: 37.428 - type: ndcg_at_3 value: 35.394999999999996 - type: ndcg_at_5 value: 33.391999999999996 - type: precision_at_1 value: 41.486000000000004 - type: precision_at_10 value: 23.437 - type: precision_at_100 value: 7.638 - type: precision_at_1000 value: 2.0389999999999997 - type: precision_at_3 value: 32.817 - type: precision_at_5 value: 28.915999999999997 - type: recall_at_1 value: 4.367 - type: recall_at_10 value: 14.655000000000001 - type: recall_at_100 value: 29.665999999999997 - type: recall_at_1000 value: 62.073 - type: recall_at_3 value: 8.51 - type: recall_at_5 value: 10.689 - task: type: Retrieval dataset: name: MTEB NQ-PL type: clarin-knext/nq-pl config: default split: test revision: f171245712cf85dd4700b06bef18001578d0ca8d metrics: - type: map_at_1 value: 28.616000000000003 - type: map_at_10 value: 41.626000000000005 - type: map_at_100 value: 42.689 - type: map_at_1000 value: 42.733 - type: map_at_3 value: 37.729 - type: map_at_5 value: 39.879999999999995 - type: mrr_at_1 value: 32.068000000000005 - type: mrr_at_10 value: 44.029 - type: mrr_at_100 value: 44.87 - type: mrr_at_1000 value: 44.901 - type: mrr_at_3 value: 40.687 - type: mrr_at_5 value: 42.625 - type: ndcg_at_1 value: 
32.068000000000005 - type: ndcg_at_10 value: 48.449999999999996 - type: ndcg_at_100 value: 53.13 - type: ndcg_at_1000 value: 54.186 - type: ndcg_at_3 value: 40.983999999999995 - type: ndcg_at_5 value: 44.628 - type: precision_at_1 value: 32.068000000000005 - type: precision_at_10 value: 7.9750000000000005 - type: precision_at_100 value: 1.061 - type: precision_at_1000 value: 0.116 - type: precision_at_3 value: 18.404999999999998 - type: precision_at_5 value: 13.111 - type: recall_at_1 value: 28.616000000000003 - type: recall_at_10 value: 66.956 - type: recall_at_100 value: 87.657 - type: recall_at_1000 value: 95.548 - type: recall_at_3 value: 47.453 - type: recall_at_5 value: 55.87800000000001 - task: type: Classification dataset: name: MTEB PAC type: laugustyniak/abusive-clauses-pl config: default split: test revision: None metrics: - type: accuracy value: 69.04141326382856 - type: ap value: 77.47589122111044 - type: f1 value: 66.6332277374775 - task: type: PairClassification dataset: name: MTEB PPC type: PL-MTEB/ppc-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 86.4 - type: cos_sim_ap value: 94.1044939667201 - type: cos_sim_f1 value: 88.78048780487805 - type: cos_sim_precision value: 87.22044728434504 - type: cos_sim_recall value: 90.39735099337747 - type: dot_accuracy value: 86.4 - type: dot_ap value: 94.1044939667201 - type: dot_f1 value: 88.78048780487805 - type: dot_precision value: 87.22044728434504 - type: dot_recall value: 90.39735099337747 - type: euclidean_accuracy value: 86.4 - type: euclidean_ap value: 94.1044939667201 - type: euclidean_f1 value: 88.78048780487805 - type: euclidean_precision value: 87.22044728434504 - type: euclidean_recall value: 90.39735099337747 - type: manhattan_accuracy value: 86.4 - type: manhattan_ap value: 94.11438365697387 - type: manhattan_f1 value: 88.77968877968877 - type: manhattan_precision value: 87.84440842787681 - type: manhattan_recall value: 89.73509933774835 - type: max_accuracy value: 86.4 - type: max_ap value: 94.11438365697387 - type: max_f1 value: 88.78048780487805 - task: type: PairClassification dataset: name: MTEB PSC type: PL-MTEB/psc-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 97.86641929499072 - type: cos_sim_ap value: 99.36904211868182 - type: cos_sim_f1 value: 96.56203288490283 - type: cos_sim_precision value: 94.72140762463343 - type: cos_sim_recall value: 98.47560975609755 - type: dot_accuracy value: 97.86641929499072 - type: dot_ap value: 99.36904211868183 - type: dot_f1 value: 96.56203288490283 - type: dot_precision value: 94.72140762463343 - type: dot_recall value: 98.47560975609755 - type: euclidean_accuracy value: 97.86641929499072 - type: euclidean_ap value: 99.36904211868183 - type: euclidean_f1 value: 96.56203288490283 - type: euclidean_precision value: 94.72140762463343 - type: euclidean_recall value: 98.47560975609755 - type: manhattan_accuracy value: 98.14471243042672 - type: manhattan_ap value: 99.43359540492416 - type: manhattan_f1 value: 96.98795180722892 - type: manhattan_precision value: 95.83333333333334 - type: manhattan_recall value: 98.17073170731707 - type: max_accuracy value: 98.14471243042672 - type: max_ap value: 99.43359540492416 - type: max_f1 value: 96.98795180722892 - task: type: Classification dataset: name: MTEB PolEmo2.0-IN type: PL-MTEB/polemo2_in config: default split: test revision: None metrics: - type: accuracy value: 89.39058171745152 - type: f1 value: 86.8552093529568 - task: 
type: Classification dataset: name: MTEB PolEmo2.0-OUT type: PL-MTEB/polemo2_out config: default split: test revision: None metrics: - type: accuracy value: 74.97975708502024 - type: f1 value: 58.73081628832407 - task: type: Retrieval dataset: name: MTEB Quora-PL type: clarin-knext/quora-pl config: default split: test revision: 0be27e93455051e531182b85e85e425aba12e9d4 metrics: - type: map_at_1 value: 64.917 - type: map_at_10 value: 78.74600000000001 - type: map_at_100 value: 79.501 - type: map_at_1000 value: 79.524 - type: map_at_3 value: 75.549 - type: map_at_5 value: 77.495 - type: mrr_at_1 value: 74.9 - type: mrr_at_10 value: 82.112 - type: mrr_at_100 value: 82.314 - type: mrr_at_1000 value: 82.317 - type: mrr_at_3 value: 80.745 - type: mrr_at_5 value: 81.607 - type: ndcg_at_1 value: 74.83999999999999 - type: ndcg_at_10 value: 83.214 - type: ndcg_at_100 value: 84.997 - type: ndcg_at_1000 value: 85.207 - type: ndcg_at_3 value: 79.547 - type: ndcg_at_5 value: 81.46600000000001 - type: precision_at_1 value: 74.83999999999999 - type: precision_at_10 value: 12.822 - type: precision_at_100 value: 1.506 - type: precision_at_1000 value: 0.156 - type: precision_at_3 value: 34.903 - type: precision_at_5 value: 23.16 - type: recall_at_1 value: 64.917 - type: recall_at_10 value: 92.27199999999999 - type: recall_at_100 value: 98.715 - type: recall_at_1000 value: 99.854 - type: recall_at_3 value: 82.04599999999999 - type: recall_at_5 value: 87.2 - task: type: Retrieval dataset: name: MTEB SCIDOCS-PL type: clarin-knext/scidocs-pl config: default split: test revision: 45452b03f05560207ef19149545f168e596c9337 metrics: - type: map_at_1 value: 3.51 - type: map_at_10 value: 9.046999999999999 - type: map_at_100 value: 10.823 - type: map_at_1000 value: 11.144 - type: map_at_3 value: 6.257 - type: map_at_5 value: 7.648000000000001 - type: mrr_at_1 value: 17.299999999999997 - type: mrr_at_10 value: 27.419 - type: mrr_at_100 value: 28.618 - type: mrr_at_1000 value: 28.685 - type: mrr_at_3 value: 23.817 - type: mrr_at_5 value: 25.927 - type: ndcg_at_1 value: 17.299999999999997 - type: ndcg_at_10 value: 16.084 - type: ndcg_at_100 value: 23.729 - type: ndcg_at_1000 value: 29.476999999999997 - type: ndcg_at_3 value: 14.327000000000002 - type: ndcg_at_5 value: 13.017999999999999 - type: precision_at_1 value: 17.299999999999997 - type: precision_at_10 value: 8.63 - type: precision_at_100 value: 1.981 - type: precision_at_1000 value: 0.336 - type: precision_at_3 value: 13.4 - type: precision_at_5 value: 11.700000000000001 - type: recall_at_1 value: 3.51 - type: recall_at_10 value: 17.518 - type: recall_at_100 value: 40.275 - type: recall_at_1000 value: 68.203 - type: recall_at_3 value: 8.155 - type: recall_at_5 value: 11.875 - task: type: PairClassification dataset: name: MTEB SICK-E-PL type: PL-MTEB/sicke-pl-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 86.30248675091724 - type: cos_sim_ap value: 83.6756734006714 - type: cos_sim_f1 value: 74.97367497367497 - type: cos_sim_precision value: 73.91003460207612 - type: cos_sim_recall value: 76.06837606837607 - type: dot_accuracy value: 86.30248675091724 - type: dot_ap value: 83.6756734006714 - type: dot_f1 value: 74.97367497367497 - type: dot_precision value: 73.91003460207612 - type: dot_recall value: 76.06837606837607 - type: euclidean_accuracy value: 86.30248675091724 - type: euclidean_ap value: 83.67566984333091 - type: euclidean_f1 value: 74.97367497367497 - type: euclidean_precision value: 73.91003460207612 - 
type: euclidean_recall value: 76.06837606837607 - type: manhattan_accuracy value: 86.28210354667753 - type: manhattan_ap value: 83.64216119130171 - type: manhattan_f1 value: 74.92152075340078 - type: manhattan_precision value: 73.4107997265892 - type: manhattan_recall value: 76.49572649572649 - type: max_accuracy value: 86.30248675091724 - type: max_ap value: 83.6756734006714 - type: max_f1 value: 74.97367497367497 - task: type: STS dataset: name: MTEB SICK-R-PL type: PL-MTEB/sickr-pl-sts config: default split: test revision: None metrics: - type: cos_sim_pearson value: 82.23295940859121 - type: cos_sim_spearman value: 78.89329160768719 - type: euclidean_pearson value: 79.56019107076818 - type: euclidean_spearman value: 78.89330209904084 - type: manhattan_pearson value: 79.76098513973719 - type: manhattan_spearman value: 79.05490162570123 - task: type: STS dataset: name: MTEB STS22 (pl) type: mteb/sts22-crosslingual-sts config: pl split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 37.732606308062486 - type: cos_sim_spearman value: 41.01645667030284 - type: euclidean_pearson value: 26.61722556367085 - type: euclidean_spearman value: 41.01645667030284 - type: manhattan_pearson value: 26.60917378970807 - type: manhattan_spearman value: 41.51335727617614 - task: type: Retrieval dataset: name: MTEB SciFact-PL type: clarin-knext/scifact-pl config: default split: test revision: 47932a35f045ef8ed01ba82bf9ff67f6e109207e metrics: - type: map_at_1 value: 54.31700000000001 - type: map_at_10 value: 65.564 - type: map_at_100 value: 66.062 - type: map_at_1000 value: 66.08699999999999 - type: map_at_3 value: 62.592999999999996 - type: map_at_5 value: 63.888 - type: mrr_at_1 value: 56.99999999999999 - type: mrr_at_10 value: 66.412 - type: mrr_at_100 value: 66.85900000000001 - type: mrr_at_1000 value: 66.88 - type: mrr_at_3 value: 64.22200000000001 - type: mrr_at_5 value: 65.206 - type: ndcg_at_1 value: 56.99999999999999 - type: ndcg_at_10 value: 70.577 - type: ndcg_at_100 value: 72.879 - type: ndcg_at_1000 value: 73.45 - type: ndcg_at_3 value: 65.5 - type: ndcg_at_5 value: 67.278 - type: precision_at_1 value: 56.99999999999999 - type: precision_at_10 value: 9.667 - type: precision_at_100 value: 1.083 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 26.0 - type: precision_at_5 value: 16.933 - type: recall_at_1 value: 54.31700000000001 - type: recall_at_10 value: 85.056 - type: recall_at_100 value: 95.667 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 71.0 - type: recall_at_5 value: 75.672 - task: type: Retrieval dataset: name: MTEB TRECCOVID-PL type: clarin-knext/trec-covid-pl config: default split: test revision: 81bcb408f33366c2a20ac54adafad1ae7e877fdd metrics: - type: map_at_1 value: 0.245 - type: map_at_10 value: 2.051 - type: map_at_100 value: 12.009 - type: map_at_1000 value: 27.448 - type: map_at_3 value: 0.721 - type: map_at_5 value: 1.13 - type: mrr_at_1 value: 88.0 - type: mrr_at_10 value: 93.0 - type: mrr_at_100 value: 93.0 - type: mrr_at_1000 value: 93.0 - type: mrr_at_3 value: 93.0 - type: mrr_at_5 value: 93.0 - type: ndcg_at_1 value: 85.0 - type: ndcg_at_10 value: 80.303 - type: ndcg_at_100 value: 61.23499999999999 - type: ndcg_at_1000 value: 52.978 - type: ndcg_at_3 value: 84.419 - type: ndcg_at_5 value: 82.976 - type: precision_at_1 value: 88.0 - type: precision_at_10 value: 83.39999999999999 - type: precision_at_100 value: 61.96 - type: precision_at_1000 value: 22.648 - type: precision_at_3 
value: 89.333 - type: precision_at_5 value: 87.2 - type: recall_at_1 value: 0.245 - type: recall_at_10 value: 2.193 - type: recall_at_100 value: 14.938 - type: recall_at_1000 value: 48.563 - type: recall_at_3 value: 0.738 - type: recall_at_5 value: 1.173
---

## gte-Qwen2-7B-instruct

**gte-Qwen2-7B-instruct** is the latest model in the gte (General Text Embedding) model family and ranks **No.1** in both the English and Chinese evaluations of the [Massive Text Embedding Benchmark (MTEB)](https://huggingface.co/spaces/mteb/leaderboard) (as of June 16, 2024).

Recently, the [**Qwen team**](https://huggingface.co/Qwen) released the Qwen2 series models, and we have trained the **gte-Qwen2-7B-instruct** model based on the [Qwen2-7B](https://huggingface.co/Qwen/Qwen2-7B) LLM model. Compared to the [gte-Qwen1.5-7B-instruct](https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct) model, the **gte-Qwen2-7B-instruct** model uses the same training data and training strategies during the finetuning stage, with the only difference being the base model upgraded to Qwen2-7B. Considering the improvements of the Qwen2 series over the Qwen1.5 series, we can also expect consistent performance gains in the embedding models.

The model incorporates several key advancements:

- Integration of bidirectional attention mechanisms, enriching its contextual understanding.
- Instruction tuning, applied solely on the query side for streamlined efficiency.
- Comprehensive training across a vast, multilingual text corpus spanning diverse domains and scenarios. This training leverages both weakly supervised and supervised data, ensuring the model's applicability across numerous languages and a wide array of downstream tasks.

## Model Information
- Model Size: 7B
- Embedding Dimension: 3584
- Max Input Tokens: 32k

## Requirements
```
transformers>=4.39.2
flash_attn>=2.5.6
```

## Usage

### Sentence Transformers

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("Alibaba-NLP/gte-Qwen2-7B-instruct", trust_remote_code=True)
# In case you want to reduce the maximum length:
model.max_seq_length = 8192

queries = [
    "how much protein should a female eat",
    "summit define",
]
documents = [
    "As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments.",
]

query_embeddings = model.encode(queries, prompt_name="query")
document_embeddings = model.encode(documents)

scores = (query_embeddings @ document_embeddings.T) * 100
print(scores.tolist())
```

See the [config_sentence_transformers.json](config_sentence_transformers.json) file for all pre-built prompt names. Otherwise, you can use `model.encode(queries, prompt="Instruct: ...\nQuery: ")` to pass a custom prompt of your choice.
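As a minimal sketch of that custom-prompt option (continuing from the snippet above, so `model`, `queries`, and `documents` are assumed to already be defined), the example below builds an instruction string via `prompt=`; the task wording mirrors the instruction used in the Transformers example further down and can be swapped for any one-sentence task description.

```python
# Minimal sketch: encode queries with a custom instruction prompt.
# Assumes `model`, `queries`, and `documents` from the previous snippet are in scope.
task = "Given a web search query, retrieve relevant passages that answer the query"
custom_prompt = f"Instruct: {task}\nQuery: "

# Only the queries receive the instruction; documents are encoded without a prompt.
query_embeddings = model.encode(queries, prompt=custom_prompt)
document_embeddings = model.encode(documents)

scores = (query_embeddings @ document_embeddings.T) * 100
print(scores.tolist())
```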
### Transformers

```python
import torch
import torch.nn.functional as F
from torch import Tensor
from transformers import AutoTokenizer, AutoModel


def last_token_pool(last_hidden_states: Tensor,
                    attention_mask: Tensor) -> Tensor:
    left_padding = (attention_mask[:, -1].sum() == attention_mask.shape[0])
    if left_padding:
        return last_hidden_states[:, -1]
    else:
        sequence_lengths = attention_mask.sum(dim=1) - 1
        batch_size = last_hidden_states.shape[0]
        return last_hidden_states[torch.arange(batch_size, device=last_hidden_states.device), sequence_lengths]


def get_detailed_instruct(task_description: str, query: str) -> str:
    return f'Instruct: {task_description}\nQuery: {query}'


# Each query must come with a one-sentence instruction that describes the task
task = 'Given a web search query, retrieve relevant passages that answer the query'
queries = [
    get_detailed_instruct(task, 'how much protein should a female eat'),
    get_detailed_instruct(task, 'summit define')
]
# No need to add instruction for retrieval documents
documents = [
    "As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments."
]
input_texts = queries + documents

tokenizer = AutoTokenizer.from_pretrained('Alibaba-NLP/gte-Qwen2-7B-instruct', trust_remote_code=True)
model = AutoModel.from_pretrained('Alibaba-NLP/gte-Qwen2-7B-instruct', trust_remote_code=True)

max_length = 8192

# Tokenize the input texts
batch_dict = tokenizer(input_texts, max_length=max_length, padding=True, truncation=True, return_tensors='pt')
outputs = model(**batch_dict)
embeddings = last_token_pool(outputs.last_hidden_state, batch_dict['attention_mask'])

# normalize embeddings
embeddings = F.normalize(embeddings, p=2, dim=1)
scores = (embeddings[:2] @ embeddings[2:].T) * 100
print(scores.tolist())
```

## Infinity_emb

Usage via [infinity](https://github.com/michaelfeil/infinity), an MIT-licensed inference server.
```
# requires ~16-32GB VRAM NVIDIA Compute Capability >= 8.0
docker run \
-v $PWD/data:/app/.cache --gpus "0" -p "7997":"7997" \
michaelf34/infinity:0.0.68-trt-onnx \
v2 --model-id Alibaba-NLP/gte-Qwen2-7B-instruct --revision "refs/pr/38" --dtype bfloat16 --batch-size 8 --device cuda --engine torch --port 7997 --no-bettertransformer
```

## Evaluation

### MTEB & C-MTEB

You can use [scripts/eval_mteb.py](https://huggingface.co/Alibaba-NLP/gte-Qwen2-7B-instruct/blob/main/scripts/eval_mteb.py) to reproduce the following results of **gte-Qwen2-7B-instruct** on MTEB (English) / C-MTEB (Chinese):

| Model Name | MTEB(56) | C-MTEB(35) | MTEB-fr(26) | MTEB-pl(26) |
|:----:|:---------:|:----------:|:----------:|:----------:|
| [bge-base-en-1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | 64.23 | - | - | - |
| [bge-large-en-1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | 63.55 | - | - | - |
| [gte-large-en-v1.5](https://huggingface.co/Alibaba-NLP/gte-large-en-v1.5) | 65.39 | - | - | - |
| [gte-base-en-v1.5](https://huggingface.co/Alibaba-NLP/gte-base-en-v1.5) | 64.11 | - | - | - |
| [mxbai-embed-large-v1](https://huggingface.co/mixedbread-ai/mxbai-embed-large-v1) | 64.68 | - | - | - |
| [acge_text_embedding](https://huggingface.co/aspire/acge_text_embedding) | - | 69.07 | - | - |
| [stella-mrl-large-zh-v3.5-1792d](https://huggingface.co/infgrad/stella-mrl-large-zh-v3.5-1792d) | - | 68.55 | - | - |
| [gte-large-zh](https://huggingface.co/thenlper/gte-large-zh) | - | 66.72 | - | - |
| [multilingual-e5-base](https://huggingface.co/intfloat/multilingual-e5-base) | 59.45 | 56.21 | - | - |
| [multilingual-e5-large](https://huggingface.co/intfloat/multilingual-e5-large) | 61.50 | 58.81 | - | - |
| [e5-mistral-7b-instruct](https://huggingface.co/intfloat/e5-mistral-7b-instruct) | 66.63 | 60.81 | - | - |
| [gte-Qwen1.5-7B-instruct](https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct) | 67.34 | 69.52 | - | - |
| [NV-Embed-v1](https://huggingface.co/nvidia/NV-Embed-v1) | 69.32 | - | - | - |
| [**gte-Qwen2-7B-instruct**](https://huggingface.co/Alibaba-NLP/gte-Qwen2-7B-instruct) | **70.24** | **72.05** | **68.25** | **67.86** |
| [gte-Qwen2-1.5B-instruct](https://huggingface.co/Alibaba-NLP/gte-Qwen2-1.5B-instruct) | 67.16 | 67.65 | 66.60 | 64.04 |

### GTE Models

The gte series has consistently released two types of models: encoder-only models (based on the BERT architecture) and decoder-only models (based on the LLM architecture).
| Models | Language | Max Sequence Length | Dimension | Model Size (Memory Usage, fp32) |
|:-------------------------------------------------------------------------------------:|:--------:|:-----:|:---------:|:-------------------------------:|
| [GTE-large-zh](https://huggingface.co/thenlper/gte-large-zh) | Chinese | 512 | 1024 | 1.25GB |
| [GTE-base-zh](https://huggingface.co/thenlper/gte-base-zh) | Chinese | 512 | 512 | 0.41GB |
| [GTE-small-zh](https://huggingface.co/thenlper/gte-small-zh) | Chinese | 512 | 512 | 0.12GB |
| [GTE-large](https://huggingface.co/thenlper/gte-large) | English | 512 | 1024 | 1.25GB |
| [GTE-base](https://huggingface.co/thenlper/gte-base) | English | 512 | 512 | 0.21GB |
| [GTE-small](https://huggingface.co/thenlper/gte-small) | English | 512 | 384 | 0.10GB |
| [GTE-large-en-v1.5](https://huggingface.co/Alibaba-NLP/gte-large-en-v1.5) | English | 8192 | 1024 | 1.74GB |
| [GTE-base-en-v1.5](https://huggingface.co/Alibaba-NLP/gte-base-en-v1.5) | English | 8192 | 768 | 0.51GB |
| [GTE-Qwen1.5-7B-instruct](https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct) | Multilingual | 32000 | 4096 | 26.45GB |
| [GTE-Qwen2-7B-instruct](https://huggingface.co/Alibaba-NLP/gte-Qwen2-7B-instruct) | Multilingual | 32000 | 3584 | 26.45GB |
| [GTE-Qwen2-1.5B-instruct](https://huggingface.co/Alibaba-NLP/gte-Qwen2-1.5B-instruct) | Multilingual | 32000 | 1536 | 6.62GB |

## Cloud API Services

In addition to the open-source [GTE](https://huggingface.co/collections/Alibaba-NLP/gte-models-6680f0b13f885cb431e6d469) series models, the GTE models are also available as commercial API services on Alibaba Cloud.

- [Embedding Models](https://help.aliyun.com/zh/model-studio/developer-reference/general-text-embedding/): Three versions of the text embedding models are available: text-embedding-v1/v2/v3, with v3 being the latest API service.
- [ReRank Models](https://help.aliyun.com/zh/model-studio/developer-reference/general-text-sorting-model/): The gte-rerank model service is available.

Note that the models behind the commercial APIs are not entirely identical to the open-source models.

## Citation

If you find our paper or models helpful, please consider citing:

```
@article{li2023towards,
  title={Towards general text embeddings with multi-stage contrastive learning},
  author={Li, Zehan and Zhang, Xin and Zhang, Yanzhao and Long, Dingkun and Xie, Pengjun and Zhang, Meishan},
  journal={arXiv preprint arXiv:2308.03281},
  year={2023}
}
```
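## Minimal MTEB evaluation (sketch)

As a lighter-weight alternative to the `scripts/eval_mteb.py` script referenced in the Evaluation section above, a single MTEB task can presumably be run with the `mteb` package. The task name and output folder below are illustrative assumptions, not the exact 56-task reproduction setup.

```python
# Sketch: run one MTEB task locally with the `mteb` package.
# Assumptions: the model loads via sentence-transformers with trust_remote_code=True,
# and the chosen task / output folder are illustrative only.
from mteb import MTEB
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("Alibaba-NLP/gte-Qwen2-7B-instruct", trust_remote_code=True)

evaluation = MTEB(tasks=["Banking77Classification"])  # any MTEB task name
results = evaluation.run(model, output_folder="results/gte-Qwen2-7B-instruct")
print(results)
```

Note that encoding with the 7B model requires a GPU with substantial memory, as discussed in the deployment section above.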
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
StivenLancheros/biobert-base-cased-v1.2-finetuned-ner-CRAFT_AugmentedTransfer_EN
StivenLancheros
token-classification
[ "transformers", "pytorch", "tensorboard", "bert", "token-classification", "generated_from_trainer", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2022-03-17T13:21:18
2022-03-17T14:51:01
118
0
---
metrics:
- precision
- recall
- f1
- accuracy
tags:
- generated_from_trainer
model-index:
- name: biobert-base-cased-v1.2-finetuned-ner-CRAFT_AugmentedTransfer_EN
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# biobert-base-cased-v1.2-finetuned-ner-CRAFT_AugmentedTransfer_EN

This model is a fine-tuned version of [StivenLancheros/biobert-base-cased-v1.2-finetuned-ner-CRAFT_Augmented_EN](https://huggingface.co/StivenLancheros/biobert-base-cased-v1.2-finetuned-ner-CRAFT_Augmented_EN) on the CRAFT dataset. It achieves the following results on the evaluation set:
- Loss: 0.2213
- Precision: 0.8528
- Recall: 0.8617
- F1: 0.8572
- Accuracy: 0.9709

## Model description

This model performs Named Entity Recognition for 6 entity tags: Sequence, Cell, Protein, Gene, Taxon, and Chemical from the CRAFT (Colorado Richly Annotated Full Text) Corpus in Spanish and English. Entity tags have been normalized and replaced from the original three-letter code to a full name, e.g. B-Protein, I-Chemical. This model is trained on augmented data created using Entity Replacement. 20% of the entities were replaced using a list of entities for each entity tag obtained from the official ontologies for each entity class. Both datasets (original, augmented) were concatenated.

To improve the F1 score, transfer learning was completed in two steps. Using [StivenLancheros/biobert-base-cased-v1.2-finetuned-ner-CRAFT_Augmented_EN](https://huggingface.co/StivenLancheros/biobert-base-cased-v1.2-finetuned-ner-CRAFT_Augmented_EN) as a base model, I fine-tuned once more on the original CRAFT dataset in English.

Biobert --> Augmented CRAFT --> CRAFT

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 3e-05
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 4

### Training results

| Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:---------:|:------:|:------:|:--------:|
| 0.0106 | 1.0 | 1360 | 0.1866 | 0.8343 | 0.8661 | 0.8499 | 0.9698 |
| 0.0063 | 2.0 | 2720 | 0.2100 | 0.8536 | 0.8537 | 0.8537 | 0.9701 |
| 0.0031 | 3.0 | 4080 | 0.2133 | 0.8506 | 0.8578 | 0.8542 | 0.9705 |
| 0.0008 | 4.0 | 5440 | 0.2213 | 0.8528 | 0.8617 | 0.8572 | 0.9709 |

### Framework versions

- Transformers 4.17.0
- Pytorch 1.10.0+cu111
- Datasets 2.0.0
- Tokenizers 0.11.6
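## Example usage (sketch)

The card does not yet include an inference snippet, so the following is a minimal, hedged sketch using the standard `transformers` token-classification pipeline; the example sentence and the aggregation strategy are illustrative assumptions, not part of the original training setup.

```python
# Minimal inference sketch; sentence and aggregation strategy are illustrative.
from transformers import pipeline

ner = pipeline(
    "token-classification",
    model="StivenLancheros/biobert-base-cased-v1.2-finetuned-ner-CRAFT_AugmentedTransfer_EN",
    aggregation_strategy="simple",  # merge B-/I- word pieces into whole entity spans
)

print(ner("The BRCA1 protein is expressed in Mus musculus hepatocytes."))
```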
[ "NAMED_ENTITY_RECOGNITION" ]
[ "CRAFT" ]
aisingapore/llama3-8b-cpt-sea-lionv2-base
aisingapore
text-generation
[ "transformers", "safetensors", "llama", "text-generation", "en", "id", "ta", "th", "vi", "arxiv:2309.06085", "arxiv:2101.09635", "base_model:meta-llama/Meta-Llama-3-8B-Instruct", "base_model:finetune:meta-llama/Meta-Llama-3-8B-Instruct", "license:llama3", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
2024-07-30T00:31:08
2024-12-19T13:19:44
117
4
---
base_model: meta-llama/Meta-Llama-3-8B-Instruct
language:
- en
- id
- ta
- th
- vi
license: llama3
new_version: aisingapore/llama3.1-8b-cpt-sea-lionv3-base
---
# Llama3 8B CPT SEA-LIONv2

SEA-LION is a collection of Large Language Models (LLMs) which have been pretrained and instruct-tuned for the Southeast Asia (SEA) region.

Llama3 8B CPT SEA-LIONv2 Base is a multilingual model which has undergone continued pre-training on approximately **48B** tokens across 5 SEA languages: English, Indonesian, Tamil, Thai and Vietnamese.

SEA-LION stands for <i>Southeast Asian Languages In One Network</i>.

- **Developed by:** Products Pillar, AI Singapore
- **Funded by:** Singapore NRF
- **Model type:** Decoder
- **Languages supported:** English, Indonesian, Thai, Vietnamese, Tamil
- **License:** [Llama3 Community License](https://huggingface.co/meta-llama/Meta-Llama-3-8B/blob/main/LICENSE)

## Model Details

### Model Description

We performed continued pre-training in English and SEA languages on [Meta-Llama-3-8B-Instruct](https://huggingface.co/meta-llama/Meta-Llama-3-8B-Instruct), a decoder model using the Llama 3 architecture, to create Llama3 8B CPT SEA-LIONv2 Base.

For tokenisation, the model employs the default tokenizer used in Llama 3 8B Instruct.

### Benchmark Performance

We evaluated the Llama3 8B CPT SEA-LIONv2 base model on general language capabilities.

#### General Language Capabilities

For the evaluation of general language capabilities in SEA languages, we employed the [BHASA evaluation benchmark](https://arxiv.org/abs/2309.06085v2) across a variety of tasks. These tasks include Question Answering (QA), Sentiment Analysis (Sentiment), Toxicity Detection (Toxicity), Translation in both directions (Eng>Lang & Lang>Eng), Abstractive Summarization (Summ), Causal Reasoning (Causal) and Natural Language Inference (NLI).

The evaluation was done **five-shot** with native prompts, and only a sample of 100-1000 instances per dataset was used, as per the setting described in the paper.
For more details on Llama3 8B CPT SEA-LIONv2 base benchmark performance, please refer to the SEA HELM leaderboard, https://leaderboard.sea-lion.ai/ ## Training Details ### Infrastructure Llama3 8B CPT SEA-LIONv2 was trained using [MosaicML Composer](https://github.com/mosaicml/composer) on the following hardware: | Training Details | Llama3 8B CPT SEA-LIONv2 | |----------------------|:--------------------:| | AWS EC2 p5d.24xlarge | 8 instances | | Nvidia H100 80GB GPU | 64 | | Training Duration | 2 days | ### Configuration | HyperParameter | Llama3 8B CPT SEA-LIONv2 | |-------------------|:--------------------:| | Precision | bfloat16 | | Optimizer | decoupled_adamw | | Scheduler | weight_stable_decay | | Learning Rate | 1.0e-5 | | Global Batch Size | 512 | | Micro Batch Size | 2 | ## Data Llama3 8B CPT SEA-LIONv2 base model was continued pre-trained on 48B tokens of the following data: | Data Source | Unique Tokens (B) | Multiplier | Total Tokens (B) | Percentage (%) | |---------------------------|:-----------------:|:----------:|:----------------:|:--------------:| | Dolma RefinedWeb - English| 7.650 | 1 | 7.650 | 15.90 | | Dolma C4 - English | 1.160 | 1 | 1.16 | 9.21 | | Dolma Reddit - English | 1.339 | 1 | 1.339 | 2.42 | | Dolma Semantic Scholar | 0.959 | 1 | 0.959 | 2.79 | | Dolma arXiv | 0.469 | 1 | 0.469 | 1.99 | | Dolma StarCoder | 4.422 | 1 | 4.422 | 0.98 | | SEA-LION Pile - Indonesian| 3.4 | 2 | 6.8 | 14.17 | | Wiki* - Indonesian | 0.3 | 4 | 1.2 | 2.50 | | SEA-LION Pile - Tamil | 5.6 | 1 | 5.6 | 11.67 | | Wiki* + News - Tamil | 0.6 | 4 | 2.4 | 5.00 | | SEA-LION Pile - Thai | 2.28 | 1 | 2.28 | 4.75 | | WangChanBERTa - Thai | 5 | 1 | 5 | 10.42 | | Wiki* - Thai | 0.18 | 4 | 0.72 | 1.50 | | SEA-LION Pile - Vietnamese| 6.76 | 1 | 6.76 | 14.08 | | Wiki* - Vietnamese | 0.31 | 4 | 1.24 | 2.58 | Note: - All token counts are counted using Llama3 tokenizer - wiki* sources includes Wikipedia, Wiki Books, Wiki Source and Wiki Voyage - Tamil news is sourced with permission from [Seithi](https://seithi.mediacorp.sg/) ## Call for Contributions We encourage researchers, developers, and language enthusiasts to actively contribute to the enhancement and expansion of SEA-LION. Contributions can involve identifying and reporting bugs, sharing pre-training, instruction, and preference data, improving documentation usability, proposing and implementing new model evaluation tasks and metrics, or training versions of the model in additional Southeast Asian languages. Join us in shaping the future of SEA-LION by sharing your expertise and insights to make these models more accessible, accurate, and versatile. Please check out our GitHub for further information on the call for contributions. ## The Team Cheng Nicholas, Choa Esther, Huang Yuli, Lau Wayne, Lee Chwan Ren, Leong Wai Yi, Leong Wei Qi, Li Yier, Liu Bing Jie Darius, Lovenia Holy, Montalan Jann Railey, Ng Boon Cheong Raymond, Ngui Jian Gang, Nguyen Thanh Ngan, Ong Brandon, Ong Tat-Wee David, Ong Zhi Hao, Rengarajan Hamsawardhini, Siow Bryan, Susanto Yosephine, Tai Ngee Chia, Tan Choon Meng, Teo Eng Sipp Leslie, Teo Wei Yi, Tjhi William, Teng Walter, Yeo Yeow Tong, Yong Xianbin ## Acknowledgements [AI Singapore](​​https://aisingapore.org/) is a national programme supported by the National Research Foundation, Singapore and hosted by the National University of Singapore. 
Any opinions, findings and conclusions or recommendations expressed in this material are those of the author(s) and do not reflect the views of the National Research Foundation or the National University of Singapore. ## Contact For more info, please contact us using this [SEA-LION Inquiry Form.](https://forms.gle/sLCUVb95wmGf43hi6) [Link to SEA-LION's GitHub repository.](https://github.com/aisingapore/sealion) ## Disclaimer This is the repository for the commercial instruction-tuned model. The model has _not_ been aligned for safety. Developers and users should perform their own safety fine-tuning and related security measures. In no event shall the authors be held liable for any claims, damages, or other liabilities arising from the use of the released weights and codes. ## References ### Thai Pre-Training Data Reference ```bibtex @misc{lowphansirikul2021wangchanberta, title={WangchanBERTa: Pretraining transformer-based Thai Language Models}, author={Lalita Lowphansirikul and Charin Polpanumas and Nawat Jantrakulchai and Sarana Nutanong}, year={2021}, eprint={2101.09635}, archivePrefix={arXiv}, primaryClass={cs.CL} } ```
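## Usage example (sketch)

The card itself does not include a code snippet, so the following is a minimal, hedged sketch of loading the base model for generation with `transformers`; the dtype, device placement and prompt are illustrative assumptions rather than a recommended configuration.

```python
# Minimal generation sketch for the base (non-instruct) model; settings are assumptions.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "aisingapore/llama3-8b-cpt-sea-lionv2-base"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16, device_map="auto")

# A base model continues text rather than following instructions.
prompt = "Ibu kota Indonesia adalah"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```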
[ "QUESTION_ANSWERING", "TRANSLATION", "SUMMARIZATION" ]
[ "CHIA" ]
StivenLancheros/Biobert-base-cased-v1.2-finetuned-ner-CRAFT_es_en
StivenLancheros
token-classification
[ "transformers", "pytorch", "tensorboard", "bert", "token-classification", "generated_from_trainer", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2022-03-11T20:09:49
2022-03-12T11:40:00
116
0
--- metrics: - precision - recall - f1 - accuracy tags: - generated_from_trainer model-index: - name: Biobert-base-cased-v1.2-finetuned-ner-CRAFT_es_en results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # Biobert-base-cased-v1.2-finetuned-ner-CRAFT_es_en This model is a fine-tuned version of [dmis-lab/biobert-base-cased-v1.2](https://huggingface.co/dmis-lab/biobert-base-cased-v1.2) on the CRAFT dataset. It achieves the following results on the evaluation set: - Loss: 0.1811 - Precision: 0.8555 - Recall: 0.8539 - F1: 0.8547 - Accuracy: 0.9706 ## Model description This model performs Named Entity Recognition for 6 entity tags: Sequence, Cell, Protein, Gene, Taxon, and Chemical from the [CRAFT](https://github.com/UCDenver-ccp/CRAFT/releases)(Colorado Richly Annotated Full Text) Corpus in Spanish and English. Entity tags have been normalized and replaced from the original three letter code to a full name e.g. B-Protein, I-Chemical. ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 3e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 ### Training results | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:---------:|:------:|:------:|:--------:| | 0.052 | 1.0 | 1360 | 0.1413 | 0.8300 | 0.8442 | 0.8370 | 0.9677 | | 0.0199 | 2.0 | 2720 | 0.1673 | 0.8461 | 0.8458 | 0.8459 | 0.9689 | | 0.011 | 3.0 | 4080 | 0.1647 | 0.8588 | 0.8528 | 0.8558 | 0.9704 | | 0.0031 | 4.0 | 5440 | 0.1811 | 0.8555 | 0.8539 | 0.8547 | 0.9706 | ### Framework versions - Transformers 4.17.0 - Pytorch 1.10.0+cu111 - Datasets 1.18.4 - Tokenizers 0.11.6
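### Training setup as code (sketch)

To make the hyperparameter list above concrete, here is a hedged sketch of how those settings map onto the `transformers` `Trainer` API. The output directory, label count and dataset objects are placeholders, not the original training script.

```python
# Sketch only: maps the listed hyperparameters onto TrainingArguments.
from transformers import (AutoModelForTokenClassification, AutoTokenizer,
                          Trainer, TrainingArguments)

base_model = "dmis-lab/biobert-base-cased-v1.2"
tokenizer = AutoTokenizer.from_pretrained(base_model)
# 6 entity classes with B-/I- prefixes plus "O" -> 13 labels (assumed label scheme)
model = AutoModelForTokenClassification.from_pretrained(base_model, num_labels=13)

args = TrainingArguments(
    output_dir="biobert-finetuned-ner-CRAFT_es_en",  # placeholder name
    learning_rate=3e-05,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    seed=42,
    lr_scheduler_type="linear",
    num_train_epochs=4,
    evaluation_strategy="epoch",  # assumption: evaluate once per epoch as in the results table
)

train_dataset = None  # placeholder: tokenized, label-aligned CRAFT training split
eval_dataset = None   # placeholder: tokenized, label-aligned CRAFT evaluation split

trainer = Trainer(model=model, args=args, train_dataset=train_dataset,
                  eval_dataset=eval_dataset, tokenizer=tokenizer)
# trainer.train()  # would require the real datasets above
```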
[ "NAMED_ENTITY_RECOGNITION" ]
[ "CRAFT" ]
StivenLancheros/biobert-base-cased-v1.2-finetuned-ner-CRAFT_Augmented_ES
StivenLancheros
token-classification
[ "transformers", "pytorch", "tensorboard", "bert", "token-classification", "generated_from_trainer", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2022-03-15T22:44:16
2022-03-17T14:49:03
116
0
--- metrics: - precision - recall - f1 - accuracy tags: - generated_from_trainer model-index: - name: biobert-base-cased-v1.2-finetuned-ner-CRAFT_Augmented_ES results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # biobert-base-cased-v1.2-finetuned-ner-CRAFT_Augmented_ES This model is a fine-tuned version of [dmis-lab/biobert-base-cased-v1.2](https://huggingface.co/dmis-lab/biobert-base-cased-v1.2) on the CRAFT dataset. It achieves the following results on the evaluation set: - Loss: 0.2251 - Precision: 0.8276 - Recall: 0.8411 - F1: 0.8343 - Accuracy: 0.9676 ## Model description This model performs Named Entity Recognition for 6 entity tags: Sequence, Cell, Protein, Gene, Taxon, and Chemical from the CRAFT(Colorado Richly Annotated Full Text) Corpus in Spanish (MT translated) and English. Entity tags have been normalized and replaced from the original three letter code to a full name e.g. B-Protein, I-Chemical. This model is trained on augmented data created using Entity Replacement. 20% of the entities were replaced using a list of entities for each entity tag obtained from the official ontologies for each entity class. Three datasets (original, augmented, MT translated CRAFT) were concatenated. ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 3e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 ### Training results | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy | |:-------------:|:-----:|:-----:|:---------------:|:---------:|:------:|:------:|:--------:| | 0.0549 | 1.0 | 4078 | 0.1673 | 0.8056 | 0.8112 | 0.8084 | 0.9640 | | 0.0233 | 2.0 | 8156 | 0.1733 | 0.8321 | 0.8244 | 0.8283 | 0.9662 | | 0.0101 | 3.0 | 12234 | 0.1972 | 0.8336 | 0.8391 | 0.8363 | 0.9678 | | 0.0036 | 4.0 | 16312 | 0.2251 | 0.8276 | 0.8411 | 0.8343 | 0.9676 | ### Framework versions - Transformers 4.17.0 - Pytorch 1.10.0+cu111 - Datasets 2.0.0 - Tokenizers 0.11.6
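## Entity Replacement example (sketch)

The augmentation step described above (replacing 20% of entity mentions with other entities of the same class drawn from the corresponding ontologies) can be illustrated with a short, hedged sketch. The replacement lists, data format and single-token simplification below are all assumptions, not the original augmentation code.

```python
# Illustrative sketch of entity-replacement augmentation on token/tag pairs.
import random

random.seed(42)

# Candidate surface forms per entity class; the real lists come from the official ontologies.
ontology_terms = {
    "Protein": ["insulin", "catalase", "hemoglobin"],
    "Chemical": ["ethanol", "glucose"],
}

def augment(tokens, tags, rate=0.2):
    """Replace roughly `rate` of the entity mentions with same-class terms.

    Single-token replacements only; multi-word ontology terms would also
    require re-aligning the B-/I- tags in a real implementation.
    """
    new_tokens = list(tokens)
    for i, tag in enumerate(tags):
        if tag.startswith("B-") and random.random() < rate:
            candidates = ontology_terms.get(tag[2:], [])
            if candidates:
                new_tokens[i] = random.choice(candidates)
    return new_tokens, tags

tokens = ["The", "BRCA1", "protein", "binds", "estradiol", "."]
tags = ["O", "B-Protein", "O", "O", "B-Chemical", "O"]
print(augment(tokens, tags))
```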
[ "NAMED_ENTITY_RECOGNITION" ]
[ "CRAFT" ]
StivenLancheros/Roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_en_es
StivenLancheros
token-classification
[ "transformers", "pytorch", "tensorboard", "roberta", "token-classification", "generated_from_trainer", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2022-03-11T19:08:07
2022-03-12T11:39:55
115
0
--- license: apache-2.0 metrics: - precision - recall - f1 - accuracy tags: - generated_from_trainer model-index: - name: Roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_en_es results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # Roberta-base-biomedical-clinical-es-finetuned-ner-CRAFT_en_es This model is a fine-tuned version of [PlanTL-GOB-ES/roberta-base-biomedical-clinical-es](https://huggingface.co/PlanTL-GOB-ES/roberta-base-biomedical-clinical-es) on the CRAFT dataset. It achieves the following results on the evaluation set: - Loss: 0.1750 - Precision: 0.8664 - Recall: 0.8587 - F1: 0.8625 - Accuracy: 0.9727 ## Model description This model performs Named Entity Recognition for 6 entity tags: Sequence, Cell, Protein, Gene, Taxon, and Chemical from the [CRAFT](https://github.com/UCDenver-ccp/CRAFT/releases)(Colorado Richly Annotated Full Text) Corpus in Spanish and English. Entity tags have been normalized and replaced from the original three letter code to a full name e.g. B-Protein, I-Chemical. ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 3e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 ### Training results | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:---------:|:------:|:------:|:--------:| | 0.0564 | 1.0 | 1360 | 0.1459 | 0.8296 | 0.8489 | 0.8392 | 0.9696 | | 0.0222 | 2.0 | 2720 | 0.1554 | 0.8650 | 0.8320 | 0.8482 | 0.9702 | | 0.0124 | 3.0 | 4080 | 0.1670 | 0.8588 | 0.8564 | 0.8576 | 0.9717 | | 0.0052 | 4.0 | 5440 | 0.1750 | 0.8664 | 0.8587 | 0.8625 | 0.9727 | ### Framework versions - Transformers 4.17.0 - Pytorch 1.10.0+cu111 - Datasets 1.18.4 - Tokenizers 0.11.6
[ "NAMED_ENTITY_RECOGNITION" ]
[ "CRAFT" ]
sumedh/autonlp-MeQSum-1-660519466
sumedh
text2text-generation
[ "transformers", "pytorch", "pegasus", "text2text-generation", "autonlp", "unk", "dataset:sumedh/autotrain-data-MeQSum-1", "co2_eq_emissions", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2022-03-23T06:43:11
2022-03-23T07:16:44
115
0
---
datasets:
- sumedh/autotrain-data-MeQSum-1
language: unk
tags:
- autonlp
widget:
- text: I love AutoNLP 🤗
co2_eq_emissions: 35.865521343923916
---

# Model Trained Using AutoNLP

- Problem type: Summarization
- Model ID: 660519466
- CO2 Emissions (in grams): 35.865521343923916

## Validation Metrics

- Loss: 1.3210543394088745
- Rouge1: 52.1593
- Rouge2: 34.5464
- RougeL: 50.1141
- RougeLsum: 50.1067
- Gen Len: 11.93

## Usage

You can use cURL to access this model:

```
$ curl -X POST -H "Authorization: Bearer YOUR_HUGGINGFACE_API_KEY" -H "Content-Type: application/json" -d '{"inputs": "I love AutoNLP"}' https://api-inference.huggingface.co/sumedh/autonlp-MeQSum-1-660519466
```
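## Local usage (sketch)

Besides the hosted Inference API call shown above, the model can presumably also be run locally with the `transformers` summarization pipeline; the sample question and length limits below are illustrative assumptions.

```python
# Minimal local-inference sketch alongside the cURL example above.
from transformers import pipeline

summarizer = pipeline("summarization", model="sumedh/autonlp-MeQSum-1-660519466")

question = (
    "I have been taking ibuprofen for three days for a sprained ankle and the "
    "swelling has not gone down. Should I switch to another medication or see a doctor?"
)
print(summarizer(question, max_length=20, min_length=5)[0]["summary_text"])
```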
[ "SUMMARIZATION" ]
[ "MEQSUM" ]
StivenLancheros/biobert-base-cased-v1.2-finetuned-ner-CRAFT_AugmentedTransfer_ES
StivenLancheros
token-classification
[ "transformers", "pytorch", "tensorboard", "bert", "token-classification", "generated_from_trainer", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2022-03-17T13:21:45
2022-03-17T14:51:33
114
0
---
metrics:
- precision
- recall
- f1
- accuracy
tags:
- generated_from_trainer
model-index:
- name: biobert-base-cased-v1.2-finetuned-ner-CRAFT_AugmentedTransfer_ES
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# biobert-base-cased-v1.2-finetuned-ner-CRAFT_AugmentedTransfer_ES

This model is a fine-tuned version of [StivenLancheros/biobert-base-cased-v1.2-finetuned-ner-CRAFT_Augmented_ES](https://huggingface.co/StivenLancheros/biobert-base-cased-v1.2-finetuned-ner-CRAFT_Augmented_ES) on the CRAFT dataset. It achieves the following results on the evaluation set:
- Loss: 0.2298
- Precision: 0.8535
- Recall: 0.8476
- F1: 0.8505
- Accuracy: 0.9705

## Model description

This model performs Named Entity Recognition for 6 entity tags: Sequence, Cell, Protein, Gene, Taxon, and Chemical from the CRAFT (Colorado Richly Annotated Full Text) Corpus in Spanish (MT translated) and English. Entity tags have been normalized and replaced from the original three-letter code to a full name, e.g. B-Protein, I-Chemical. This model is trained on augmented data created using Entity Replacement. 20% of the entities were replaced using a list of entities for each entity tag obtained from the official ontologies for each entity class. Three datasets (original, augmented, MT translated CRAFT) were concatenated.

To improve the F1 score, transfer learning was completed in two steps. Using [StivenLancheros/biobert-base-cased-v1.2-finetuned-ner-CRAFT_Augmented_ES](https://huggingface.co/StivenLancheros/biobert-base-cased-v1.2-finetuned-ner-CRAFT_Augmented_ES) as a base model, I fine-tuned once more on the original CRAFT dataset in English.

Biobert --> Augmented CRAFT --> CRAFT ES (MT translated)

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 3e-05
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 4

### Training results

| Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:---------:|:------:|:------:|:--------:|
| 0.0177 | 1.0 | 1360 | 0.2318 | 0.8510 | 0.8275 | 0.8391 | 0.9684 |
| 0.0102 | 2.0 | 2720 | 0.2253 | 0.8322 | 0.8455 | 0.8388 | 0.9683 |
| 0.0039 | 3.0 | 4080 | 0.2193 | 0.8383 | 0.8451 | 0.8416 | 0.9689 |
| 0.002 | 4.0 | 5440 | 0.2298 | 0.8535 | 0.8476 | 0.8505 | 0.9705 |

### Framework versions

- Transformers 4.17.0
- Pytorch 1.10.0+cu111
- Datasets 2.0.0
- Tokenizers 0.11.6
[ "NAMED_ENTITY_RECOGNITION" ]
[ "CRAFT" ]
sunzx0810/gte-Qwen2-7B-instruct-Q5_K_M-GGUF
sunzx0810
sentence-similarity
[ "sentence-transformers", "gguf", "qwen2", "text-generation", "mteb", "transformers", "Qwen2", "sentence-similarity", "llama-cpp", "gguf-my-repo", "custom_code", "base_model:Alibaba-NLP/gte-Qwen2-7B-instruct", "base_model:quantized:Alibaba-NLP/gte-Qwen2-7B-instruct", "license:apache-2.0", "model-index", "autotrain_compatible", "endpoints_compatible", "region:us", "conversational" ]
2024-06-20T03:38:41
2024-06-25T07:02:31
114
6
--- base_model: Alibaba-NLP/gte-Qwen2-7B-instruct license: apache-2.0 tags: - mteb - sentence-transformers - transformers - Qwen2 - sentence-similarity - llama-cpp - gguf-my-repo model-index: - name: gte-qwen2-7B-instruct results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 91.31343283582089 - type: ap value: 67.64251402604096 - type: f1 value: 87.53372530755692 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 97.497825 - type: ap value: 96.30329547047529 - type: f1 value: 97.49769793778039 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 62.564 - type: f1 value: 60.975777935041066 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: map_at_1 value: 36.486000000000004 - type: map_at_10 value: 54.842 - type: map_at_100 value: 55.206999999999994 - type: map_at_1000 value: 55.206999999999994 - type: map_at_3 value: 49.893 - type: map_at_5 value: 53.105000000000004 - type: mrr_at_1 value: 37.34 - type: mrr_at_10 value: 55.143 - type: mrr_at_100 value: 55.509 - type: mrr_at_1000 value: 55.509 - type: mrr_at_3 value: 50.212999999999994 - type: mrr_at_5 value: 53.432 - type: ndcg_at_1 value: 36.486000000000004 - type: ndcg_at_10 value: 64.273 - type: ndcg_at_100 value: 65.66199999999999 - type: ndcg_at_1000 value: 65.66199999999999 - type: ndcg_at_3 value: 54.352999999999994 - type: ndcg_at_5 value: 60.131 - type: precision_at_1 value: 36.486000000000004 - type: precision_at_10 value: 9.395000000000001 - type: precision_at_100 value: 0.996 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 22.428 - type: precision_at_5 value: 16.259 - type: recall_at_1 value: 36.486000000000004 - type: recall_at_10 value: 93.95400000000001 - type: recall_at_100 value: 99.644 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 67.283 - type: recall_at_5 value: 81.294 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 56.461169803700564 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 51.73600434466286 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 67.57827065898053 - type: mrr value: 79.08136569493911 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 83.53324575999243 - type: cos_sim_spearman value: 81.37173362822374 - type: euclidean_pearson value: 82.19243335103444 - type: euclidean_spearman value: 81.33679307304334 - type: manhattan_pearson 
value: 82.38752665975699 - type: manhattan_spearman value: 81.31510583189689 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 87.56818181818181 - type: f1 value: 87.25826722019875 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 50.09239610327673 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 46.64733054606282 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: map_at_1 value: 33.997 - type: map_at_10 value: 48.176 - type: map_at_100 value: 49.82 - type: map_at_1000 value: 49.924 - type: map_at_3 value: 43.626 - type: map_at_5 value: 46.275 - type: mrr_at_1 value: 42.059999999999995 - type: mrr_at_10 value: 53.726 - type: mrr_at_100 value: 54.398 - type: mrr_at_1000 value: 54.416 - type: mrr_at_3 value: 50.714999999999996 - type: mrr_at_5 value: 52.639 - type: ndcg_at_1 value: 42.059999999999995 - type: ndcg_at_10 value: 55.574999999999996 - type: ndcg_at_100 value: 60.744 - type: ndcg_at_1000 value: 61.85699999999999 - type: ndcg_at_3 value: 49.363 - type: ndcg_at_5 value: 52.44 - type: precision_at_1 value: 42.059999999999995 - type: precision_at_10 value: 11.101999999999999 - type: precision_at_100 value: 1.73 - type: precision_at_1000 value: 0.218 - type: precision_at_3 value: 24.464 - type: precision_at_5 value: 18.026 - type: recall_at_1 value: 33.997 - type: recall_at_10 value: 70.35900000000001 - type: recall_at_100 value: 91.642 - type: recall_at_1000 value: 97.977 - type: recall_at_3 value: 52.76 - type: recall_at_5 value: 61.148 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval type: BeIR/cqadupstack config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: map_at_1 value: 35.884 - type: map_at_10 value: 48.14 - type: map_at_100 value: 49.5 - type: map_at_1000 value: 49.63 - type: map_at_3 value: 44.646 - type: map_at_5 value: 46.617999999999995 - type: mrr_at_1 value: 44.458999999999996 - type: mrr_at_10 value: 53.751000000000005 - type: mrr_at_100 value: 54.37800000000001 - type: mrr_at_1000 value: 54.415 - type: mrr_at_3 value: 51.815 - type: mrr_at_5 value: 52.882 - type: ndcg_at_1 value: 44.458999999999996 - type: ndcg_at_10 value: 54.157 - type: ndcg_at_100 value: 58.362 - type: ndcg_at_1000 value: 60.178 - type: ndcg_at_3 value: 49.661 - type: ndcg_at_5 value: 51.74999999999999 - type: precision_at_1 value: 44.458999999999996 - type: precision_at_10 value: 10.248 - type: precision_at_100 value: 1.5890000000000002 - type: precision_at_1000 value: 0.207 - type: precision_at_3 value: 23.928 - type: precision_at_5 value: 16.878999999999998 - type: recall_at_1 value: 35.884 - type: recall_at_10 value: 64.798 - type: recall_at_100 value: 82.345 - type: recall_at_1000 value: 93.267 - type: recall_at_3 value: 51.847 - type: recall_at_5 value: 57.601 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval type: BeIR/cqadupstack config: default split: test revision: 
4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: map_at_1 value: 39.383 - type: map_at_10 value: 53.714 - type: map_at_100 value: 54.838 - type: map_at_1000 value: 54.87800000000001 - type: map_at_3 value: 50.114999999999995 - type: map_at_5 value: 52.153000000000006 - type: mrr_at_1 value: 45.016 - type: mrr_at_10 value: 56.732000000000006 - type: mrr_at_100 value: 57.411 - type: mrr_at_1000 value: 57.431 - type: mrr_at_3 value: 54.044000000000004 - type: mrr_at_5 value: 55.639 - type: ndcg_at_1 value: 45.016 - type: ndcg_at_10 value: 60.228 - type: ndcg_at_100 value: 64.277 - type: ndcg_at_1000 value: 65.07 - type: ndcg_at_3 value: 54.124 - type: ndcg_at_5 value: 57.147000000000006 - type: precision_at_1 value: 45.016 - type: precision_at_10 value: 9.937 - type: precision_at_100 value: 1.288 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 24.471999999999998 - type: precision_at_5 value: 16.991 - type: recall_at_1 value: 39.383 - type: recall_at_10 value: 76.175 - type: recall_at_100 value: 93.02 - type: recall_at_1000 value: 98.60900000000001 - type: recall_at_3 value: 60.265 - type: recall_at_5 value: 67.46600000000001 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval type: BeIR/cqadupstack config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: map_at_1 value: 27.426000000000002 - type: map_at_10 value: 37.397000000000006 - type: map_at_100 value: 38.61 - type: map_at_1000 value: 38.678000000000004 - type: map_at_3 value: 34.150999999999996 - type: map_at_5 value: 36.137 - type: mrr_at_1 value: 29.944 - type: mrr_at_10 value: 39.654 - type: mrr_at_100 value: 40.638000000000005 - type: mrr_at_1000 value: 40.691 - type: mrr_at_3 value: 36.817 - type: mrr_at_5 value: 38.524 - type: ndcg_at_1 value: 29.944 - type: ndcg_at_10 value: 43.094 - type: ndcg_at_100 value: 48.789 - type: ndcg_at_1000 value: 50.339999999999996 - type: ndcg_at_3 value: 36.984 - type: ndcg_at_5 value: 40.248 - type: precision_at_1 value: 29.944 - type: precision_at_10 value: 6.78 - type: precision_at_100 value: 1.024 - type: precision_at_1000 value: 0.11800000000000001 - type: precision_at_3 value: 15.895000000000001 - type: precision_at_5 value: 11.39 - type: recall_at_1 value: 27.426000000000002 - type: recall_at_10 value: 58.464000000000006 - type: recall_at_100 value: 84.193 - type: recall_at_1000 value: 95.52000000000001 - type: recall_at_3 value: 42.172 - type: recall_at_5 value: 50.101 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval type: BeIR/cqadupstack config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: map_at_1 value: 19.721 - type: map_at_10 value: 31.604 - type: map_at_100 value: 32.972 - type: map_at_1000 value: 33.077 - type: map_at_3 value: 27.218999999999998 - type: map_at_5 value: 29.53 - type: mrr_at_1 value: 25.0 - type: mrr_at_10 value: 35.843 - type: mrr_at_100 value: 36.785000000000004 - type: mrr_at_1000 value: 36.842000000000006 - type: mrr_at_3 value: 32.193 - type: mrr_at_5 value: 34.264 - type: ndcg_at_1 value: 25.0 - type: ndcg_at_10 value: 38.606 - type: ndcg_at_100 value: 44.272 - type: ndcg_at_1000 value: 46.527 - type: ndcg_at_3 value: 30.985000000000003 - type: ndcg_at_5 value: 34.43 - type: precision_at_1 value: 25.0 - type: precision_at_10 value: 7.811 - type: precision_at_100 value: 1.203 - type: precision_at_1000 value: 0.15 - type: precision_at_3 value: 15.423 - type: precision_at_5 value: 11.791 - type: 
recall_at_1 value: 19.721 - type: recall_at_10 value: 55.625 - type: recall_at_100 value: 79.34400000000001 - type: recall_at_1000 value: 95.208 - type: recall_at_3 value: 35.19 - type: recall_at_5 value: 43.626 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval type: BeIR/cqadupstack config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: map_at_1 value: 33.784 - type: map_at_10 value: 47.522 - type: map_at_100 value: 48.949999999999996 - type: map_at_1000 value: 49.038 - type: map_at_3 value: 43.284 - type: map_at_5 value: 45.629 - type: mrr_at_1 value: 41.482 - type: mrr_at_10 value: 52.830999999999996 - type: mrr_at_100 value: 53.559999999999995 - type: mrr_at_1000 value: 53.588 - type: mrr_at_3 value: 50.016000000000005 - type: mrr_at_5 value: 51.614000000000004 - type: ndcg_at_1 value: 41.482 - type: ndcg_at_10 value: 54.569 - type: ndcg_at_100 value: 59.675999999999995 - type: ndcg_at_1000 value: 60.989000000000004 - type: ndcg_at_3 value: 48.187000000000005 - type: ndcg_at_5 value: 51.183 - type: precision_at_1 value: 41.482 - type: precision_at_10 value: 10.221 - type: precision_at_100 value: 1.486 - type: precision_at_1000 value: 0.17500000000000002 - type: precision_at_3 value: 23.548 - type: precision_at_5 value: 16.805 - type: recall_at_1 value: 33.784 - type: recall_at_10 value: 69.798 - type: recall_at_100 value: 90.098 - type: recall_at_1000 value: 98.176 - type: recall_at_3 value: 52.127 - type: recall_at_5 value: 59.861 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval type: BeIR/cqadupstack config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: map_at_1 value: 28.038999999999998 - type: map_at_10 value: 41.904 - type: map_at_100 value: 43.36 - type: map_at_1000 value: 43.453 - type: map_at_3 value: 37.785999999999994 - type: map_at_5 value: 40.105000000000004 - type: mrr_at_1 value: 35.046 - type: mrr_at_10 value: 46.926 - type: mrr_at_100 value: 47.815000000000005 - type: mrr_at_1000 value: 47.849000000000004 - type: mrr_at_3 value: 44.273 - type: mrr_at_5 value: 45.774 - type: ndcg_at_1 value: 35.046 - type: ndcg_at_10 value: 48.937000000000005 - type: ndcg_at_100 value: 54.544000000000004 - type: ndcg_at_1000 value: 56.069 - type: ndcg_at_3 value: 42.858000000000004 - type: ndcg_at_5 value: 45.644 - type: precision_at_1 value: 35.046 - type: precision_at_10 value: 9.452 - type: precision_at_100 value: 1.429 - type: precision_at_1000 value: 0.173 - type: precision_at_3 value: 21.346999999999998 - type: precision_at_5 value: 15.342 - type: recall_at_1 value: 28.038999999999998 - type: recall_at_10 value: 64.59700000000001 - type: recall_at_100 value: 87.735 - type: recall_at_1000 value: 97.41300000000001 - type: recall_at_3 value: 47.368 - type: recall_at_5 value: 54.93900000000001 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: BeIR/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 28.17291666666667 - type: map_at_10 value: 40.025749999999995 - type: map_at_100 value: 41.39208333333333 - type: map_at_1000 value: 41.499249999999996 - type: map_at_3 value: 36.347 - type: map_at_5 value: 38.41391666666667 - type: mrr_at_1 value: 33.65925 - type: mrr_at_10 value: 44.085499999999996 - type: mrr_at_100 value: 44.94116666666667 - type: mrr_at_1000 value: 44.9855 - type: mrr_at_3 value: 41.2815 - type: mrr_at_5 value: 42.91491666666666 - type: ndcg_at_1 
value: 33.65925 - type: ndcg_at_10 value: 46.430833333333325 - type: ndcg_at_100 value: 51.761 - type: ndcg_at_1000 value: 53.50899999999999 - type: ndcg_at_3 value: 40.45133333333333 - type: ndcg_at_5 value: 43.31483333333334 - type: precision_at_1 value: 33.65925 - type: precision_at_10 value: 8.4995 - type: precision_at_100 value: 1.3210000000000004 - type: precision_at_1000 value: 0.16591666666666666 - type: precision_at_3 value: 19.165083333333335 - type: precision_at_5 value: 13.81816666666667 - type: recall_at_1 value: 28.17291666666667 - type: recall_at_10 value: 61.12624999999999 - type: recall_at_100 value: 83.97266666666667 - type: recall_at_1000 value: 95.66550000000001 - type: recall_at_3 value: 44.661249999999995 - type: recall_at_5 value: 51.983333333333334 - type: map_at_1 value: 17.936 - type: map_at_10 value: 27.399 - type: map_at_100 value: 28.632 - type: map_at_1000 value: 28.738000000000003 - type: map_at_3 value: 24.456 - type: map_at_5 value: 26.06 - type: mrr_at_1 value: 19.224 - type: mrr_at_10 value: 28.998 - type: mrr_at_100 value: 30.11 - type: mrr_at_1000 value: 30.177 - type: mrr_at_3 value: 26.247999999999998 - type: mrr_at_5 value: 27.708 - type: ndcg_at_1 value: 19.224 - type: ndcg_at_10 value: 32.911 - type: ndcg_at_100 value: 38.873999999999995 - type: ndcg_at_1000 value: 41.277 - type: ndcg_at_3 value: 27.142 - type: ndcg_at_5 value: 29.755 - type: precision_at_1 value: 19.224 - type: precision_at_10 value: 5.6930000000000005 - type: precision_at_100 value: 0.9259999999999999 - type: precision_at_1000 value: 0.126 - type: precision_at_3 value: 12.138 - type: precision_at_5 value: 8.909 - type: recall_at_1 value: 17.936 - type: recall_at_10 value: 48.096 - type: recall_at_100 value: 75.389 - type: recall_at_1000 value: 92.803 - type: recall_at_3 value: 32.812999999999995 - type: recall_at_5 value: 38.851 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval type: BeIR/cqadupstack config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: map_at_1 value: 24.681 - type: map_at_10 value: 34.892 - type: map_at_100 value: 35.996 - type: map_at_1000 value: 36.083 - type: map_at_3 value: 31.491999999999997 - type: map_at_5 value: 33.632 - type: mrr_at_1 value: 28.528 - type: mrr_at_10 value: 37.694 - type: mrr_at_100 value: 38.613 - type: mrr_at_1000 value: 38.668 - type: mrr_at_3 value: 34.714 - type: mrr_at_5 value: 36.616 - type: ndcg_at_1 value: 28.528 - type: ndcg_at_10 value: 40.703 - type: ndcg_at_100 value: 45.993 - type: ndcg_at_1000 value: 47.847 - type: ndcg_at_3 value: 34.622 - type: ndcg_at_5 value: 38.035999999999994 - type: precision_at_1 value: 28.528 - type: precision_at_10 value: 6.902 - type: precision_at_100 value: 1.0370000000000001 - type: precision_at_1000 value: 0.126 - type: precision_at_3 value: 15.798000000000002 - type: precision_at_5 value: 11.655999999999999 - type: recall_at_1 value: 24.681 - type: recall_at_10 value: 55.81 - type: recall_at_100 value: 79.785 - type: recall_at_1000 value: 92.959 - type: recall_at_3 value: 39.074 - type: recall_at_5 value: 47.568 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval type: BeIR/cqadupstack config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: map_at_1 value: 18.627 - type: map_at_10 value: 27.872000000000003 - type: map_at_100 value: 29.237999999999996 - type: map_at_1000 value: 29.363 - type: map_at_3 value: 24.751 - type: map_at_5 value: 26.521 - type: mrr_at_1 value: 23.021 
- type: mrr_at_10 value: 31.924000000000003 - type: mrr_at_100 value: 32.922000000000004 - type: mrr_at_1000 value: 32.988 - type: mrr_at_3 value: 29.192 - type: mrr_at_5 value: 30.798 - type: ndcg_at_1 value: 23.021 - type: ndcg_at_10 value: 33.535 - type: ndcg_at_100 value: 39.732 - type: ndcg_at_1000 value: 42.201 - type: ndcg_at_3 value: 28.153 - type: ndcg_at_5 value: 30.746000000000002 - type: precision_at_1 value: 23.021 - type: precision_at_10 value: 6.459 - type: precision_at_100 value: 1.1320000000000001 - type: precision_at_1000 value: 0.153 - type: precision_at_3 value: 13.719000000000001 - type: precision_at_5 value: 10.193000000000001 - type: recall_at_1 value: 18.627 - type: recall_at_10 value: 46.463 - type: recall_at_100 value: 74.226 - type: recall_at_1000 value: 91.28500000000001 - type: recall_at_3 value: 31.357000000000003 - type: recall_at_5 value: 38.067 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval type: BeIR/cqadupstack config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: map_at_1 value: 31.457 - type: map_at_10 value: 42.888 - type: map_at_100 value: 44.24 - type: map_at_1000 value: 44.327 - type: map_at_3 value: 39.588 - type: map_at_5 value: 41.423 - type: mrr_at_1 value: 37.126999999999995 - type: mrr_at_10 value: 47.083000000000006 - type: mrr_at_100 value: 47.997 - type: mrr_at_1000 value: 48.044 - type: mrr_at_3 value: 44.574000000000005 - type: mrr_at_5 value: 46.202 - type: ndcg_at_1 value: 37.126999999999995 - type: ndcg_at_10 value: 48.833 - type: ndcg_at_100 value: 54.327000000000005 - type: ndcg_at_1000 value: 56.011 - type: ndcg_at_3 value: 43.541999999999994 - type: ndcg_at_5 value: 46.127 - type: precision_at_1 value: 37.126999999999995 - type: precision_at_10 value: 8.376999999999999 - type: precision_at_100 value: 1.2309999999999999 - type: precision_at_1000 value: 0.146 - type: precision_at_3 value: 20.211000000000002 - type: precision_at_5 value: 14.16 - type: recall_at_1 value: 31.457 - type: recall_at_10 value: 62.369 - type: recall_at_100 value: 85.444 - type: recall_at_1000 value: 96.65599999999999 - type: recall_at_3 value: 47.961 - type: recall_at_5 value: 54.676 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval type: BeIR/cqadupstack config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: map_at_1 value: 27.139999999999997 - type: map_at_10 value: 38.801 - type: map_at_100 value: 40.549 - type: map_at_1000 value: 40.802 - type: map_at_3 value: 35.05 - type: map_at_5 value: 36.884 - type: mrr_at_1 value: 33.004 - type: mrr_at_10 value: 43.864 - type: mrr_at_100 value: 44.667 - type: mrr_at_1000 value: 44.717 - type: mrr_at_3 value: 40.777 - type: mrr_at_5 value: 42.319 - type: ndcg_at_1 value: 33.004 - type: ndcg_at_10 value: 46.022 - type: ndcg_at_100 value: 51.542 - type: ndcg_at_1000 value: 53.742000000000004 - type: ndcg_at_3 value: 39.795 - type: ndcg_at_5 value: 42.272 - type: precision_at_1 value: 33.004 - type: precision_at_10 value: 9.012 - type: precision_at_100 value: 1.7770000000000001 - type: precision_at_1000 value: 0.26 - type: precision_at_3 value: 19.038 - type: precision_at_5 value: 13.675999999999998 - type: recall_at_1 value: 27.139999999999997 - type: recall_at_10 value: 60.961 - type: recall_at_100 value: 84.451 - type: recall_at_1000 value: 98.113 - type: recall_at_3 value: 43.001 - type: recall_at_5 value: 49.896 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: 
mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: map_at_1 value: 22.076999999999998 - type: map_at_10 value: 35.44 - type: map_at_100 value: 37.651 - type: map_at_1000 value: 37.824999999999996 - type: map_at_3 value: 30.764999999999997 - type: map_at_5 value: 33.26 - type: mrr_at_1 value: 50.163000000000004 - type: mrr_at_10 value: 61.207 - type: mrr_at_100 value: 61.675000000000004 - type: mrr_at_1000 value: 61.692 - type: mrr_at_3 value: 58.60999999999999 - type: mrr_at_5 value: 60.307 - type: ndcg_at_1 value: 50.163000000000004 - type: ndcg_at_10 value: 45.882 - type: ndcg_at_100 value: 53.239999999999995 - type: ndcg_at_1000 value: 55.852000000000004 - type: ndcg_at_3 value: 40.514 - type: ndcg_at_5 value: 42.038 - type: precision_at_1 value: 50.163000000000004 - type: precision_at_10 value: 13.466000000000001 - type: precision_at_100 value: 2.164 - type: precision_at_1000 value: 0.266 - type: precision_at_3 value: 29.707 - type: precision_at_5 value: 21.694 - type: recall_at_1 value: 22.076999999999998 - type: recall_at_10 value: 50.193 - type: recall_at_100 value: 74.993 - type: recall_at_1000 value: 89.131 - type: recall_at_3 value: 35.472 - type: recall_at_5 value: 41.814 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: map_at_1 value: 9.953 - type: map_at_10 value: 24.515 - type: map_at_100 value: 36.173 - type: map_at_1000 value: 38.351 - type: map_at_3 value: 16.592000000000002 - type: map_at_5 value: 20.036 - type: mrr_at_1 value: 74.25 - type: mrr_at_10 value: 81.813 - type: mrr_at_100 value: 82.006 - type: mrr_at_1000 value: 82.011 - type: mrr_at_3 value: 80.875 - type: mrr_at_5 value: 81.362 - type: ndcg_at_1 value: 62.5 - type: ndcg_at_10 value: 52.42 - type: ndcg_at_100 value: 56.808 - type: ndcg_at_1000 value: 63.532999999999994 - type: ndcg_at_3 value: 56.654 - type: ndcg_at_5 value: 54.18300000000001 - type: precision_at_1 value: 74.25 - type: precision_at_10 value: 42.699999999999996 - type: precision_at_100 value: 13.675 - type: precision_at_1000 value: 2.664 - type: precision_at_3 value: 60.5 - type: precision_at_5 value: 52.800000000000004 - type: recall_at_1 value: 9.953 - type: recall_at_10 value: 30.253999999999998 - type: recall_at_100 value: 62.516000000000005 - type: recall_at_1000 value: 84.163 - type: recall_at_3 value: 18.13 - type: recall_at_5 value: 22.771 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 79.455 - type: f1 value: 74.16798697647569 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: map_at_1 value: 87.531 - type: map_at_10 value: 93.16799999999999 - type: map_at_100 value: 93.341 - type: map_at_1000 value: 93.349 - type: map_at_3 value: 92.444 - type: map_at_5 value: 92.865 - type: mrr_at_1 value: 94.014 - type: mrr_at_10 value: 96.761 - type: mrr_at_100 value: 96.762 - type: mrr_at_1000 value: 96.762 - type: mrr_at_3 value: 96.672 - type: mrr_at_5 value: 96.736 - type: ndcg_at_1 value: 94.014 - type: ndcg_at_10 value: 95.112 - type: ndcg_at_100 value: 95.578 - type: ndcg_at_1000 value: 95.68900000000001 - type: ndcg_at_3 value: 94.392 - type: ndcg_at_5 value: 94.72500000000001 - type: precision_at_1 
value: 94.014 - type: precision_at_10 value: 11.065 - type: precision_at_100 value: 1.157 - type: precision_at_1000 value: 0.11800000000000001 - type: precision_at_3 value: 35.259 - type: precision_at_5 value: 21.599 - type: recall_at_1 value: 87.531 - type: recall_at_10 value: 97.356 - type: recall_at_100 value: 98.965 - type: recall_at_1000 value: 99.607 - type: recall_at_3 value: 95.312 - type: recall_at_5 value: 96.295 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: map_at_1 value: 32.055 - type: map_at_10 value: 53.114 - type: map_at_100 value: 55.235 - type: map_at_1000 value: 55.345 - type: map_at_3 value: 45.854 - type: map_at_5 value: 50.025 - type: mrr_at_1 value: 60.34 - type: mrr_at_10 value: 68.804 - type: mrr_at_100 value: 69.309 - type: mrr_at_1000 value: 69.32199999999999 - type: mrr_at_3 value: 66.40899999999999 - type: mrr_at_5 value: 67.976 - type: ndcg_at_1 value: 60.34 - type: ndcg_at_10 value: 62.031000000000006 - type: ndcg_at_100 value: 68.00500000000001 - type: ndcg_at_1000 value: 69.286 - type: ndcg_at_3 value: 56.355999999999995 - type: ndcg_at_5 value: 58.687 - type: precision_at_1 value: 60.34 - type: precision_at_10 value: 17.176 - type: precision_at_100 value: 2.36 - type: precision_at_1000 value: 0.259 - type: precision_at_3 value: 37.14 - type: precision_at_5 value: 27.809 - type: recall_at_1 value: 32.055 - type: recall_at_10 value: 70.91 - type: recall_at_100 value: 91.83 - type: recall_at_1000 value: 98.871 - type: recall_at_3 value: 51.202999999999996 - type: recall_at_5 value: 60.563 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: map_at_1 value: 43.68 - type: map_at_10 value: 64.389 - type: map_at_100 value: 65.24 - type: map_at_1000 value: 65.303 - type: map_at_3 value: 61.309000000000005 - type: map_at_5 value: 63.275999999999996 - type: mrr_at_1 value: 87.36 - type: mrr_at_10 value: 91.12 - type: mrr_at_100 value: 91.227 - type: mrr_at_1000 value: 91.229 - type: mrr_at_3 value: 90.57600000000001 - type: mrr_at_5 value: 90.912 - type: ndcg_at_1 value: 87.36 - type: ndcg_at_10 value: 73.076 - type: ndcg_at_100 value: 75.895 - type: ndcg_at_1000 value: 77.049 - type: ndcg_at_3 value: 68.929 - type: ndcg_at_5 value: 71.28 - type: precision_at_1 value: 87.36 - type: precision_at_10 value: 14.741000000000001 - type: precision_at_100 value: 1.694 - type: precision_at_1000 value: 0.185 - type: precision_at_3 value: 43.043 - type: precision_at_5 value: 27.681 - type: recall_at_1 value: 43.68 - type: recall_at_10 value: 73.707 - type: recall_at_100 value: 84.7 - type: recall_at_1000 value: 92.309 - type: recall_at_3 value: 64.564 - type: recall_at_5 value: 69.203 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 96.75399999999999 - type: ap value: 95.29389839242187 - type: f1 value: 96.75348377433475 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: map_at_1 value: 25.176 - type: map_at_10 value: 38.598 - type: map_at_100 value: 39.707 - type: map_at_1000 value: 39.744 - type: map_at_3 value: 34.566 - type: map_at_5 value: 36.863 - type: mrr_at_1 value: 
25.874000000000002 - type: mrr_at_10 value: 39.214 - type: mrr_at_100 value: 40.251 - type: mrr_at_1000 value: 40.281 - type: mrr_at_3 value: 35.291 - type: mrr_at_5 value: 37.545 - type: ndcg_at_1 value: 25.874000000000002 - type: ndcg_at_10 value: 45.98 - type: ndcg_at_100 value: 51.197 - type: ndcg_at_1000 value: 52.073 - type: ndcg_at_3 value: 37.785999999999994 - type: ndcg_at_5 value: 41.870000000000005 - type: precision_at_1 value: 25.874000000000002 - type: precision_at_10 value: 7.181 - type: precision_at_100 value: 0.979 - type: precision_at_1000 value: 0.106 - type: precision_at_3 value: 16.051000000000002 - type: precision_at_5 value: 11.713 - type: recall_at_1 value: 25.176 - type: recall_at_10 value: 68.67699999999999 - type: recall_at_100 value: 92.55 - type: recall_at_1000 value: 99.164 - type: recall_at_3 value: 46.372 - type: recall_at_5 value: 56.16 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 99.03784769721841 - type: f1 value: 98.97791641821495 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 91.88326493388054 - type: f1 value: 73.74809928034335 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 85.41358439811701 - type: f1 value: 83.503679460639 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 89.77135171486215 - type: f1 value: 88.89843747468366 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 46.22695362087359 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 44.132372165849425 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 33.35680810650402 - type: mrr value: 34.72625715637218 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: map_at_1 value: 7.165000000000001 - type: map_at_10 value: 15.424 - type: map_at_100 value: 20.28 - type: map_at_1000 value: 22.065 - type: map_at_3 value: 11.236 - type: map_at_5 value: 13.025999999999998 - type: mrr_at_1 value: 51.702999999999996 - type: mrr_at_10 value: 59.965 - type: mrr_at_100 value: 60.667 - type: mrr_at_1000 value: 60.702999999999996 - type: mrr_at_3 value: 58.772000000000006 - type: mrr_at_5 value: 59.267 - type: ndcg_at_1 value: 49.536 - type: ndcg_at_10 value: 40.6 - type: ndcg_at_100 value: 37.848 - type: ndcg_at_1000 value: 46.657 - type: ndcg_at_3 value: 46.117999999999995 - type: ndcg_at_5 value: 43.619 - type: precision_at_1 value: 51.393 - type: precision_at_10 value: 
30.31 - type: precision_at_100 value: 9.972 - type: precision_at_1000 value: 2.329 - type: precision_at_3 value: 43.137 - type: precision_at_5 value: 37.585 - type: recall_at_1 value: 7.165000000000001 - type: recall_at_10 value: 19.689999999999998 - type: recall_at_100 value: 39.237 - type: recall_at_1000 value: 71.417 - type: recall_at_3 value: 12.247 - type: recall_at_5 value: 14.902999999999999 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: map_at_1 value: 42.653999999999996 - type: map_at_10 value: 59.611999999999995 - type: map_at_100 value: 60.32300000000001 - type: map_at_1000 value: 60.336 - type: map_at_3 value: 55.584999999999994 - type: map_at_5 value: 58.19 - type: mrr_at_1 value: 47.683 - type: mrr_at_10 value: 62.06700000000001 - type: mrr_at_100 value: 62.537 - type: mrr_at_1000 value: 62.544999999999995 - type: mrr_at_3 value: 59.178 - type: mrr_at_5 value: 61.034 - type: ndcg_at_1 value: 47.654 - type: ndcg_at_10 value: 67.001 - type: ndcg_at_100 value: 69.73899999999999 - type: ndcg_at_1000 value: 69.986 - type: ndcg_at_3 value: 59.95700000000001 - type: ndcg_at_5 value: 64.025 - type: precision_at_1 value: 47.654 - type: precision_at_10 value: 10.367999999999999 - type: precision_at_100 value: 1.192 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 26.651000000000003 - type: precision_at_5 value: 18.459 - type: recall_at_1 value: 42.653999999999996 - type: recall_at_10 value: 86.619 - type: recall_at_100 value: 98.04899999999999 - type: recall_at_1000 value: 99.812 - type: recall_at_3 value: 68.987 - type: recall_at_5 value: 78.158 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: None metrics: - type: map_at_1 value: 72.538 - type: map_at_10 value: 86.702 - type: map_at_100 value: 87.31 - type: map_at_1000 value: 87.323 - type: map_at_3 value: 83.87 - type: map_at_5 value: 85.682 - type: mrr_at_1 value: 83.31 - type: mrr_at_10 value: 89.225 - type: mrr_at_100 value: 89.30399999999999 - type: mrr_at_1000 value: 89.30399999999999 - type: mrr_at_3 value: 88.44300000000001 - type: mrr_at_5 value: 89.005 - type: ndcg_at_1 value: 83.32000000000001 - type: ndcg_at_10 value: 90.095 - type: ndcg_at_100 value: 91.12 - type: ndcg_at_1000 value: 91.179 - type: ndcg_at_3 value: 87.606 - type: ndcg_at_5 value: 89.031 - type: precision_at_1 value: 83.32000000000001 - type: precision_at_10 value: 13.641 - type: precision_at_100 value: 1.541 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 38.377 - type: precision_at_5 value: 25.162000000000003 - type: recall_at_1 value: 72.538 - type: recall_at_10 value: 96.47200000000001 - type: recall_at_100 value: 99.785 - type: recall_at_1000 value: 99.99900000000001 - type: recall_at_3 value: 89.278 - type: recall_at_5 value: 93.367 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 73.55219145406065 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 74.13437105242755 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 6.873 - type: 
map_at_10 value: 17.944 - type: map_at_100 value: 21.171 - type: map_at_1000 value: 21.528 - type: map_at_3 value: 12.415 - type: map_at_5 value: 15.187999999999999 - type: mrr_at_1 value: 33.800000000000004 - type: mrr_at_10 value: 46.455 - type: mrr_at_100 value: 47.378 - type: mrr_at_1000 value: 47.394999999999996 - type: mrr_at_3 value: 42.367 - type: mrr_at_5 value: 44.972 - type: ndcg_at_1 value: 33.800000000000004 - type: ndcg_at_10 value: 28.907 - type: ndcg_at_100 value: 39.695 - type: ndcg_at_1000 value: 44.582 - type: ndcg_at_3 value: 26.949 - type: ndcg_at_5 value: 23.988 - type: precision_at_1 value: 33.800000000000004 - type: precision_at_10 value: 15.079999999999998 - type: precision_at_100 value: 3.056 - type: precision_at_1000 value: 0.42100000000000004 - type: precision_at_3 value: 25.167 - type: precision_at_5 value: 21.26 - type: recall_at_1 value: 6.873 - type: recall_at_10 value: 30.568 - type: recall_at_100 value: 62.062 - type: recall_at_1000 value: 85.37700000000001 - type: recall_at_3 value: 15.312999999999999 - type: recall_at_5 value: 21.575 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 82.37009118256057 - type: cos_sim_spearman value: 79.27986395671529 - type: euclidean_pearson value: 79.18037715442115 - type: euclidean_spearman value: 79.28004791561621 - type: manhattan_pearson value: 79.34062972800541 - type: manhattan_spearman value: 79.43106695543402 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 87.48474767383833 - type: cos_sim_spearman value: 79.54505388752513 - type: euclidean_pearson value: 83.43282704179565 - type: euclidean_spearman value: 79.54579919925405 - type: manhattan_pearson value: 83.77564492427952 - type: manhattan_spearman value: 79.84558396989286 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 88.803698035802 - type: cos_sim_spearman value: 88.83451367754881 - type: euclidean_pearson value: 88.28939285711628 - type: euclidean_spearman value: 88.83528996073112 - type: manhattan_pearson value: 88.28017412671795 - type: manhattan_spearman value: 88.9228828016344 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 85.27469288153428 - type: cos_sim_spearman value: 83.87477064876288 - type: euclidean_pearson value: 84.2601737035379 - type: euclidean_spearman value: 83.87431082479074 - type: manhattan_pearson value: 84.3621547772745 - type: manhattan_spearman value: 84.12094375000423 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 88.12749863201587 - type: cos_sim_spearman value: 88.54287568368565 - type: euclidean_pearson value: 87.90429700607999 - type: euclidean_spearman value: 88.5437689576261 - type: manhattan_pearson value: 88.19276653356833 - type: manhattan_spearman value: 88.99995393814679 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 
85.68398747560902 - type: cos_sim_spearman value: 86.48815303460574 - type: euclidean_pearson value: 85.52356631237954 - type: euclidean_spearman value: 86.486391949551 - type: manhattan_pearson value: 85.67267981761788 - type: manhattan_spearman value: 86.7073696332485 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 88.9057107443124 - type: cos_sim_spearman value: 88.7312168757697 - type: euclidean_pearson value: 88.72810439714794 - type: euclidean_spearman value: 88.71976185854771 - type: manhattan_pearson value: 88.50433745949111 - type: manhattan_spearman value: 88.51726175544195 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 67.59391795109886 - type: cos_sim_spearman value: 66.87613008631367 - type: euclidean_pearson value: 69.23198488262217 - type: euclidean_spearman value: 66.85427723013692 - type: manhattan_pearson value: 69.50730124841084 - type: manhattan_spearman value: 67.10404669820792 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 87.0820605344619 - type: cos_sim_spearman value: 86.8518089863434 - type: euclidean_pearson value: 86.31087134689284 - type: euclidean_spearman value: 86.8518520517941 - type: manhattan_pearson value: 86.47203796160612 - type: manhattan_spearman value: 87.1080149734421 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 89.09255369305481 - type: mrr value: 97.10323445617563 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: map_at_1 value: 61.260999999999996 - type: map_at_10 value: 74.043 - type: map_at_100 value: 74.37700000000001 - type: map_at_1000 value: 74.384 - type: map_at_3 value: 71.222 - type: map_at_5 value: 72.875 - type: mrr_at_1 value: 64.333 - type: mrr_at_10 value: 74.984 - type: mrr_at_100 value: 75.247 - type: mrr_at_1000 value: 75.25500000000001 - type: mrr_at_3 value: 73.167 - type: mrr_at_5 value: 74.35000000000001 - type: ndcg_at_1 value: 64.333 - type: ndcg_at_10 value: 79.06 - type: ndcg_at_100 value: 80.416 - type: ndcg_at_1000 value: 80.55600000000001 - type: ndcg_at_3 value: 74.753 - type: ndcg_at_5 value: 76.97500000000001 - type: precision_at_1 value: 64.333 - type: precision_at_10 value: 10.567 - type: precision_at_100 value: 1.1199999999999999 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 29.889 - type: precision_at_5 value: 19.533 - type: recall_at_1 value: 61.260999999999996 - type: recall_at_10 value: 93.167 - type: recall_at_100 value: 99.0 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 81.667 - type: recall_at_5 value: 87.394 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.71980198019801 - type: cos_sim_ap value: 92.81616007802704 - type: cos_sim_f1 value: 
85.17548454688318 - type: cos_sim_precision value: 89.43894389438944 - type: cos_sim_recall value: 81.3 - type: dot_accuracy value: 99.71980198019801 - type: dot_ap value: 92.81398760591358 - type: dot_f1 value: 85.17548454688318 - type: dot_precision value: 89.43894389438944 - type: dot_recall value: 81.3 - type: euclidean_accuracy value: 99.71980198019801 - type: euclidean_ap value: 92.81560637245072 - type: euclidean_f1 value: 85.17548454688318 - type: euclidean_precision value: 89.43894389438944 - type: euclidean_recall value: 81.3 - type: manhattan_accuracy value: 99.73069306930694 - type: manhattan_ap value: 93.14005487480794 - type: manhattan_f1 value: 85.56263269639068 - type: manhattan_precision value: 91.17647058823529 - type: manhattan_recall value: 80.60000000000001 - type: max_accuracy value: 99.73069306930694 - type: max_ap value: 93.14005487480794 - type: max_f1 value: 85.56263269639068 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 79.86443362395185 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 49.40897096662564 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 55.66040806627947 - type: mrr value: 56.58670475766064 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 31.51015090598575 - type: cos_sim_spearman value: 31.35016454939226 - type: dot_pearson value: 31.5150068731 - type: dot_spearman value: 31.34790869023487 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.254 - type: map_at_10 value: 2.064 - type: map_at_100 value: 12.909 - type: map_at_1000 value: 31.761 - type: map_at_3 value: 0.738 - type: map_at_5 value: 1.155 - type: mrr_at_1 value: 96.0 - type: mrr_at_10 value: 98.0 - type: mrr_at_100 value: 98.0 - type: mrr_at_1000 value: 98.0 - type: mrr_at_3 value: 98.0 - type: mrr_at_5 value: 98.0 - type: ndcg_at_1 value: 93.0 - type: ndcg_at_10 value: 82.258 - type: ndcg_at_100 value: 64.34 - type: ndcg_at_1000 value: 57.912 - type: ndcg_at_3 value: 90.827 - type: ndcg_at_5 value: 86.79 - type: precision_at_1 value: 96.0 - type: precision_at_10 value: 84.8 - type: precision_at_100 value: 66.0 - type: precision_at_1000 value: 25.356 - type: precision_at_3 value: 94.667 - type: precision_at_5 value: 90.4 - type: recall_at_1 value: 0.254 - type: recall_at_10 value: 2.1950000000000003 - type: recall_at_100 value: 16.088 - type: recall_at_1000 value: 54.559000000000005 - type: recall_at_3 value: 0.75 - type: recall_at_5 value: 1.191 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: map_at_1 value: 2.976 - type: map_at_10 value: 11.389000000000001 - type: map_at_100 value: 18.429000000000002 - type: map_at_1000 value: 20.113 - type: map_at_3 value: 6.483 - type: map_at_5 value: 8.770999999999999 
- type: mrr_at_1 value: 40.816 - type: mrr_at_10 value: 58.118 - type: mrr_at_100 value: 58.489999999999995 - type: mrr_at_1000 value: 58.489999999999995 - type: mrr_at_3 value: 53.061 - type: mrr_at_5 value: 57.041 - type: ndcg_at_1 value: 40.816 - type: ndcg_at_10 value: 30.567 - type: ndcg_at_100 value: 42.44 - type: ndcg_at_1000 value: 53.480000000000004 - type: ndcg_at_3 value: 36.016 - type: ndcg_at_5 value: 34.257 - type: precision_at_1 value: 42.857 - type: precision_at_10 value: 25.714 - type: precision_at_100 value: 8.429 - type: precision_at_1000 value: 1.5939999999999999 - type: precision_at_3 value: 36.735 - type: precision_at_5 value: 33.878 - type: recall_at_1 value: 2.976 - type: recall_at_10 value: 17.854999999999997 - type: recall_at_100 value: 51.833 - type: recall_at_1000 value: 86.223 - type: recall_at_3 value: 7.887 - type: recall_at_5 value: 12.026 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 85.1174 - type: ap value: 30.169441069345748 - type: f1 value: 69.79254701873245 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 72.58347481607245 - type: f1 value: 72.74877295564937 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 53.90586138221305 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 87.35769207844072 - type: cos_sim_ap value: 77.9645072410354 - type: cos_sim_f1 value: 71.32352941176471 - type: cos_sim_precision value: 66.5903890160183 - type: cos_sim_recall value: 76.78100263852242 - type: dot_accuracy value: 87.37557370209214 - type: dot_ap value: 77.96250046429908 - type: dot_f1 value: 71.28932757557064 - type: dot_precision value: 66.95249130938586 - type: dot_recall value: 76.22691292875989 - type: euclidean_accuracy value: 87.35173153722357 - type: euclidean_ap value: 77.96520460741593 - type: euclidean_f1 value: 71.32470733210104 - type: euclidean_precision value: 66.91329479768785 - type: euclidean_recall value: 76.35883905013192 - type: manhattan_accuracy value: 87.25636287774931 - type: manhattan_ap value: 77.77752485611796 - type: manhattan_f1 value: 71.18148599269183 - type: manhattan_precision value: 66.10859728506787 - type: manhattan_recall value: 77.0976253298153 - type: max_accuracy value: 87.37557370209214 - type: max_ap value: 77.96520460741593 - type: max_f1 value: 71.32470733210104 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.38176737687739 - type: cos_sim_ap value: 86.58811861657401 - type: cos_sim_f1 value: 79.09430644097604 - type: cos_sim_precision value: 75.45085977911366 - type: cos_sim_recall value: 83.10748383122882 - type: dot_accuracy value: 89.38370784336554 - type: dot_ap value: 86.58840606004333 - type: dot_f1 
value: 79.10179860068133 - type: dot_precision value: 75.44546153308643 - type: dot_recall value: 83.13058207576223 - type: euclidean_accuracy value: 89.38564830985369 - type: euclidean_ap value: 86.58820721061164 - type: euclidean_f1 value: 79.09070942235888 - type: euclidean_precision value: 75.38729937194697 - type: euclidean_recall value: 83.17677856482906 - type: manhattan_accuracy value: 89.40699344122326 - type: manhattan_ap value: 86.60631843011362 - type: manhattan_f1 value: 79.14949970570925 - type: manhattan_precision value: 75.78191039729502 - type: manhattan_recall value: 82.83030489682784 - type: max_accuracy value: 89.40699344122326 - type: max_ap value: 86.60631843011362 - type: max_f1 value: 79.14949970570925 - task: type: STS dataset: name: MTEB AFQMC type: C-MTEB/AFQMC config: default split: validation revision: b44c3b011063adb25877c13823db83bb193913c4 metrics: - type: cos_sim_pearson value: 65.58442135663871 - type: cos_sim_spearman value: 72.2538631361313 - type: euclidean_pearson value: 70.97255486607429 - type: euclidean_spearman value: 72.25374250228647 - type: manhattan_pearson value: 70.83250199989911 - type: manhattan_spearman value: 72.14819496536272 - task: type: STS dataset: name: MTEB ATEC type: C-MTEB/ATEC config: default split: test revision: 0f319b1142f28d00e055a6770f3f726ae9b7d865 metrics: - type: cos_sim_pearson value: 59.99478404929932 - type: cos_sim_spearman value: 62.61836216999812 - type: euclidean_pearson value: 66.86429811933593 - type: euclidean_spearman value: 62.6183520374191 - type: manhattan_pearson value: 66.8063778911633 - type: manhattan_spearman value: 62.569607573241115 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 53.98400000000001 - type: f1 value: 51.21447361350723 - task: type: STS dataset: name: MTEB BQ type: C-MTEB/BQ config: default split: test revision: e3dda5e115e487b39ec7e618c0c6a29137052a55 metrics: - type: cos_sim_pearson value: 79.11941660686553 - type: cos_sim_spearman value: 81.25029594540435 - type: euclidean_pearson value: 82.06973504238826 - type: euclidean_spearman value: 81.2501989488524 - type: manhattan_pearson value: 82.10094630392753 - type: manhattan_spearman value: 81.27987244392389 - task: type: Clustering dataset: name: MTEB CLSClusteringP2P type: C-MTEB/CLSClusteringP2P config: default split: test revision: 4b6227591c6c1a73bc76b1055f3b7f3588e72476 metrics: - type: v_measure value: 47.07270168705156 - task: type: Clustering dataset: name: MTEB CLSClusteringS2S type: C-MTEB/CLSClusteringS2S config: default split: test revision: e458b3f5414b62b7f9f83499ac1f5497ae2e869f metrics: - type: v_measure value: 45.98511703185043 - task: type: Reranking dataset: name: MTEB CMedQAv1 type: C-MTEB/CMedQAv1-reranking config: default split: test revision: 8d7f1e942507dac42dc58017c1a001c3717da7df metrics: - type: map value: 88.19895157194931 - type: mrr value: 90.21424603174603 - task: type: Reranking dataset: name: MTEB CMedQAv2 type: C-MTEB/CMedQAv2-reranking config: default split: test revision: 23d186750531a14a0357ca22cd92d712fd512ea0 metrics: - type: map value: 88.03317320980119 - type: mrr value: 89.9461507936508 - task: type: Retrieval dataset: name: MTEB CmedqaRetrieval type: C-MTEB/CmedqaRetrieval config: default split: dev revision: cd540c506dae1cf9e9a59c3e06f42030d54e7301 metrics: - type: map_at_1 value: 29.037000000000003 - type: map_at_10 
value: 42.001 - type: map_at_100 value: 43.773 - type: map_at_1000 value: 43.878 - type: map_at_3 value: 37.637 - type: map_at_5 value: 40.034 - type: mrr_at_1 value: 43.136 - type: mrr_at_10 value: 51.158 - type: mrr_at_100 value: 52.083 - type: mrr_at_1000 value: 52.12 - type: mrr_at_3 value: 48.733 - type: mrr_at_5 value: 50.025 - type: ndcg_at_1 value: 43.136 - type: ndcg_at_10 value: 48.685 - type: ndcg_at_100 value: 55.513 - type: ndcg_at_1000 value: 57.242000000000004 - type: ndcg_at_3 value: 43.329 - type: ndcg_at_5 value: 45.438 - type: precision_at_1 value: 43.136 - type: precision_at_10 value: 10.56 - type: precision_at_100 value: 1.6129999999999998 - type: precision_at_1000 value: 0.184 - type: precision_at_3 value: 24.064 - type: precision_at_5 value: 17.269000000000002 - type: recall_at_1 value: 29.037000000000003 - type: recall_at_10 value: 59.245000000000005 - type: recall_at_100 value: 87.355 - type: recall_at_1000 value: 98.74000000000001 - type: recall_at_3 value: 42.99 - type: recall_at_5 value: 49.681999999999995 - task: type: PairClassification dataset: name: MTEB Cmnli type: C-MTEB/CMNLI config: default split: validation revision: 41bc36f332156f7adc9e38f53777c959b2ae9766 metrics: - type: cos_sim_accuracy value: 82.68190018039687 - type: cos_sim_ap value: 90.18017125327886 - type: cos_sim_f1 value: 83.64080906868193 - type: cos_sim_precision value: 79.7076890489303 - type: cos_sim_recall value: 87.98223053542202 - type: dot_accuracy value: 82.68190018039687 - type: dot_ap value: 90.18782350103646 - type: dot_f1 value: 83.64242087729039 - type: dot_precision value: 79.65313028764805 - type: dot_recall value: 88.05237315875614 - type: euclidean_accuracy value: 82.68190018039687 - type: euclidean_ap value: 90.1801957900632 - type: euclidean_f1 value: 83.63636363636364 - type: euclidean_precision value: 79.52772506852203 - type: euclidean_recall value: 88.19265840542437 - type: manhattan_accuracy value: 82.14070956103427 - type: manhattan_ap value: 89.96178420101427 - type: manhattan_f1 value: 83.21087838578791 - type: manhattan_precision value: 78.35605121850475 - type: manhattan_recall value: 88.70703764320785 - type: max_accuracy value: 82.68190018039687 - type: max_ap value: 90.18782350103646 - type: max_f1 value: 83.64242087729039 - task: type: Retrieval dataset: name: MTEB CovidRetrieval type: C-MTEB/CovidRetrieval config: default split: dev revision: 1271c7809071a13532e05f25fb53511ffce77117 metrics: - type: map_at_1 value: 72.234 - type: map_at_10 value: 80.10000000000001 - type: map_at_100 value: 80.36 - type: map_at_1000 value: 80.363 - type: map_at_3 value: 78.315 - type: map_at_5 value: 79.607 - type: mrr_at_1 value: 72.392 - type: mrr_at_10 value: 80.117 - type: mrr_at_100 value: 80.36999999999999 - type: mrr_at_1000 value: 80.373 - type: mrr_at_3 value: 78.469 - type: mrr_at_5 value: 79.633 - type: ndcg_at_1 value: 72.392 - type: ndcg_at_10 value: 83.651 - type: ndcg_at_100 value: 84.749 - type: ndcg_at_1000 value: 84.83000000000001 - type: ndcg_at_3 value: 80.253 - type: ndcg_at_5 value: 82.485 - type: precision_at_1 value: 72.392 - type: precision_at_10 value: 9.557 - type: precision_at_100 value: 1.004 - type: precision_at_1000 value: 0.101 - type: precision_at_3 value: 28.732000000000003 - type: precision_at_5 value: 18.377 - type: recall_at_1 value: 72.234 - type: recall_at_10 value: 94.573 - type: recall_at_100 value: 99.368 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 85.669 - type: recall_at_5 value: 91.01700000000001 - task: type: 
Retrieval dataset: name: MTEB DuRetrieval type: C-MTEB/DuRetrieval config: default split: dev revision: a1a333e290fe30b10f3f56498e3a0d911a693ced metrics: - type: map_at_1 value: 26.173999999999996 - type: map_at_10 value: 80.04 - type: map_at_100 value: 82.94500000000001 - type: map_at_1000 value: 82.98100000000001 - type: map_at_3 value: 55.562999999999995 - type: map_at_5 value: 69.89800000000001 - type: mrr_at_1 value: 89.5 - type: mrr_at_10 value: 92.996 - type: mrr_at_100 value: 93.06400000000001 - type: mrr_at_1000 value: 93.065 - type: mrr_at_3 value: 92.658 - type: mrr_at_5 value: 92.84599999999999 - type: ndcg_at_1 value: 89.5 - type: ndcg_at_10 value: 87.443 - type: ndcg_at_100 value: 90.253 - type: ndcg_at_1000 value: 90.549 - type: ndcg_at_3 value: 85.874 - type: ndcg_at_5 value: 84.842 - type: precision_at_1 value: 89.5 - type: precision_at_10 value: 41.805 - type: precision_at_100 value: 4.827 - type: precision_at_1000 value: 0.49 - type: precision_at_3 value: 76.85 - type: precision_at_5 value: 64.8 - type: recall_at_1 value: 26.173999999999996 - type: recall_at_10 value: 89.101 - type: recall_at_100 value: 98.08099999999999 - type: recall_at_1000 value: 99.529 - type: recall_at_3 value: 57.902 - type: recall_at_5 value: 74.602 - task: type: Retrieval dataset: name: MTEB EcomRetrieval type: C-MTEB/EcomRetrieval config: default split: dev revision: 687de13dc7294d6fd9be10c6945f9e8fec8166b9 metrics: - type: map_at_1 value: 56.10000000000001 - type: map_at_10 value: 66.15299999999999 - type: map_at_100 value: 66.625 - type: map_at_1000 value: 66.636 - type: map_at_3 value: 63.632999999999996 - type: map_at_5 value: 65.293 - type: mrr_at_1 value: 56.10000000000001 - type: mrr_at_10 value: 66.15299999999999 - type: mrr_at_100 value: 66.625 - type: mrr_at_1000 value: 66.636 - type: mrr_at_3 value: 63.632999999999996 - type: mrr_at_5 value: 65.293 - type: ndcg_at_1 value: 56.10000000000001 - type: ndcg_at_10 value: 71.146 - type: ndcg_at_100 value: 73.27799999999999 - type: ndcg_at_1000 value: 73.529 - type: ndcg_at_3 value: 66.09 - type: ndcg_at_5 value: 69.08999999999999 - type: precision_at_1 value: 56.10000000000001 - type: precision_at_10 value: 8.68 - type: precision_at_100 value: 0.964 - type: precision_at_1000 value: 0.098 - type: precision_at_3 value: 24.4 - type: precision_at_5 value: 16.1 - type: recall_at_1 value: 56.10000000000001 - type: recall_at_10 value: 86.8 - type: recall_at_100 value: 96.39999999999999 - type: recall_at_1000 value: 98.3 - type: recall_at_3 value: 73.2 - type: recall_at_5 value: 80.5 - task: type: Classification dataset: name: MTEB IFlyTek type: C-MTEB/IFlyTek-classification config: default split: validation revision: 421605374b29664c5fc098418fe20ada9bd55f8a metrics: - type: accuracy value: 54.52096960369373 - type: f1 value: 40.930845295808695 - task: type: Classification dataset: name: MTEB JDReview type: C-MTEB/JDReview-classification config: default split: test revision: b7c64bd89eb87f8ded463478346f76731f07bf8b metrics: - type: accuracy value: 86.51031894934334 - type: ap value: 55.9516014323483 - type: f1 value: 81.54813679326381 - task: type: STS dataset: name: MTEB LCQMC type: C-MTEB/LCQMC config: default split: test revision: 17f9b096f80380fce5ed12a9be8be7784b337daf metrics: - type: cos_sim_pearson value: 69.67437838574276 - type: cos_sim_spearman value: 73.81314174653045 - type: euclidean_pearson value: 72.63430276680275 - type: euclidean_spearman value: 73.81358736777001 - type: manhattan_pearson value: 72.58743833842829 - type: 
manhattan_spearman value: 73.7590419009179 - task: type: Reranking dataset: name: MTEB MMarcoReranking type: C-MTEB/Mmarco-reranking config: default split: dev revision: None metrics: - type: map value: 31.648613483640254 - type: mrr value: 30.37420634920635 - task: type: Retrieval dataset: name: MTEB MMarcoRetrieval type: C-MTEB/MMarcoRetrieval config: default split: dev revision: 539bbde593d947e2a124ba72651aafc09eb33fc2 metrics: - type: map_at_1 value: 73.28099999999999 - type: map_at_10 value: 81.977 - type: map_at_100 value: 82.222 - type: map_at_1000 value: 82.22699999999999 - type: map_at_3 value: 80.441 - type: map_at_5 value: 81.46600000000001 - type: mrr_at_1 value: 75.673 - type: mrr_at_10 value: 82.41000000000001 - type: mrr_at_100 value: 82.616 - type: mrr_at_1000 value: 82.621 - type: mrr_at_3 value: 81.094 - type: mrr_at_5 value: 81.962 - type: ndcg_at_1 value: 75.673 - type: ndcg_at_10 value: 85.15599999999999 - type: ndcg_at_100 value: 86.151 - type: ndcg_at_1000 value: 86.26899999999999 - type: ndcg_at_3 value: 82.304 - type: ndcg_at_5 value: 84.009 - type: precision_at_1 value: 75.673 - type: precision_at_10 value: 10.042 - type: precision_at_100 value: 1.052 - type: precision_at_1000 value: 0.106 - type: precision_at_3 value: 30.673000000000002 - type: precision_at_5 value: 19.326999999999998 - type: recall_at_1 value: 73.28099999999999 - type: recall_at_10 value: 94.446 - type: recall_at_100 value: 98.737 - type: recall_at_1000 value: 99.649 - type: recall_at_3 value: 86.984 - type: recall_at_5 value: 91.024 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 81.08607935440484 - type: f1 value: 78.24879986066307 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 86.05917955615332 - type: f1 value: 85.05279279434997 - task: type: Retrieval dataset: name: MTEB MedicalRetrieval type: C-MTEB/MedicalRetrieval config: default split: dev revision: 2039188fb5800a9803ba5048df7b76e6fb151fc6 metrics: - type: map_at_1 value: 56.2 - type: map_at_10 value: 62.57899999999999 - type: map_at_100 value: 63.154999999999994 - type: map_at_1000 value: 63.193 - type: map_at_3 value: 61.217 - type: map_at_5 value: 62.012 - type: mrr_at_1 value: 56.3 - type: mrr_at_10 value: 62.629000000000005 - type: mrr_at_100 value: 63.205999999999996 - type: mrr_at_1000 value: 63.244 - type: mrr_at_3 value: 61.267 - type: mrr_at_5 value: 62.062 - type: ndcg_at_1 value: 56.2 - type: ndcg_at_10 value: 65.592 - type: ndcg_at_100 value: 68.657 - type: ndcg_at_1000 value: 69.671 - type: ndcg_at_3 value: 62.808 - type: ndcg_at_5 value: 64.24499999999999 - type: precision_at_1 value: 56.2 - type: precision_at_10 value: 7.5 - type: precision_at_100 value: 0.899 - type: precision_at_1000 value: 0.098 - type: precision_at_3 value: 22.467000000000002 - type: precision_at_5 value: 14.180000000000001 - type: recall_at_1 value: 56.2 - type: recall_at_10 value: 75.0 - type: recall_at_100 value: 89.9 - type: recall_at_1000 value: 97.89999999999999 - type: recall_at_3 value: 67.4 - type: recall_at_5 value: 70.89999999999999 - task: type: Classification dataset: name: MTEB MultilingualSentiment type: C-MTEB/MultilingualSentiment-classification config: default split: 
validation revision: 46958b007a63fdbf239b7672c25d0bea67b5ea1a metrics: - type: accuracy value: 76.87666666666667 - type: f1 value: 76.7317686219665 - task: type: PairClassification dataset: name: MTEB Ocnli type: C-MTEB/OCNLI config: default split: validation revision: 66e76a618a34d6d565d5538088562851e6daa7ec metrics: - type: cos_sim_accuracy value: 79.64266377910124 - type: cos_sim_ap value: 84.78274442344829 - type: cos_sim_f1 value: 81.16947472745292 - type: cos_sim_precision value: 76.47058823529412 - type: cos_sim_recall value: 86.48363252375924 - type: dot_accuracy value: 79.64266377910124 - type: dot_ap value: 84.7851404063692 - type: dot_f1 value: 81.16947472745292 - type: dot_precision value: 76.47058823529412 - type: dot_recall value: 86.48363252375924 - type: euclidean_accuracy value: 79.64266377910124 - type: euclidean_ap value: 84.78068373762378 - type: euclidean_f1 value: 81.14794656110837 - type: euclidean_precision value: 76.35009310986965 - type: euclidean_recall value: 86.58922914466737 - type: manhattan_accuracy value: 79.48023822414727 - type: manhattan_ap value: 84.72928897427576 - type: manhattan_f1 value: 81.32084770823064 - type: manhattan_precision value: 76.24768946395564 - type: manhattan_recall value: 87.11721224920802 - type: max_accuracy value: 79.64266377910124 - type: max_ap value: 84.7851404063692 - type: max_f1 value: 81.32084770823064 - task: type: Classification dataset: name: MTEB OnlineShopping type: C-MTEB/OnlineShopping-classification config: default split: test revision: e610f2ebd179a8fda30ae534c3878750a96db120 metrics: - type: accuracy value: 94.3 - type: ap value: 92.8664032274438 - type: f1 value: 94.29311102997727 - task: type: STS dataset: name: MTEB PAWSX type: C-MTEB/PAWSX config: default split: test revision: 9c6a90e430ac22b5779fb019a23e820b11a8b5e1 metrics: - type: cos_sim_pearson value: 48.51392279882909 - type: cos_sim_spearman value: 54.06338895994974 - type: euclidean_pearson value: 52.58480559573412 - type: euclidean_spearman value: 54.06417276612201 - type: manhattan_pearson value: 52.69525121721343 - type: manhattan_spearman value: 54.048147455389675 - task: type: STS dataset: name: MTEB QBQTC type: C-MTEB/QBQTC config: default split: test revision: 790b0510dc52b1553e8c49f3d2afb48c0e5c48b7 metrics: - type: cos_sim_pearson value: 29.728387290757325 - type: cos_sim_spearman value: 31.366121633635284 - type: euclidean_pearson value: 29.14588368552961 - type: euclidean_spearman value: 31.36764411112844 - type: manhattan_pearson value: 29.63517350523121 - type: manhattan_spearman value: 31.94157020583762 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 63.64868296271406 - type: cos_sim_spearman value: 66.12800618164744 - type: euclidean_pearson value: 63.21405767340238 - type: euclidean_spearman value: 66.12786567790748 - type: manhattan_pearson value: 64.04300276525848 - type: manhattan_spearman value: 66.5066857145652 - task: type: STS dataset: name: MTEB STSB type: C-MTEB/STSB config: default split: test revision: 0cde68302b3541bb8b3c340dc0644b0b745b3dc0 metrics: - type: cos_sim_pearson value: 81.2302623912794 - type: cos_sim_spearman value: 81.16833673266562 - type: euclidean_pearson value: 79.47647843876024 - type: euclidean_spearman value: 81.16944349524972 - type: manhattan_pearson value: 79.84947238492208 - type: manhattan_spearman value: 81.64626599410026 - task: type: Reranking 
dataset: name: MTEB T2Reranking type: C-MTEB/T2Reranking config: default split: dev revision: 76631901a18387f85eaa53e5450019b87ad58ef9 metrics: - type: map value: 67.80129586475687 - type: mrr value: 77.77402311635554 - task: type: Retrieval dataset: name: MTEB T2Retrieval type: C-MTEB/T2Retrieval config: default split: dev revision: 8731a845f1bf500a4f111cf1070785c793d10e64 metrics: - type: map_at_1 value: 28.666999999999998 - type: map_at_10 value: 81.063 - type: map_at_100 value: 84.504 - type: map_at_1000 value: 84.552 - type: map_at_3 value: 56.897 - type: map_at_5 value: 70.073 - type: mrr_at_1 value: 92.087 - type: mrr_at_10 value: 94.132 - type: mrr_at_100 value: 94.19800000000001 - type: mrr_at_1000 value: 94.19999999999999 - type: mrr_at_3 value: 93.78999999999999 - type: mrr_at_5 value: 94.002 - type: ndcg_at_1 value: 92.087 - type: ndcg_at_10 value: 87.734 - type: ndcg_at_100 value: 90.736 - type: ndcg_at_1000 value: 91.184 - type: ndcg_at_3 value: 88.78 - type: ndcg_at_5 value: 87.676 - type: precision_at_1 value: 92.087 - type: precision_at_10 value: 43.46 - type: precision_at_100 value: 5.07 - type: precision_at_1000 value: 0.518 - type: precision_at_3 value: 77.49000000000001 - type: precision_at_5 value: 65.194 - type: recall_at_1 value: 28.666999999999998 - type: recall_at_10 value: 86.632 - type: recall_at_100 value: 96.646 - type: recall_at_1000 value: 98.917 - type: recall_at_3 value: 58.333999999999996 - type: recall_at_5 value: 72.974 - task: type: Classification dataset: name: MTEB TNews type: C-MTEB/TNews-classification config: default split: validation revision: 317f262bf1e6126357bbe89e875451e4b0938fe4 metrics: - type: accuracy value: 52.971999999999994 - type: f1 value: 50.2898280984929 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringP2P type: C-MTEB/ThuNewsClusteringP2P config: default split: test revision: 5798586b105c0434e4f0fe5e767abe619442cf93 metrics: - type: v_measure value: 86.0797948663824 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringS2S type: C-MTEB/ThuNewsClusteringS2S config: default split: test revision: 8a8b2caeda43f39e13c4bc5bea0f8a667896e10d metrics: - type: v_measure value: 85.10759092255017 - task: type: Retrieval dataset: name: MTEB VideoRetrieval type: C-MTEB/VideoRetrieval config: default split: dev revision: 58c2597a5943a2ba48f4668c3b90d796283c5639 metrics: - type: map_at_1 value: 65.60000000000001 - type: map_at_10 value: 74.773 - type: map_at_100 value: 75.128 - type: map_at_1000 value: 75.136 - type: map_at_3 value: 73.05 - type: map_at_5 value: 74.13499999999999 - type: mrr_at_1 value: 65.60000000000001 - type: mrr_at_10 value: 74.773 - type: mrr_at_100 value: 75.128 - type: mrr_at_1000 value: 75.136 - type: mrr_at_3 value: 73.05 - type: mrr_at_5 value: 74.13499999999999 - type: ndcg_at_1 value: 65.60000000000001 - type: ndcg_at_10 value: 78.84299999999999 - type: ndcg_at_100 value: 80.40899999999999 - type: ndcg_at_1000 value: 80.57 - type: ndcg_at_3 value: 75.40599999999999 - type: ndcg_at_5 value: 77.351 - type: precision_at_1 value: 65.60000000000001 - type: precision_at_10 value: 9.139999999999999 - type: precision_at_100 value: 0.984 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 27.400000000000002 - type: precision_at_5 value: 17.380000000000003 - type: recall_at_1 value: 65.60000000000001 - type: recall_at_10 value: 91.4 - type: recall_at_100 value: 98.4 - type: recall_at_1000 value: 99.6 - type: recall_at_3 value: 82.19999999999999 - type: recall_at_5 value: 86.9 - task: type: 
Classification dataset: name: MTEB Waimai type: C-MTEB/waimai-classification config: default split: test revision: 339287def212450dcaa9df8c22bf93e9980c7023 metrics: - type: accuracy value: 89.47 - type: ap value: 75.59561751845389 - type: f1 value: 87.95207751382563 ---

# sunzx0810/gte-Qwen2-7B-instruct-Q5_K_M-GGUF

This model was converted to GGUF format from [`Alibaba-NLP/gte-Qwen2-7B-instruct`](https://huggingface.co/Alibaba-NLP/gte-Qwen2-7B-instruct) using llama.cpp via ggml.ai's [GGUF-my-repo](https://huggingface.co/spaces/ggml-org/gguf-my-repo) space. Refer to the [original model card](https://huggingface.co/Alibaba-NLP/gte-Qwen2-7B-instruct) for more details on the model.

## Use with llama.cpp

Install llama.cpp through brew (works on Mac and Linux):

```bash
brew install llama.cpp
```

Invoke the llama.cpp server or the CLI.

### CLI:

```bash
llama-cli --hf-repo sunzx0810/gte-Qwen2-7B-instruct-Q5_K_M-GGUF --hf-file gte-qwen2-7b-instruct-q5_k_m.gguf -p "The meaning to life and the universe is"
```

### Server:

```bash
llama-server --hf-repo sunzx0810/gte-Qwen2-7B-instruct-Q5_K_M-GGUF --hf-file gte-qwen2-7b-instruct-q5_k_m.gguf -c 2048
```

Note: You can also use this checkpoint directly through the [usage steps](https://github.com/ggerganov/llama.cpp?tab=readme-ov-file#usage) listed in the llama.cpp repo.

Step 1: Clone llama.cpp from GitHub.

```bash
git clone https://github.com/ggerganov/llama.cpp
```

Step 2: Move into the llama.cpp folder and build it with the `LLAMA_CURL=1` flag along with any other hardware-specific flags (e.g. `LLAMA_CUDA=1` for Nvidia GPUs on Linux).

```bash
cd llama.cpp && LLAMA_CURL=1 make
```

Step 3: Run inference through the main binary.

```bash
./llama-cli --hf-repo sunzx0810/gte-Qwen2-7B-instruct-Q5_K_M-GGUF --hf-file gte-qwen2-7b-instruct-q5_k_m.gguf -p "The meaning to life and the universe is"
```

or

```bash
./llama-server --hf-repo sunzx0810/gte-Qwen2-7B-instruct-Q5_K_M-GGUF --hf-file gte-qwen2-7b-instruct-q5_k_m.gguf -c 2048
```
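Because gte-Qwen2-7B-instruct is an embedding model, you will usually want sentence vectors rather than generated text. The snippet below is a hedged sketch (not part of the upstream card) of one way to request embeddings from a running `llama-server`, assuming your llama.cpp build supports the `--embeddings` flag and the OpenAI-compatible `/v1/embeddings` route on the default port 8080; check `llama-server --help` for the exact option names in your version.

```bash
# Sketch only: start the server with embedding output enabled
# (flag name assumed; older builds may spell it --embedding).
llama-server --hf-repo sunzx0810/gte-Qwen2-7B-instruct-Q5_K_M-GGUF \
  --hf-file gte-qwen2-7b-instruct-q5_k_m.gguf --embeddings -c 2048 &

# Once the model has finished loading, request an embedding
# via the OpenAI-compatible endpoint.
curl http://127.0.0.1:8080/v1/embeddings \
  -H "Content-Type: application/json" \
  -d '{"input": "The meaning to life and the universe is"}'
```

The response should contain a `data[0].embedding` array; cosine similarity between such vectors can then be used for retrieval or semantic search.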
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
tensorblock/gte-Qwen2-1.5B-instruct-GGUF
tensorblock
sentence-similarity
[ "sentence-transformers", "gguf", "mteb", "transformers", "Qwen2", "sentence-similarity", "TensorBlock", "GGUF", "base_model:Alibaba-NLP/gte-Qwen2-1.5B-instruct", "base_model:quantized:Alibaba-NLP/gte-Qwen2-1.5B-instruct", "license:apache-2.0", "model-index", "autotrain_compatible", "endpoints_compatible", "region:us", "conversational" ]
2024-11-08T19:34:45
2024-11-16T00:49:57
114
0
--- base_model: Alibaba-NLP/gte-Qwen2-1.5B-instruct license: apache-2.0 tags: - mteb - sentence-transformers - transformers - Qwen2 - sentence-similarity - TensorBlock - GGUF model-index: - name: gte-qwen2-7B-instruct results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 83.98507462686567 - type: ap value: 50.93015252587014 - type: f1 value: 78.50416599051215 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 96.61065 - type: ap value: 94.89174052954196 - type: f1 value: 96.60942596940565 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 55.614000000000004 - type: f1 value: 54.90553480294904 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: map_at_1 value: 45.164 - type: map_at_10 value: 61.519 - type: map_at_100 value: 61.769 - type: map_at_1000 value: 61.769 - type: map_at_3 value: 57.443999999999996 - type: map_at_5 value: 60.058 - type: mrr_at_1 value: 46.088 - type: mrr_at_10 value: 61.861 - type: mrr_at_100 value: 62.117999999999995 - type: mrr_at_1000 value: 62.117999999999995 - type: mrr_at_3 value: 57.729 - type: mrr_at_5 value: 60.392 - type: ndcg_at_1 value: 45.164 - type: ndcg_at_10 value: 69.72 - type: ndcg_at_100 value: 70.719 - type: ndcg_at_1000 value: 70.719 - type: ndcg_at_3 value: 61.517999999999994 - type: ndcg_at_5 value: 66.247 - type: precision_at_1 value: 45.164 - type: precision_at_10 value: 9.545 - type: precision_at_100 value: 0.996 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 24.443 - type: precision_at_5 value: 16.97 - type: recall_at_1 value: 45.164 - type: recall_at_10 value: 95.448 - type: recall_at_100 value: 99.644 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 73.329 - type: recall_at_5 value: 84.851 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 50.511868162026175 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 45.007803189284004 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 64.55292107723382 - type: mrr value: 77.66158818097877 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 85.65459047085452 - type: cos_sim_spearman value: 82.10729255710761 - type: euclidean_pearson value: 82.78079159312476 - type: euclidean_spearman value: 80.50002701880933 - type: manhattan_pearson value: 82.41372641383016 - type: manhattan_spearman value: 80.57412509272639 - task: type: 
Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 87.30844155844156 - type: f1 value: 87.25307322443255 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 43.20754608934859 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 38.818037697335505 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: map_at_1 value: 35.423 - type: map_at_10 value: 47.198 - type: map_at_100 value: 48.899 - type: map_at_1000 value: 49.004 - type: map_at_3 value: 43.114999999999995 - type: map_at_5 value: 45.491 - type: mrr_at_1 value: 42.918 - type: mrr_at_10 value: 53.299 - type: mrr_at_100 value: 54.032000000000004 - type: mrr_at_1000 value: 54.055 - type: mrr_at_3 value: 50.453 - type: mrr_at_5 value: 52.205999999999996 - type: ndcg_at_1 value: 42.918 - type: ndcg_at_10 value: 53.98 - type: ndcg_at_100 value: 59.57 - type: ndcg_at_1000 value: 60.879000000000005 - type: ndcg_at_3 value: 48.224000000000004 - type: ndcg_at_5 value: 50.998 - type: precision_at_1 value: 42.918 - type: precision_at_10 value: 10.299999999999999 - type: precision_at_100 value: 1.687 - type: precision_at_1000 value: 0.211 - type: precision_at_3 value: 22.842000000000002 - type: precision_at_5 value: 16.681 - type: recall_at_1 value: 35.423 - type: recall_at_10 value: 66.824 - type: recall_at_100 value: 89.564 - type: recall_at_1000 value: 97.501 - type: recall_at_3 value: 50.365 - type: recall_at_5 value: 57.921 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval type: BeIR/cqadupstack config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: map_at_1 value: 33.205 - type: map_at_10 value: 44.859 - type: map_at_100 value: 46.135 - type: map_at_1000 value: 46.259 - type: map_at_3 value: 41.839 - type: map_at_5 value: 43.662 - type: mrr_at_1 value: 41.146 - type: mrr_at_10 value: 50.621 - type: mrr_at_100 value: 51.207 - type: mrr_at_1000 value: 51.246 - type: mrr_at_3 value: 48.535000000000004 - type: mrr_at_5 value: 49.818 - type: ndcg_at_1 value: 41.146 - type: ndcg_at_10 value: 50.683 - type: ndcg_at_100 value: 54.82 - type: ndcg_at_1000 value: 56.69 - type: ndcg_at_3 value: 46.611000000000004 - type: ndcg_at_5 value: 48.66 - type: precision_at_1 value: 41.146 - type: precision_at_10 value: 9.439 - type: precision_at_100 value: 1.465 - type: precision_at_1000 value: 0.194 - type: precision_at_3 value: 22.59 - type: precision_at_5 value: 15.86 - type: recall_at_1 value: 33.205 - type: recall_at_10 value: 61.028999999999996 - type: recall_at_100 value: 78.152 - type: recall_at_1000 value: 89.59700000000001 - type: recall_at_3 value: 49.05 - type: recall_at_5 value: 54.836 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval type: BeIR/cqadupstack config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: map_at_1 value: 41.637 - type: map_at_10 value: 55.162 - type: map_at_100 value: 56.142 - type: map_at_1000 value: 56.188 - type: 
map_at_3 value: 51.564 - type: map_at_5 value: 53.696 - type: mrr_at_1 value: 47.524 - type: mrr_at_10 value: 58.243 - type: mrr_at_100 value: 58.879999999999995 - type: mrr_at_1000 value: 58.9 - type: mrr_at_3 value: 55.69499999999999 - type: mrr_at_5 value: 57.284 - type: ndcg_at_1 value: 47.524 - type: ndcg_at_10 value: 61.305 - type: ndcg_at_100 value: 65.077 - type: ndcg_at_1000 value: 65.941 - type: ndcg_at_3 value: 55.422000000000004 - type: ndcg_at_5 value: 58.516 - type: precision_at_1 value: 47.524 - type: precision_at_10 value: 9.918000000000001 - type: precision_at_100 value: 1.276 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 24.765 - type: precision_at_5 value: 17.204 - type: recall_at_1 value: 41.637 - type: recall_at_10 value: 76.185 - type: recall_at_100 value: 92.149 - type: recall_at_1000 value: 98.199 - type: recall_at_3 value: 60.856 - type: recall_at_5 value: 68.25099999999999 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval type: BeIR/cqadupstack config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: map_at_1 value: 26.27 - type: map_at_10 value: 37.463 - type: map_at_100 value: 38.434000000000005 - type: map_at_1000 value: 38.509 - type: map_at_3 value: 34.226 - type: map_at_5 value: 36.161 - type: mrr_at_1 value: 28.588 - type: mrr_at_10 value: 39.383 - type: mrr_at_100 value: 40.23 - type: mrr_at_1000 value: 40.281 - type: mrr_at_3 value: 36.422 - type: mrr_at_5 value: 38.252 - type: ndcg_at_1 value: 28.588 - type: ndcg_at_10 value: 43.511 - type: ndcg_at_100 value: 48.274 - type: ndcg_at_1000 value: 49.975 - type: ndcg_at_3 value: 37.319 - type: ndcg_at_5 value: 40.568 - type: precision_at_1 value: 28.588 - type: precision_at_10 value: 6.893000000000001 - type: precision_at_100 value: 0.9900000000000001 - type: precision_at_1000 value: 0.117 - type: precision_at_3 value: 16.347 - type: precision_at_5 value: 11.661000000000001 - type: recall_at_1 value: 26.27 - type: recall_at_10 value: 60.284000000000006 - type: recall_at_100 value: 81.902 - type: recall_at_1000 value: 94.43 - type: recall_at_3 value: 43.537 - type: recall_at_5 value: 51.475 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval type: BeIR/cqadupstack config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: map_at_1 value: 18.168 - type: map_at_10 value: 28.410000000000004 - type: map_at_100 value: 29.78 - type: map_at_1000 value: 29.892999999999997 - type: map_at_3 value: 25.238 - type: map_at_5 value: 26.96 - type: mrr_at_1 value: 23.507 - type: mrr_at_10 value: 33.382 - type: mrr_at_100 value: 34.404 - type: mrr_at_1000 value: 34.467999999999996 - type: mrr_at_3 value: 30.637999999999998 - type: mrr_at_5 value: 32.199 - type: ndcg_at_1 value: 23.507 - type: ndcg_at_10 value: 34.571000000000005 - type: ndcg_at_100 value: 40.663 - type: ndcg_at_1000 value: 43.236000000000004 - type: ndcg_at_3 value: 29.053 - type: ndcg_at_5 value: 31.563999999999997 - type: precision_at_1 value: 23.507 - type: precision_at_10 value: 6.654 - type: precision_at_100 value: 1.113 - type: precision_at_1000 value: 0.146 - type: precision_at_3 value: 14.427999999999999 - type: precision_at_5 value: 10.498000000000001 - type: recall_at_1 value: 18.168 - type: recall_at_10 value: 48.443000000000005 - type: recall_at_100 value: 74.47 - type: recall_at_1000 value: 92.494 - type: recall_at_3 value: 33.379999999999995 - type: recall_at_5 value: 39.76 - task: type: 
Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval type: BeIR/cqadupstack config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: map_at_1 value: 32.39 - type: map_at_10 value: 44.479 - type: map_at_100 value: 45.977000000000004 - type: map_at_1000 value: 46.087 - type: map_at_3 value: 40.976 - type: map_at_5 value: 43.038 - type: mrr_at_1 value: 40.135 - type: mrr_at_10 value: 50.160000000000004 - type: mrr_at_100 value: 51.052 - type: mrr_at_1000 value: 51.087 - type: mrr_at_3 value: 47.818 - type: mrr_at_5 value: 49.171 - type: ndcg_at_1 value: 40.135 - type: ndcg_at_10 value: 50.731 - type: ndcg_at_100 value: 56.452000000000005 - type: ndcg_at_1000 value: 58.123000000000005 - type: ndcg_at_3 value: 45.507 - type: ndcg_at_5 value: 48.11 - type: precision_at_1 value: 40.135 - type: precision_at_10 value: 9.192 - type: precision_at_100 value: 1.397 - type: precision_at_1000 value: 0.169 - type: precision_at_3 value: 21.816 - type: precision_at_5 value: 15.476 - type: recall_at_1 value: 32.39 - type: recall_at_10 value: 63.597 - type: recall_at_100 value: 86.737 - type: recall_at_1000 value: 97.039 - type: recall_at_3 value: 48.906 - type: recall_at_5 value: 55.659000000000006 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval type: BeIR/cqadupstack config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: map_at_1 value: 28.397 - type: map_at_10 value: 39.871 - type: map_at_100 value: 41.309000000000005 - type: map_at_1000 value: 41.409 - type: map_at_3 value: 36.047000000000004 - type: map_at_5 value: 38.104 - type: mrr_at_1 value: 34.703 - type: mrr_at_10 value: 44.773 - type: mrr_at_100 value: 45.64 - type: mrr_at_1000 value: 45.678999999999995 - type: mrr_at_3 value: 41.705 - type: mrr_at_5 value: 43.406 - type: ndcg_at_1 value: 34.703 - type: ndcg_at_10 value: 46.271 - type: ndcg_at_100 value: 52.037 - type: ndcg_at_1000 value: 53.81700000000001 - type: ndcg_at_3 value: 39.966 - type: ndcg_at_5 value: 42.801 - type: precision_at_1 value: 34.703 - type: precision_at_10 value: 8.744 - type: precision_at_100 value: 1.348 - type: precision_at_1000 value: 0.167 - type: precision_at_3 value: 19.102 - type: precision_at_5 value: 13.836 - type: recall_at_1 value: 28.397 - type: recall_at_10 value: 60.299 - type: recall_at_100 value: 84.595 - type: recall_at_1000 value: 96.155 - type: recall_at_3 value: 43.065 - type: recall_at_5 value: 50.371 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: BeIR/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 28.044333333333338 - type: map_at_10 value: 38.78691666666666 - type: map_at_100 value: 40.113 - type: map_at_1000 value: 40.22125 - type: map_at_3 value: 35.52966666666667 - type: map_at_5 value: 37.372749999999996 - type: mrr_at_1 value: 33.159083333333335 - type: mrr_at_10 value: 42.913583333333335 - type: mrr_at_100 value: 43.7845 - type: mrr_at_1000 value: 43.830333333333336 - type: mrr_at_3 value: 40.29816666666667 - type: mrr_at_5 value: 41.81366666666667 - type: ndcg_at_1 value: 33.159083333333335 - type: ndcg_at_10 value: 44.75750000000001 - type: ndcg_at_100 value: 50.13658333333334 - type: ndcg_at_1000 value: 52.037 - type: ndcg_at_3 value: 39.34258333333334 - type: ndcg_at_5 value: 41.93708333333333 - type: precision_at_1 value: 33.159083333333335 - type: precision_at_10 value: 7.952416666666667 - type: precision_at_100 value: 
1.2571666666666668 - type: precision_at_1000 value: 0.16099999999999998 - type: precision_at_3 value: 18.303833333333337 - type: precision_at_5 value: 13.057083333333333 - type: recall_at_1 value: 28.044333333333338 - type: recall_at_10 value: 58.237249999999996 - type: recall_at_100 value: 81.35391666666666 - type: recall_at_1000 value: 94.21283333333334 - type: recall_at_3 value: 43.32341666666667 - type: recall_at_5 value: 49.94908333333333 - type: map_at_1 value: 18.398 - type: map_at_10 value: 27.929 - type: map_at_100 value: 29.032999999999998 - type: map_at_1000 value: 29.126 - type: map_at_3 value: 25.070999999999998 - type: map_at_5 value: 26.583000000000002 - type: mrr_at_1 value: 19.963 - type: mrr_at_10 value: 29.997 - type: mrr_at_100 value: 30.9 - type: mrr_at_1000 value: 30.972 - type: mrr_at_3 value: 27.264 - type: mrr_at_5 value: 28.826 - type: ndcg_at_1 value: 19.963 - type: ndcg_at_10 value: 33.678999999999995 - type: ndcg_at_100 value: 38.931 - type: ndcg_at_1000 value: 41.379 - type: ndcg_at_3 value: 28.000000000000004 - type: ndcg_at_5 value: 30.637999999999998 - type: precision_at_1 value: 19.963 - type: precision_at_10 value: 5.7299999999999995 - type: precision_at_100 value: 0.902 - type: precision_at_1000 value: 0.122 - type: precision_at_3 value: 12.631 - type: precision_at_5 value: 9.057 - type: recall_at_1 value: 18.398 - type: recall_at_10 value: 49.254 - type: recall_at_100 value: 73.182 - type: recall_at_1000 value: 91.637 - type: recall_at_3 value: 34.06 - type: recall_at_5 value: 40.416000000000004 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval type: BeIR/cqadupstack config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: map_at_1 value: 27.838 - type: map_at_10 value: 36.04 - type: map_at_100 value: 37.113 - type: map_at_1000 value: 37.204 - type: map_at_3 value: 33.585 - type: map_at_5 value: 34.845 - type: mrr_at_1 value: 30.982 - type: mrr_at_10 value: 39.105000000000004 - type: mrr_at_100 value: 39.98 - type: mrr_at_1000 value: 40.042 - type: mrr_at_3 value: 36.912 - type: mrr_at_5 value: 38.062000000000005 - type: ndcg_at_1 value: 30.982 - type: ndcg_at_10 value: 40.982 - type: ndcg_at_100 value: 46.092 - type: ndcg_at_1000 value: 48.25 - type: ndcg_at_3 value: 36.41 - type: ndcg_at_5 value: 38.379999999999995 - type: precision_at_1 value: 30.982 - type: precision_at_10 value: 6.534 - type: precision_at_100 value: 0.9820000000000001 - type: precision_at_1000 value: 0.124 - type: precision_at_3 value: 15.745999999999999 - type: precision_at_5 value: 10.828 - type: recall_at_1 value: 27.838 - type: recall_at_10 value: 52.971000000000004 - type: recall_at_100 value: 76.357 - type: recall_at_1000 value: 91.973 - type: recall_at_3 value: 40.157 - type: recall_at_5 value: 45.147999999999996 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval type: BeIR/cqadupstack config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: map_at_1 value: 19.059 - type: map_at_10 value: 27.454 - type: map_at_100 value: 28.736 - type: map_at_1000 value: 28.865000000000002 - type: map_at_3 value: 24.773999999999997 - type: map_at_5 value: 26.266000000000002 - type: mrr_at_1 value: 23.125 - type: mrr_at_10 value: 31.267 - type: mrr_at_100 value: 32.32 - type: mrr_at_1000 value: 32.394 - type: mrr_at_3 value: 28.894 - type: mrr_at_5 value: 30.281000000000002 - type: ndcg_at_1 value: 23.125 - type: ndcg_at_10 value: 32.588 - type: ndcg_at_100 value: 38.432 - type: 
ndcg_at_1000 value: 41.214 - type: ndcg_at_3 value: 27.938000000000002 - type: ndcg_at_5 value: 30.127 - type: precision_at_1 value: 23.125 - type: precision_at_10 value: 5.9639999999999995 - type: precision_at_100 value: 1.047 - type: precision_at_1000 value: 0.148 - type: precision_at_3 value: 13.294 - type: precision_at_5 value: 9.628 - type: recall_at_1 value: 19.059 - type: recall_at_10 value: 44.25 - type: recall_at_100 value: 69.948 - type: recall_at_1000 value: 89.35300000000001 - type: recall_at_3 value: 31.114000000000004 - type: recall_at_5 value: 36.846000000000004 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval type: BeIR/cqadupstack config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: map_at_1 value: 28.355999999999998 - type: map_at_10 value: 39.055 - type: map_at_100 value: 40.486 - type: map_at_1000 value: 40.571 - type: map_at_3 value: 35.69 - type: map_at_5 value: 37.605 - type: mrr_at_1 value: 33.302 - type: mrr_at_10 value: 42.986000000000004 - type: mrr_at_100 value: 43.957 - type: mrr_at_1000 value: 43.996 - type: mrr_at_3 value: 40.111999999999995 - type: mrr_at_5 value: 41.735 - type: ndcg_at_1 value: 33.302 - type: ndcg_at_10 value: 44.962999999999994 - type: ndcg_at_100 value: 50.917 - type: ndcg_at_1000 value: 52.622 - type: ndcg_at_3 value: 39.182 - type: ndcg_at_5 value: 41.939 - type: precision_at_1 value: 33.302 - type: precision_at_10 value: 7.779999999999999 - type: precision_at_100 value: 1.203 - type: precision_at_1000 value: 0.145 - type: precision_at_3 value: 18.035 - type: precision_at_5 value: 12.873000000000001 - type: recall_at_1 value: 28.355999999999998 - type: recall_at_10 value: 58.782000000000004 - type: recall_at_100 value: 84.02199999999999 - type: recall_at_1000 value: 95.511 - type: recall_at_3 value: 43.126999999999995 - type: recall_at_5 value: 50.14999999999999 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval type: BeIR/cqadupstack config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: map_at_1 value: 27.391 - type: map_at_10 value: 37.523 - type: map_at_100 value: 39.312000000000005 - type: map_at_1000 value: 39.54 - type: map_at_3 value: 34.231 - type: map_at_5 value: 36.062 - type: mrr_at_1 value: 32.016 - type: mrr_at_10 value: 41.747 - type: mrr_at_100 value: 42.812 - type: mrr_at_1000 value: 42.844 - type: mrr_at_3 value: 39.129999999999995 - type: mrr_at_5 value: 40.524 - type: ndcg_at_1 value: 32.016 - type: ndcg_at_10 value: 43.826 - type: ndcg_at_100 value: 50.373999999999995 - type: ndcg_at_1000 value: 52.318 - type: ndcg_at_3 value: 38.479 - type: ndcg_at_5 value: 40.944 - type: precision_at_1 value: 32.016 - type: precision_at_10 value: 8.280999999999999 - type: precision_at_100 value: 1.6760000000000002 - type: precision_at_1000 value: 0.25 - type: precision_at_3 value: 18.05 - type: precision_at_5 value: 13.083 - type: recall_at_1 value: 27.391 - type: recall_at_10 value: 56.928999999999995 - type: recall_at_100 value: 85.169 - type: recall_at_1000 value: 96.665 - type: recall_at_3 value: 42.264 - type: recall_at_5 value: 48.556 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: map_at_1 value: 19.681 - type: map_at_10 value: 32.741 - type: map_at_100 value: 34.811 - type: map_at_1000 value: 35.003 - type: map_at_3 value: 27.697 - type: map_at_5 value: 30.372 - type: 
mrr_at_1 value: 44.951 - type: mrr_at_10 value: 56.34400000000001 - type: mrr_at_100 value: 56.961 - type: mrr_at_1000 value: 56.987 - type: mrr_at_3 value: 53.681 - type: mrr_at_5 value: 55.407 - type: ndcg_at_1 value: 44.951 - type: ndcg_at_10 value: 42.905 - type: ndcg_at_100 value: 49.95 - type: ndcg_at_1000 value: 52.917 - type: ndcg_at_3 value: 36.815 - type: ndcg_at_5 value: 38.817 - type: precision_at_1 value: 44.951 - type: precision_at_10 value: 12.989999999999998 - type: precision_at_100 value: 2.068 - type: precision_at_1000 value: 0.263 - type: precision_at_3 value: 27.275 - type: precision_at_5 value: 20.365 - type: recall_at_1 value: 19.681 - type: recall_at_10 value: 48.272999999999996 - type: recall_at_100 value: 71.87400000000001 - type: recall_at_1000 value: 87.929 - type: recall_at_3 value: 32.653999999999996 - type: recall_at_5 value: 39.364 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: map_at_1 value: 10.231 - type: map_at_10 value: 22.338 - type: map_at_100 value: 31.927 - type: map_at_1000 value: 33.87 - type: map_at_3 value: 15.559999999999999 - type: map_at_5 value: 18.239 - type: mrr_at_1 value: 75.0 - type: mrr_at_10 value: 81.303 - type: mrr_at_100 value: 81.523 - type: mrr_at_1000 value: 81.53 - type: mrr_at_3 value: 80.083 - type: mrr_at_5 value: 80.758 - type: ndcg_at_1 value: 64.625 - type: ndcg_at_10 value: 48.687000000000005 - type: ndcg_at_100 value: 52.791 - type: ndcg_at_1000 value: 60.041999999999994 - type: ndcg_at_3 value: 53.757999999999996 - type: ndcg_at_5 value: 50.76500000000001 - type: precision_at_1 value: 75.0 - type: precision_at_10 value: 38.3 - type: precision_at_100 value: 12.025 - type: precision_at_1000 value: 2.3970000000000002 - type: precision_at_3 value: 55.417 - type: precision_at_5 value: 47.5 - type: recall_at_1 value: 10.231 - type: recall_at_10 value: 27.697 - type: recall_at_100 value: 57.409 - type: recall_at_1000 value: 80.547 - type: recall_at_3 value: 16.668 - type: recall_at_5 value: 20.552 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 61.365 - type: f1 value: 56.7540827912991 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: map_at_1 value: 83.479 - type: map_at_10 value: 88.898 - type: map_at_100 value: 89.11 - type: map_at_1000 value: 89.12400000000001 - type: map_at_3 value: 88.103 - type: map_at_5 value: 88.629 - type: mrr_at_1 value: 89.934 - type: mrr_at_10 value: 93.91000000000001 - type: mrr_at_100 value: 93.937 - type: mrr_at_1000 value: 93.938 - type: mrr_at_3 value: 93.62700000000001 - type: mrr_at_5 value: 93.84599999999999 - type: ndcg_at_1 value: 89.934 - type: ndcg_at_10 value: 91.574 - type: ndcg_at_100 value: 92.238 - type: ndcg_at_1000 value: 92.45 - type: ndcg_at_3 value: 90.586 - type: ndcg_at_5 value: 91.16300000000001 - type: precision_at_1 value: 89.934 - type: precision_at_10 value: 10.555 - type: precision_at_100 value: 1.1159999999999999 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_3 value: 33.588 - type: precision_at_5 value: 20.642 - type: recall_at_1 value: 83.479 - type: recall_at_10 value: 94.971 - type: recall_at_100 value: 97.397 - type: recall_at_1000 value: 98.666 - type: 
recall_at_3 value: 92.24799999999999 - type: recall_at_5 value: 93.797 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: map_at_1 value: 27.16 - type: map_at_10 value: 45.593 - type: map_at_100 value: 47.762 - type: map_at_1000 value: 47.899 - type: map_at_3 value: 39.237 - type: map_at_5 value: 42.970000000000006 - type: mrr_at_1 value: 52.623 - type: mrr_at_10 value: 62.637 - type: mrr_at_100 value: 63.169 - type: mrr_at_1000 value: 63.185 - type: mrr_at_3 value: 59.928000000000004 - type: mrr_at_5 value: 61.702999999999996 - type: ndcg_at_1 value: 52.623 - type: ndcg_at_10 value: 54.701 - type: ndcg_at_100 value: 61.263 - type: ndcg_at_1000 value: 63.134 - type: ndcg_at_3 value: 49.265 - type: ndcg_at_5 value: 51.665000000000006 - type: precision_at_1 value: 52.623 - type: precision_at_10 value: 15.185 - type: precision_at_100 value: 2.202 - type: precision_at_1000 value: 0.254 - type: precision_at_3 value: 32.767 - type: precision_at_5 value: 24.722 - type: recall_at_1 value: 27.16 - type: recall_at_10 value: 63.309000000000005 - type: recall_at_100 value: 86.722 - type: recall_at_1000 value: 97.505 - type: recall_at_3 value: 45.045 - type: recall_at_5 value: 54.02400000000001 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: map_at_1 value: 42.573 - type: map_at_10 value: 59.373 - type: map_at_100 value: 60.292 - type: map_at_1000 value: 60.358999999999995 - type: map_at_3 value: 56.159000000000006 - type: map_at_5 value: 58.123999999999995 - type: mrr_at_1 value: 85.14500000000001 - type: mrr_at_10 value: 89.25999999999999 - type: mrr_at_100 value: 89.373 - type: mrr_at_1000 value: 89.377 - type: mrr_at_3 value: 88.618 - type: mrr_at_5 value: 89.036 - type: ndcg_at_1 value: 85.14500000000001 - type: ndcg_at_10 value: 68.95 - type: ndcg_at_100 value: 71.95 - type: ndcg_at_1000 value: 73.232 - type: ndcg_at_3 value: 64.546 - type: ndcg_at_5 value: 66.945 - type: precision_at_1 value: 85.14500000000001 - type: precision_at_10 value: 13.865 - type: precision_at_100 value: 1.619 - type: precision_at_1000 value: 0.179 - type: precision_at_3 value: 39.703 - type: precision_at_5 value: 25.718000000000004 - type: recall_at_1 value: 42.573 - type: recall_at_10 value: 69.325 - type: recall_at_100 value: 80.932 - type: recall_at_1000 value: 89.446 - type: recall_at_3 value: 59.553999999999995 - type: recall_at_5 value: 64.294 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 95.8336 - type: ap value: 93.78862962194073 - type: f1 value: 95.83192650728371 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: map_at_1 value: 23.075000000000003 - type: map_at_10 value: 36.102000000000004 - type: map_at_100 value: 37.257 - type: map_at_1000 value: 37.3 - type: map_at_3 value: 32.144 - type: map_at_5 value: 34.359 - type: mrr_at_1 value: 23.711 - type: mrr_at_10 value: 36.671 - type: mrr_at_100 value: 37.763999999999996 - type: mrr_at_1000 value: 37.801 - type: mrr_at_3 value: 32.775 - type: mrr_at_5 value: 34.977000000000004 - type: ndcg_at_1 value: 23.711 - type: ndcg_at_10 value: 43.361 - type: ndcg_at_100 value: 
48.839 - type: ndcg_at_1000 value: 49.88 - type: ndcg_at_3 value: 35.269 - type: ndcg_at_5 value: 39.224 - type: precision_at_1 value: 23.711 - type: precision_at_10 value: 6.866999999999999 - type: precision_at_100 value: 0.96 - type: precision_at_1000 value: 0.105 - type: precision_at_3 value: 15.096000000000002 - type: precision_at_5 value: 11.083 - type: recall_at_1 value: 23.075000000000003 - type: recall_at_10 value: 65.756 - type: recall_at_100 value: 90.88199999999999 - type: recall_at_1000 value: 98.739 - type: recall_at_3 value: 43.691 - type: recall_at_5 value: 53.15800000000001 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 97.69493844049248 - type: f1 value: 97.55048089616261 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 88.75968992248062 - type: f1 value: 72.26321223399123 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 82.40080699394754 - type: f1 value: 79.62590029057968 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 84.49562878278414 - type: f1 value: 84.0040193313333 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 39.386760057101945 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 37.89687154075537 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 33.94151656057482 - type: mrr value: 35.32684700746953 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: map_at_1 value: 6.239999999999999 - type: map_at_10 value: 14.862 - type: map_at_100 value: 18.955 - type: map_at_1000 value: 20.694000000000003 - type: map_at_3 value: 10.683 - type: map_at_5 value: 12.674 - type: mrr_at_1 value: 50.15500000000001 - type: mrr_at_10 value: 59.697 - type: mrr_at_100 value: 60.095 - type: mrr_at_1000 value: 60.129999999999995 - type: mrr_at_3 value: 58.35900000000001 - type: mrr_at_5 value: 58.839 - type: ndcg_at_1 value: 48.452 - type: ndcg_at_10 value: 39.341 - type: ndcg_at_100 value: 35.866 - type: ndcg_at_1000 value: 45.111000000000004 - type: ndcg_at_3 value: 44.527 - type: ndcg_at_5 value: 42.946 - type: precision_at_1 value: 50.15500000000001 - type: precision_at_10 value: 29.536 - type: precision_at_100 value: 9.142 - type: precision_at_1000 value: 2.2849999999999997 - type: precision_at_3 value: 41.899 - type: precision_at_5 value: 37.647000000000006 - type: recall_at_1 value: 6.239999999999999 - type: recall_at_10 value: 19.278000000000002 
- type: recall_at_100 value: 36.074 - type: recall_at_1000 value: 70.017 - type: recall_at_3 value: 12.066 - type: recall_at_5 value: 15.254000000000001 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: map_at_1 value: 39.75 - type: map_at_10 value: 56.443 - type: map_at_100 value: 57.233999999999995 - type: map_at_1000 value: 57.249 - type: map_at_3 value: 52.032999999999994 - type: map_at_5 value: 54.937999999999995 - type: mrr_at_1 value: 44.728 - type: mrr_at_10 value: 58.939 - type: mrr_at_100 value: 59.489000000000004 - type: mrr_at_1000 value: 59.499 - type: mrr_at_3 value: 55.711999999999996 - type: mrr_at_5 value: 57.89 - type: ndcg_at_1 value: 44.728 - type: ndcg_at_10 value: 63.998999999999995 - type: ndcg_at_100 value: 67.077 - type: ndcg_at_1000 value: 67.40899999999999 - type: ndcg_at_3 value: 56.266000000000005 - type: ndcg_at_5 value: 60.88 - type: precision_at_1 value: 44.728 - type: precision_at_10 value: 10.09 - type: precision_at_100 value: 1.1809999999999998 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 25.145 - type: precision_at_5 value: 17.822 - type: recall_at_1 value: 39.75 - type: recall_at_10 value: 84.234 - type: recall_at_100 value: 97.055 - type: recall_at_1000 value: 99.517 - type: recall_at_3 value: 64.851 - type: recall_at_5 value: 75.343 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: None metrics: - type: map_at_1 value: 72.085 - type: map_at_10 value: 86.107 - type: map_at_100 value: 86.727 - type: map_at_1000 value: 86.74 - type: map_at_3 value: 83.21 - type: map_at_5 value: 85.06 - type: mrr_at_1 value: 82.94 - type: mrr_at_10 value: 88.845 - type: mrr_at_100 value: 88.926 - type: mrr_at_1000 value: 88.927 - type: mrr_at_3 value: 87.993 - type: mrr_at_5 value: 88.62299999999999 - type: ndcg_at_1 value: 82.97 - type: ndcg_at_10 value: 89.645 - type: ndcg_at_100 value: 90.717 - type: ndcg_at_1000 value: 90.78 - type: ndcg_at_3 value: 86.99900000000001 - type: ndcg_at_5 value: 88.52600000000001 - type: precision_at_1 value: 82.97 - type: precision_at_10 value: 13.569 - type: precision_at_100 value: 1.539 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 38.043 - type: precision_at_5 value: 24.992 - type: recall_at_1 value: 72.085 - type: recall_at_10 value: 96.262 - type: recall_at_100 value: 99.77000000000001 - type: recall_at_1000 value: 99.997 - type: recall_at_3 value: 88.652 - type: recall_at_5 value: 93.01899999999999 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 55.82153952668092 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 62.094465801879295 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 5.688 - type: map_at_10 value: 15.201999999999998 - type: map_at_100 value: 18.096 - type: map_at_1000 value: 18.481 - type: map_at_3 value: 10.734 - type: map_at_5 value: 12.94 - type: mrr_at_1 value: 28.000000000000004 - type: mrr_at_10 value: 41.101 - type: mrr_at_100 value: 42.202 - type: mrr_at_1000 value: 42.228 - type: mrr_at_3 
value: 37.683 - type: mrr_at_5 value: 39.708 - type: ndcg_at_1 value: 28.000000000000004 - type: ndcg_at_10 value: 24.976000000000003 - type: ndcg_at_100 value: 35.129 - type: ndcg_at_1000 value: 40.77 - type: ndcg_at_3 value: 23.787 - type: ndcg_at_5 value: 20.816000000000003 - type: precision_at_1 value: 28.000000000000004 - type: precision_at_10 value: 13.04 - type: precision_at_100 value: 2.761 - type: precision_at_1000 value: 0.41000000000000003 - type: precision_at_3 value: 22.6 - type: precision_at_5 value: 18.52 - type: recall_at_1 value: 5.688 - type: recall_at_10 value: 26.43 - type: recall_at_100 value: 56.02 - type: recall_at_1000 value: 83.21 - type: recall_at_3 value: 13.752 - type: recall_at_5 value: 18.777 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 85.15084859283178 - type: cos_sim_spearman value: 80.49030614009419 - type: euclidean_pearson value: 81.84574978672468 - type: euclidean_spearman value: 79.89787150656818 - type: manhattan_pearson value: 81.63076538567131 - type: manhattan_spearman value: 79.69867352121841 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 84.64097921490992 - type: cos_sim_spearman value: 77.25370084896514 - type: euclidean_pearson value: 82.71210826468788 - type: euclidean_spearman value: 78.50445584994826 - type: manhattan_pearson value: 82.92580164330298 - type: manhattan_spearman value: 78.69686891301019 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 87.24596417308994 - type: cos_sim_spearman value: 87.79454220555091 - type: euclidean_pearson value: 87.40242561671164 - type: euclidean_spearman value: 88.25955597373556 - type: manhattan_pearson value: 87.25160240485849 - type: manhattan_spearman value: 88.155794979818 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 84.44914233422564 - type: cos_sim_spearman value: 82.91015471820322 - type: euclidean_pearson value: 84.7206656630327 - type: euclidean_spearman value: 83.86408872059216 - type: manhattan_pearson value: 84.72816725158454 - type: manhattan_spearman value: 84.01603388572788 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 87.6168026237477 - type: cos_sim_spearman value: 88.45414278092397 - type: euclidean_pearson value: 88.57023240882022 - type: euclidean_spearman value: 89.04102190922094 - type: manhattan_pearson value: 88.66695535796354 - type: manhattan_spearman value: 89.19898476680969 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 84.27925826089424 - type: cos_sim_spearman value: 85.45291099550461 - type: euclidean_pearson value: 83.63853036580834 - type: euclidean_spearman value: 84.33468035821484 - type: manhattan_pearson value: 83.72778773251596 - type: manhattan_spearman value: 84.51583132445376 - task: type: STS dataset: name: MTEB STS17 (en-en) type: 
mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 89.67375185692552 - type: cos_sim_spearman value: 90.32542469203855 - type: euclidean_pearson value: 89.63513717951847 - type: euclidean_spearman value: 89.87760271003745 - type: manhattan_pearson value: 89.28381452982924 - type: manhattan_spearman value: 89.53568197785721 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 66.24644693819846 - type: cos_sim_spearman value: 66.09889420525377 - type: euclidean_pearson value: 63.72551583520747 - type: euclidean_spearman value: 63.01385470780679 - type: manhattan_pearson value: 64.09258157214097 - type: manhattan_spearman value: 63.080517752822594 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 86.27321463839989 - type: cos_sim_spearman value: 86.37572865993327 - type: euclidean_pearson value: 86.36268020198149 - type: euclidean_spearman value: 86.31089339478922 - type: manhattan_pearson value: 86.4260445761947 - type: manhattan_spearman value: 86.45885895320457 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 86.52456702387798 - type: mrr value: 96.34556529164372 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: map_at_1 value: 61.99400000000001 - type: map_at_10 value: 73.38799999999999 - type: map_at_100 value: 73.747 - type: map_at_1000 value: 73.75 - type: map_at_3 value: 70.04599999999999 - type: map_at_5 value: 72.095 - type: mrr_at_1 value: 65.0 - type: mrr_at_10 value: 74.42800000000001 - type: mrr_at_100 value: 74.722 - type: mrr_at_1000 value: 74.725 - type: mrr_at_3 value: 72.056 - type: mrr_at_5 value: 73.60600000000001 - type: ndcg_at_1 value: 65.0 - type: ndcg_at_10 value: 78.435 - type: ndcg_at_100 value: 79.922 - type: ndcg_at_1000 value: 80.00500000000001 - type: ndcg_at_3 value: 73.05199999999999 - type: ndcg_at_5 value: 75.98 - type: precision_at_1 value: 65.0 - type: precision_at_10 value: 10.5 - type: precision_at_100 value: 1.123 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 28.555999999999997 - type: precision_at_5 value: 19.0 - type: recall_at_1 value: 61.99400000000001 - type: recall_at_10 value: 92.72200000000001 - type: recall_at_100 value: 99.333 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 78.739 - type: recall_at_5 value: 85.828 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.79009900990098 - type: cos_sim_ap value: 95.3203137438653 - type: cos_sim_f1 value: 89.12386706948641 - type: cos_sim_precision value: 89.75659229208925 - type: cos_sim_recall value: 88.5 - type: dot_accuracy value: 99.67821782178218 - type: dot_ap value: 89.94069840000675 - type: dot_f1 value: 83.45902463549521 - type: dot_precision value: 83.9231547017189 - type: dot_recall value: 83.0 - type: 
euclidean_accuracy value: 99.78613861386138 - type: euclidean_ap value: 95.10648259135526 - type: euclidean_f1 value: 88.77338877338877 - type: euclidean_precision value: 92.42424242424242 - type: euclidean_recall value: 85.39999999999999 - type: manhattan_accuracy value: 99.7950495049505 - type: manhattan_ap value: 95.29987661320946 - type: manhattan_f1 value: 89.21313183949972 - type: manhattan_precision value: 93.14472252448314 - type: manhattan_recall value: 85.6 - type: max_accuracy value: 99.7950495049505 - type: max_ap value: 95.3203137438653 - type: max_f1 value: 89.21313183949972 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 67.65446577183913 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 46.30749237193961 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 54.91481849959949 - type: mrr value: 55.853506175197346 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.08196549170419 - type: cos_sim_spearman value: 31.16661390597077 - type: dot_pearson value: 29.892258410943466 - type: dot_spearman value: 30.51328811965085 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.23900000000000002 - type: map_at_10 value: 2.173 - type: map_at_100 value: 14.24 - type: map_at_1000 value: 35.309000000000005 - type: map_at_3 value: 0.7100000000000001 - type: map_at_5 value: 1.163 - type: mrr_at_1 value: 92.0 - type: mrr_at_10 value: 96.0 - type: mrr_at_100 value: 96.0 - type: mrr_at_1000 value: 96.0 - type: mrr_at_3 value: 96.0 - type: mrr_at_5 value: 96.0 - type: ndcg_at_1 value: 90.0 - type: ndcg_at_10 value: 85.382 - type: ndcg_at_100 value: 68.03 - type: ndcg_at_1000 value: 61.021 - type: ndcg_at_3 value: 89.765 - type: ndcg_at_5 value: 88.444 - type: precision_at_1 value: 92.0 - type: precision_at_10 value: 88.0 - type: precision_at_100 value: 70.02000000000001 - type: precision_at_1000 value: 26.984 - type: precision_at_3 value: 94.0 - type: precision_at_5 value: 92.80000000000001 - type: recall_at_1 value: 0.23900000000000002 - type: recall_at_10 value: 2.313 - type: recall_at_100 value: 17.049 - type: recall_at_1000 value: 57.489999999999995 - type: recall_at_3 value: 0.737 - type: recall_at_5 value: 1.221 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: map_at_1 value: 2.75 - type: map_at_10 value: 11.29 - type: map_at_100 value: 18.032999999999998 - type: map_at_1000 value: 19.746 - type: map_at_3 value: 6.555 - type: map_at_5 value: 8.706999999999999 - type: mrr_at_1 value: 34.694 - type: mrr_at_10 value: 50.55 - type: mrr_at_100 value: 51.659 - type: mrr_at_1000 value: 51.659 - type: mrr_at_3 value: 47.278999999999996 - type: mrr_at_5 value: 49.728 - type: ndcg_at_1 value: 32.653 - type: ndcg_at_10 value: 
27.894000000000002 - type: ndcg_at_100 value: 39.769 - type: ndcg_at_1000 value: 51.495999999999995 - type: ndcg_at_3 value: 32.954 - type: ndcg_at_5 value: 31.502999999999997 - type: precision_at_1 value: 34.694 - type: precision_at_10 value: 23.265 - type: precision_at_100 value: 7.898 - type: precision_at_1000 value: 1.58 - type: precision_at_3 value: 34.694 - type: precision_at_5 value: 31.429000000000002 - type: recall_at_1 value: 2.75 - type: recall_at_10 value: 16.953 - type: recall_at_100 value: 48.68 - type: recall_at_1000 value: 85.18599999999999 - type: recall_at_3 value: 7.710999999999999 - type: recall_at_5 value: 11.484 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 82.66099999999999 - type: ap value: 25.555698090238337 - type: f1 value: 66.48402012461622 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 72.94567062818335 - type: f1 value: 73.28139189595674 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 49.581627240203474 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 87.78089050485785 - type: cos_sim_ap value: 79.64487116574168 - type: cos_sim_f1 value: 72.46563021970964 - type: cos_sim_precision value: 70.62359128474831 - type: cos_sim_recall value: 74.40633245382587 - type: dot_accuracy value: 86.2609524944865 - type: dot_ap value: 75.513046857613 - type: dot_f1 value: 68.58213616489695 - type: dot_precision value: 65.12455516014235 - type: dot_recall value: 72.42744063324538 - type: euclidean_accuracy value: 87.6080348095607 - type: euclidean_ap value: 79.00204933649795 - type: euclidean_f1 value: 72.14495342605589 - type: euclidean_precision value: 69.85421299728193 - type: euclidean_recall value: 74.5910290237467 - type: manhattan_accuracy value: 87.59611372712642 - type: manhattan_ap value: 78.78523756706264 - type: manhattan_f1 value: 71.86499137718648 - type: manhattan_precision value: 67.39833641404806 - type: manhattan_recall value: 76.96569920844327 - type: max_accuracy value: 87.78089050485785 - type: max_ap value: 79.64487116574168 - type: max_f1 value: 72.46563021970964 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.98719292117825 - type: cos_sim_ap value: 87.58146137353202 - type: cos_sim_f1 value: 80.28543232369239 - type: cos_sim_precision value: 79.1735289714029 - type: cos_sim_recall value: 81.42901139513397 - type: dot_accuracy value: 88.9199363526992 - type: dot_ap value: 84.98499998630417 - type: dot_f1 value: 78.21951400757969 - type: dot_precision value: 75.58523624874336 - type: dot_recall value: 81.04404065291038 - type: euclidean_accuracy value: 89.77374160748244 - type: euclidean_ap value: 87.35151562835209 - type: euclidean_f1 
value: 79.92160922940393 - type: euclidean_precision value: 76.88531587933979 - type: euclidean_recall value: 83.20757622420696 - type: manhattan_accuracy value: 89.72717041176699 - type: manhattan_ap value: 87.34065592142515 - type: manhattan_f1 value: 79.85603419187943 - type: manhattan_precision value: 77.82243332115455 - type: manhattan_recall value: 81.99876809362489 - type: max_accuracy value: 89.98719292117825 - type: max_ap value: 87.58146137353202 - type: max_f1 value: 80.28543232369239 - task: type: STS dataset: name: MTEB AFQMC type: C-MTEB/AFQMC config: default split: validation revision: b44c3b011063adb25877c13823db83bb193913c4 metrics: - type: cos_sim_pearson value: 53.45954203592337 - type: cos_sim_spearman value: 58.42154680418638 - type: euclidean_pearson value: 56.41543791722753 - type: euclidean_spearman value: 58.39328016640146 - type: manhattan_pearson value: 56.318510356833876 - type: manhattan_spearman value: 58.28423447818184 - task: type: STS dataset: name: MTEB ATEC type: C-MTEB/ATEC config: default split: test revision: 0f319b1142f28d00e055a6770f3f726ae9b7d865 metrics: - type: cos_sim_pearson value: 50.78356460675945 - type: cos_sim_spearman value: 55.6530411663269 - type: euclidean_pearson value: 56.50763660417816 - type: euclidean_spearman value: 55.733823335669065 - type: manhattan_pearson value: 56.45323093512866 - type: manhattan_spearman value: 55.63248619032702 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 47.209999999999994 - type: f1 value: 46.08892432018655 - task: type: STS dataset: name: MTEB BQ type: C-MTEB/BQ config: default split: test revision: e3dda5e115e487b39ec7e618c0c6a29137052a55 metrics: - type: cos_sim_pearson value: 70.25573992001478 - type: cos_sim_spearman value: 73.85247134951433 - type: euclidean_pearson value: 72.60033082168442 - type: euclidean_spearman value: 73.72445893756499 - type: manhattan_pearson value: 72.59932284620231 - type: manhattan_spearman value: 73.68002490614583 - task: type: Clustering dataset: name: MTEB CLSClusteringP2P type: C-MTEB/CLSClusteringP2P config: default split: test revision: 4b6227591c6c1a73bc76b1055f3b7f3588e72476 metrics: - type: v_measure value: 45.21317724305628 - task: type: Clustering dataset: name: MTEB CLSClusteringS2S type: C-MTEB/CLSClusteringS2S config: default split: test revision: e458b3f5414b62b7f9f83499ac1f5497ae2e869f metrics: - type: v_measure value: 42.49825170976724 - task: type: Reranking dataset: name: MTEB CMedQAv1 type: C-MTEB/CMedQAv1-reranking config: default split: test revision: 8d7f1e942507dac42dc58017c1a001c3717da7df metrics: - type: map value: 88.15661686810597 - type: mrr value: 90.11222222222223 - task: type: Reranking dataset: name: MTEB CMedQAv2 type: C-MTEB/CMedQAv2-reranking config: default split: test revision: 23d186750531a14a0357ca22cd92d712fd512ea0 metrics: - type: map value: 88.1204726064383 - type: mrr value: 90.20142857142858 - task: type: Retrieval dataset: name: MTEB CmedqaRetrieval type: C-MTEB/CmedqaRetrieval config: default split: dev revision: cd540c506dae1cf9e9a59c3e06f42030d54e7301 metrics: - type: map_at_1 value: 27.224999999999998 - type: map_at_10 value: 40.169 - type: map_at_100 value: 42.0 - type: map_at_1000 value: 42.109 - type: map_at_3 value: 35.76 - type: map_at_5 value: 38.221 - type: mrr_at_1 value: 40.56 - type: mrr_at_10 value: 49.118 - type: mrr_at_100 value: 
50.092999999999996 - type: mrr_at_1000 value: 50.133 - type: mrr_at_3 value: 46.507 - type: mrr_at_5 value: 47.973 - type: ndcg_at_1 value: 40.56 - type: ndcg_at_10 value: 46.972 - type: ndcg_at_100 value: 54.04 - type: ndcg_at_1000 value: 55.862 - type: ndcg_at_3 value: 41.36 - type: ndcg_at_5 value: 43.704 - type: precision_at_1 value: 40.56 - type: precision_at_10 value: 10.302999999999999 - type: precision_at_100 value: 1.606 - type: precision_at_1000 value: 0.184 - type: precision_at_3 value: 23.064 - type: precision_at_5 value: 16.764000000000003 - type: recall_at_1 value: 27.224999999999998 - type: recall_at_10 value: 58.05200000000001 - type: recall_at_100 value: 87.092 - type: recall_at_1000 value: 99.099 - type: recall_at_3 value: 41.373 - type: recall_at_5 value: 48.453 - task: type: PairClassification dataset: name: MTEB Cmnli type: C-MTEB/CMNLI config: default split: validation revision: 41bc36f332156f7adc9e38f53777c959b2ae9766 metrics: - type: cos_sim_accuracy value: 77.40228502705953 - type: cos_sim_ap value: 86.22359172956327 - type: cos_sim_f1 value: 78.96328293736501 - type: cos_sim_precision value: 73.36945615091311 - type: cos_sim_recall value: 85.48047696983868 - type: dot_accuracy value: 75.53818400481059 - type: dot_ap value: 83.70164011305312 - type: dot_f1 value: 77.67298719348754 - type: dot_precision value: 67.49482401656314 - type: dot_recall value: 91.46598082768296 - type: euclidean_accuracy value: 77.94347564642213 - type: euclidean_ap value: 86.4652108728609 - type: euclidean_f1 value: 79.15555555555555 - type: euclidean_precision value: 75.41816641964853 - type: euclidean_recall value: 83.28267477203647 - type: manhattan_accuracy value: 77.45039085989175 - type: manhattan_ap value: 86.09986583900665 - type: manhattan_f1 value: 78.93669264438988 - type: manhattan_precision value: 72.63261296660117 - type: manhattan_recall value: 86.43909282207154 - type: max_accuracy value: 77.94347564642213 - type: max_ap value: 86.4652108728609 - type: max_f1 value: 79.15555555555555 - task: type: Retrieval dataset: name: MTEB CovidRetrieval type: C-MTEB/CovidRetrieval config: default split: dev revision: 1271c7809071a13532e05f25fb53511ffce77117 metrics: - type: map_at_1 value: 69.336 - type: map_at_10 value: 77.16 - type: map_at_100 value: 77.47500000000001 - type: map_at_1000 value: 77.482 - type: map_at_3 value: 75.42999999999999 - type: map_at_5 value: 76.468 - type: mrr_at_1 value: 69.44200000000001 - type: mrr_at_10 value: 77.132 - type: mrr_at_100 value: 77.43299999999999 - type: mrr_at_1000 value: 77.44 - type: mrr_at_3 value: 75.395 - type: mrr_at_5 value: 76.459 - type: ndcg_at_1 value: 69.547 - type: ndcg_at_10 value: 80.794 - type: ndcg_at_100 value: 82.245 - type: ndcg_at_1000 value: 82.40899999999999 - type: ndcg_at_3 value: 77.303 - type: ndcg_at_5 value: 79.168 - type: precision_at_1 value: 69.547 - type: precision_at_10 value: 9.305 - type: precision_at_100 value: 0.9979999999999999 - type: precision_at_1000 value: 0.101 - type: precision_at_3 value: 27.749000000000002 - type: precision_at_5 value: 17.576 - type: recall_at_1 value: 69.336 - type: recall_at_10 value: 92.097 - type: recall_at_100 value: 98.736 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 82.64 - type: recall_at_5 value: 87.144 - task: type: Retrieval dataset: name: MTEB DuRetrieval type: C-MTEB/DuRetrieval config: default split: dev revision: a1a333e290fe30b10f3f56498e3a0d911a693ced metrics: - type: map_at_1 value: 26.817999999999998 - type: map_at_10 value: 82.67 - type: 
map_at_100 value: 85.304 - type: map_at_1000 value: 85.334 - type: map_at_3 value: 57.336 - type: map_at_5 value: 72.474 - type: mrr_at_1 value: 91.45 - type: mrr_at_10 value: 94.272 - type: mrr_at_100 value: 94.318 - type: mrr_at_1000 value: 94.32000000000001 - type: mrr_at_3 value: 94.0 - type: mrr_at_5 value: 94.17699999999999 - type: ndcg_at_1 value: 91.45 - type: ndcg_at_10 value: 89.404 - type: ndcg_at_100 value: 91.724 - type: ndcg_at_1000 value: 91.973 - type: ndcg_at_3 value: 88.104 - type: ndcg_at_5 value: 87.25699999999999 - type: precision_at_1 value: 91.45 - type: precision_at_10 value: 42.585 - type: precision_at_100 value: 4.838 - type: precision_at_1000 value: 0.49 - type: precision_at_3 value: 78.8 - type: precision_at_5 value: 66.66 - type: recall_at_1 value: 26.817999999999998 - type: recall_at_10 value: 90.67 - type: recall_at_100 value: 98.36200000000001 - type: recall_at_1000 value: 99.583 - type: recall_at_3 value: 59.614999999999995 - type: recall_at_5 value: 77.05199999999999 - task: type: Retrieval dataset: name: MTEB EcomRetrieval type: C-MTEB/EcomRetrieval config: default split: dev revision: 687de13dc7294d6fd9be10c6945f9e8fec8166b9 metrics: - type: map_at_1 value: 47.699999999999996 - type: map_at_10 value: 57.589999999999996 - type: map_at_100 value: 58.226 - type: map_at_1000 value: 58.251 - type: map_at_3 value: 55.233 - type: map_at_5 value: 56.633 - type: mrr_at_1 value: 47.699999999999996 - type: mrr_at_10 value: 57.589999999999996 - type: mrr_at_100 value: 58.226 - type: mrr_at_1000 value: 58.251 - type: mrr_at_3 value: 55.233 - type: mrr_at_5 value: 56.633 - type: ndcg_at_1 value: 47.699999999999996 - type: ndcg_at_10 value: 62.505 - type: ndcg_at_100 value: 65.517 - type: ndcg_at_1000 value: 66.19800000000001 - type: ndcg_at_3 value: 57.643 - type: ndcg_at_5 value: 60.181 - type: precision_at_1 value: 47.699999999999996 - type: precision_at_10 value: 7.8 - type: precision_at_100 value: 0.919 - type: precision_at_1000 value: 0.097 - type: precision_at_3 value: 21.532999999999998 - type: precision_at_5 value: 14.16 - type: recall_at_1 value: 47.699999999999996 - type: recall_at_10 value: 78.0 - type: recall_at_100 value: 91.9 - type: recall_at_1000 value: 97.3 - type: recall_at_3 value: 64.60000000000001 - type: recall_at_5 value: 70.8 - task: type: Classification dataset: name: MTEB IFlyTek type: C-MTEB/IFlyTek-classification config: default split: validation revision: 421605374b29664c5fc098418fe20ada9bd55f8a metrics: - type: accuracy value: 44.84801846864178 - type: f1 value: 37.47347897956339 - task: type: Classification dataset: name: MTEB JDReview type: C-MTEB/JDReview-classification config: default split: test revision: b7c64bd89eb87f8ded463478346f76731f07bf8b metrics: - type: accuracy value: 85.81613508442777 - type: ap value: 52.68244615477374 - type: f1 value: 80.0445640948843 - task: type: STS dataset: name: MTEB LCQMC type: C-MTEB/LCQMC config: default split: test revision: 17f9b096f80380fce5ed12a9be8be7784b337daf metrics: - type: cos_sim_pearson value: 69.57786502217138 - type: cos_sim_spearman value: 75.39106054489906 - type: euclidean_pearson value: 73.72082954602402 - type: euclidean_spearman value: 75.14421475913619 - type: manhattan_pearson value: 73.62463076633642 - type: manhattan_spearman value: 75.01301565104112 - task: type: Reranking dataset: name: MTEB MMarcoReranking type: C-MTEB/Mmarco-reranking config: default split: dev revision: None metrics: - type: map value: 29.143797057999134 - type: mrr value: 28.08174603174603 - task: 
type: Retrieval dataset: name: MTEB MMarcoRetrieval type: C-MTEB/MMarcoRetrieval config: default split: dev revision: 539bbde593d947e2a124ba72651aafc09eb33fc2 metrics: - type: map_at_1 value: 70.492 - type: map_at_10 value: 79.501 - type: map_at_100 value: 79.728 - type: map_at_1000 value: 79.735 - type: map_at_3 value: 77.77 - type: map_at_5 value: 78.851 - type: mrr_at_1 value: 72.822 - type: mrr_at_10 value: 80.001 - type: mrr_at_100 value: 80.19 - type: mrr_at_1000 value: 80.197 - type: mrr_at_3 value: 78.484 - type: mrr_at_5 value: 79.42099999999999 - type: ndcg_at_1 value: 72.822 - type: ndcg_at_10 value: 83.013 - type: ndcg_at_100 value: 84.013 - type: ndcg_at_1000 value: 84.20400000000001 - type: ndcg_at_3 value: 79.728 - type: ndcg_at_5 value: 81.542 - type: precision_at_1 value: 72.822 - type: precision_at_10 value: 9.917 - type: precision_at_100 value: 1.042 - type: precision_at_1000 value: 0.106 - type: precision_at_3 value: 29.847 - type: precision_at_5 value: 18.871 - type: recall_at_1 value: 70.492 - type: recall_at_10 value: 93.325 - type: recall_at_100 value: 97.822 - type: recall_at_1000 value: 99.319 - type: recall_at_3 value: 84.636 - type: recall_at_5 value: 88.93100000000001 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 76.88298587760592 - type: f1 value: 73.89001762017176 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 80.76328177538669 - type: f1 value: 80.24718532423358 - task: type: Retrieval dataset: name: MTEB MedicalRetrieval type: C-MTEB/MedicalRetrieval config: default split: dev revision: 2039188fb5800a9803ba5048df7b76e6fb151fc6 metrics: - type: map_at_1 value: 49.6 - type: map_at_10 value: 55.620999999999995 - type: map_at_100 value: 56.204 - type: map_at_1000 value: 56.251 - type: map_at_3 value: 54.132999999999996 - type: map_at_5 value: 54.933 - type: mrr_at_1 value: 49.7 - type: mrr_at_10 value: 55.67100000000001 - type: mrr_at_100 value: 56.254000000000005 - type: mrr_at_1000 value: 56.301 - type: mrr_at_3 value: 54.18300000000001 - type: mrr_at_5 value: 54.983000000000004 - type: ndcg_at_1 value: 49.6 - type: ndcg_at_10 value: 58.645 - type: ndcg_at_100 value: 61.789 - type: ndcg_at_1000 value: 63.219 - type: ndcg_at_3 value: 55.567 - type: ndcg_at_5 value: 57.008 - type: precision_at_1 value: 49.6 - type: precision_at_10 value: 6.819999999999999 - type: precision_at_100 value: 0.836 - type: precision_at_1000 value: 0.095 - type: precision_at_3 value: 19.900000000000002 - type: precision_at_5 value: 12.64 - type: recall_at_1 value: 49.6 - type: recall_at_10 value: 68.2 - type: recall_at_100 value: 83.6 - type: recall_at_1000 value: 95.3 - type: recall_at_3 value: 59.699999999999996 - type: recall_at_5 value: 63.2 - task: type: Classification dataset: name: MTEB MultilingualSentiment type: C-MTEB/MultilingualSentiment-classification config: default split: validation revision: 46958b007a63fdbf239b7672c25d0bea67b5ea1a metrics: - type: accuracy value: 74.45666666666666 - type: f1 value: 74.32582402190089 - task: type: PairClassification dataset: name: MTEB Ocnli type: C-MTEB/OCNLI config: default split: validation revision: 66e76a618a34d6d565d5538088562851e6daa7ec metrics: - type: 
cos_sim_accuracy value: 80.67135896047645 - type: cos_sim_ap value: 87.60421240712051 - type: cos_sim_f1 value: 82.1304131408661 - type: cos_sim_precision value: 77.68361581920904 - type: cos_sim_recall value: 87.11721224920802 - type: dot_accuracy value: 79.04710341093666 - type: dot_ap value: 85.6370059719336 - type: dot_f1 value: 80.763723150358 - type: dot_precision value: 73.69337979094077 - type: dot_recall value: 89.33474128827878 - type: euclidean_accuracy value: 81.05035192203573 - type: euclidean_ap value: 87.7880240053663 - type: euclidean_f1 value: 82.50244379276637 - type: euclidean_precision value: 76.7970882620564 - type: euclidean_recall value: 89.1235480464625 - type: manhattan_accuracy value: 80.61721710882512 - type: manhattan_ap value: 87.43568120591175 - type: manhattan_f1 value: 81.89526184538653 - type: manhattan_precision value: 77.5992438563327 - type: manhattan_recall value: 86.6948257655755 - type: max_accuracy value: 81.05035192203573 - type: max_ap value: 87.7880240053663 - type: max_f1 value: 82.50244379276637 - task: type: Classification dataset: name: MTEB OnlineShopping type: C-MTEB/OnlineShopping-classification config: default split: test revision: e610f2ebd179a8fda30ae534c3878750a96db120 metrics: - type: accuracy value: 93.5 - type: ap value: 91.31357903446782 - type: f1 value: 93.48088994006616 - task: type: STS dataset: name: MTEB PAWSX type: C-MTEB/PAWSX config: default split: test revision: 9c6a90e430ac22b5779fb019a23e820b11a8b5e1 metrics: - type: cos_sim_pearson value: 36.93293453538077 - type: cos_sim_spearman value: 42.45972506308574 - type: euclidean_pearson value: 42.34945133152159 - type: euclidean_spearman value: 42.331610303674644 - type: manhattan_pearson value: 42.31455070249498 - type: manhattan_spearman value: 42.19887982891834 - task: type: STS dataset: name: MTEB QBQTC type: C-MTEB/QBQTC config: default split: test revision: 790b0510dc52b1553e8c49f3d2afb48c0e5c48b7 metrics: - type: cos_sim_pearson value: 33.683290790043785 - type: cos_sim_spearman value: 35.149171171202994 - type: euclidean_pearson value: 32.33806561267862 - type: euclidean_spearman value: 34.483576387347966 - type: manhattan_pearson value: 32.47629754599608 - type: manhattan_spearman value: 34.66434471867615 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 66.46322760516104 - type: cos_sim_spearman value: 67.398478319726 - type: euclidean_pearson value: 64.7223480293625 - type: euclidean_spearman value: 66.83118568812951 - type: manhattan_pearson value: 64.88440039828305 - type: manhattan_spearman value: 66.80429458952257 - task: type: STS dataset: name: MTEB STSB type: C-MTEB/STSB config: default split: test revision: 0cde68302b3541bb8b3c340dc0644b0b745b3dc0 metrics: - type: cos_sim_pearson value: 79.08991383232105 - type: cos_sim_spearman value: 79.39715677296854 - type: euclidean_pearson value: 78.63201279320496 - type: euclidean_spearman value: 79.40262660785731 - type: manhattan_pearson value: 78.98138363146906 - type: manhattan_spearman value: 79.79968413014194 - task: type: Reranking dataset: name: MTEB T2Reranking type: C-MTEB/T2Reranking config: default split: dev revision: 76631901a18387f85eaa53e5450019b87ad58ef9 metrics: - type: map value: 67.43289278789972 - type: mrr value: 77.53012460908535 - task: type: Retrieval dataset: name: MTEB T2Retrieval type: C-MTEB/T2Retrieval config: default split: dev revision: 
8731a845f1bf500a4f111cf1070785c793d10e64 metrics: - type: map_at_1 value: 27.733999999999998 - type: map_at_10 value: 78.24799999999999 - type: map_at_100 value: 81.765 - type: map_at_1000 value: 81.824 - type: map_at_3 value: 54.92 - type: map_at_5 value: 67.61399999999999 - type: mrr_at_1 value: 90.527 - type: mrr_at_10 value: 92.843 - type: mrr_at_100 value: 92.927 - type: mrr_at_1000 value: 92.93 - type: mrr_at_3 value: 92.45100000000001 - type: mrr_at_5 value: 92.693 - type: ndcg_at_1 value: 90.527 - type: ndcg_at_10 value: 85.466 - type: ndcg_at_100 value: 88.846 - type: ndcg_at_1000 value: 89.415 - type: ndcg_at_3 value: 86.768 - type: ndcg_at_5 value: 85.46000000000001 - type: precision_at_1 value: 90.527 - type: precision_at_10 value: 42.488 - type: precision_at_100 value: 5.024 - type: precision_at_1000 value: 0.516 - type: precision_at_3 value: 75.907 - type: precision_at_5 value: 63.727000000000004 - type: recall_at_1 value: 27.733999999999998 - type: recall_at_10 value: 84.346 - type: recall_at_100 value: 95.536 - type: recall_at_1000 value: 98.42999999999999 - type: recall_at_3 value: 56.455 - type: recall_at_5 value: 70.755 - task: type: Classification dataset: name: MTEB TNews type: C-MTEB/TNews-classification config: default split: validation revision: 317f262bf1e6126357bbe89e875451e4b0938fe4 metrics: - type: accuracy value: 49.952000000000005 - type: f1 value: 48.264617195258054 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringP2P type: C-MTEB/ThuNewsClusteringP2P config: default split: test revision: 5798586b105c0434e4f0fe5e767abe619442cf93 metrics: - type: v_measure value: 68.23769904483508 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringS2S type: C-MTEB/ThuNewsClusteringS2S config: default split: test revision: 8a8b2caeda43f39e13c4bc5bea0f8a667896e10d metrics: - type: v_measure value: 62.50294403136556 - task: type: Retrieval dataset: name: MTEB VideoRetrieval type: C-MTEB/VideoRetrieval config: default split: dev revision: 58c2597a5943a2ba48f4668c3b90d796283c5639 metrics: - type: map_at_1 value: 54.0 - type: map_at_10 value: 63.668 - type: map_at_100 value: 64.217 - type: map_at_1000 value: 64.23100000000001 - type: map_at_3 value: 61.7 - type: map_at_5 value: 62.870000000000005 - type: mrr_at_1 value: 54.0 - type: mrr_at_10 value: 63.668 - type: mrr_at_100 value: 64.217 - type: mrr_at_1000 value: 64.23100000000001 - type: mrr_at_3 value: 61.7 - type: mrr_at_5 value: 62.870000000000005 - type: ndcg_at_1 value: 54.0 - type: ndcg_at_10 value: 68.11399999999999 - type: ndcg_at_100 value: 70.723 - type: ndcg_at_1000 value: 71.123 - type: ndcg_at_3 value: 64.074 - type: ndcg_at_5 value: 66.178 - type: precision_at_1 value: 54.0 - type: precision_at_10 value: 8.200000000000001 - type: precision_at_100 value: 0.941 - type: precision_at_1000 value: 0.097 - type: precision_at_3 value: 23.633000000000003 - type: precision_at_5 value: 15.2 - type: recall_at_1 value: 54.0 - type: recall_at_10 value: 82.0 - type: recall_at_100 value: 94.1 - type: recall_at_1000 value: 97.3 - type: recall_at_3 value: 70.89999999999999 - type: recall_at_5 value: 76.0 - task: type: Classification dataset: name: MTEB Waimai type: C-MTEB/waimai-classification config: default split: test revision: 339287def212450dcaa9df8c22bf93e9980c7023 metrics: - type: accuracy value: 86.63000000000001 - type: ap value: 69.99457882599567 - type: f1 value: 85.07735617998541 - task: type: Clustering dataset: name: MTEB 8TagsClustering type: PL-MTEB/8tags-clustering config: default split: test 
revision: None metrics: - type: v_measure value: 44.594104491193555 - task: type: Classification dataset: name: MTEB AllegroReviews type: PL-MTEB/allegro-reviews config: default split: test revision: None metrics: - type: accuracy value: 63.97614314115309 - type: f1 value: 52.15634261679283 - task: type: Retrieval dataset: name: MTEB ArguAna-PL type: clarin-knext/arguana-pl config: default split: test revision: 63fc86750af76253e8c760fc9e534bbf24d260a2 metrics: - type: map_at_1 value: 32.646 - type: map_at_10 value: 47.963 - type: map_at_100 value: 48.789 - type: map_at_1000 value: 48.797000000000004 - type: map_at_3 value: 43.196 - type: map_at_5 value: 46.016 - type: mrr_at_1 value: 33.073 - type: mrr_at_10 value: 48.126000000000005 - type: mrr_at_100 value: 48.946 - type: mrr_at_1000 value: 48.953 - type: mrr_at_3 value: 43.374 - type: mrr_at_5 value: 46.147 - type: ndcg_at_1 value: 32.646 - type: ndcg_at_10 value: 56.481 - type: ndcg_at_100 value: 59.922 - type: ndcg_at_1000 value: 60.07 - type: ndcg_at_3 value: 46.675 - type: ndcg_at_5 value: 51.76500000000001 - type: precision_at_1 value: 32.646 - type: precision_at_10 value: 8.371 - type: precision_at_100 value: 0.9860000000000001 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 18.919 - type: precision_at_5 value: 13.825999999999999 - type: recall_at_1 value: 32.646 - type: recall_at_10 value: 83.71300000000001 - type: recall_at_100 value: 98.578 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 56.757000000000005 - type: recall_at_5 value: 69.132 - task: type: Classification dataset: name: MTEB CBD type: PL-MTEB/cbd config: default split: test revision: None metrics: - type: accuracy value: 68.56 - type: ap value: 23.310493680488513 - type: f1 value: 58.85369533105693 - task: type: PairClassification dataset: name: MTEB CDSC-E type: PL-MTEB/cdsce-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 88.5 - type: cos_sim_ap value: 72.42140924378361 - type: cos_sim_f1 value: 66.0919540229885 - type: cos_sim_precision value: 72.78481012658227 - type: cos_sim_recall value: 60.526315789473685 - type: dot_accuracy value: 88.5 - type: dot_ap value: 72.42140924378361 - type: dot_f1 value: 66.0919540229885 - type: dot_precision value: 72.78481012658227 - type: dot_recall value: 60.526315789473685 - type: euclidean_accuracy value: 88.5 - type: euclidean_ap value: 72.42140924378361 - type: euclidean_f1 value: 66.0919540229885 - type: euclidean_precision value: 72.78481012658227 - type: euclidean_recall value: 60.526315789473685 - type: manhattan_accuracy value: 88.5 - type: manhattan_ap value: 72.49745515311696 - type: manhattan_f1 value: 66.0968660968661 - type: manhattan_precision value: 72.04968944099379 - type: manhattan_recall value: 61.05263157894737 - type: max_accuracy value: 88.5 - type: max_ap value: 72.49745515311696 - type: max_f1 value: 66.0968660968661 - task: type: STS dataset: name: MTEB CDSC-R type: PL-MTEB/cdscr-sts config: default split: test revision: None metrics: - type: cos_sim_pearson value: 90.32269765590145 - type: cos_sim_spearman value: 89.73666311491672 - type: euclidean_pearson value: 88.2933868516544 - type: euclidean_spearman value: 89.73666311491672 - type: manhattan_pearson value: 88.33474590219448 - type: manhattan_spearman value: 89.8548364866583 - task: type: Retrieval dataset: name: MTEB DBPedia-PL type: clarin-knext/dbpedia-pl config: default split: test revision: 76afe41d9af165cc40999fcaa92312b8b012064a metrics: - type: 
map_at_1 value: 7.632999999999999 - type: map_at_10 value: 16.426 - type: map_at_100 value: 22.651 - type: map_at_1000 value: 24.372 - type: map_at_3 value: 11.706 - type: map_at_5 value: 13.529 - type: mrr_at_1 value: 60.75000000000001 - type: mrr_at_10 value: 68.613 - type: mrr_at_100 value: 69.001 - type: mrr_at_1000 value: 69.021 - type: mrr_at_3 value: 67.0 - type: mrr_at_5 value: 67.925 - type: ndcg_at_1 value: 49.875 - type: ndcg_at_10 value: 36.978 - type: ndcg_at_100 value: 40.031 - type: ndcg_at_1000 value: 47.566 - type: ndcg_at_3 value: 41.148 - type: ndcg_at_5 value: 38.702 - type: precision_at_1 value: 60.75000000000001 - type: precision_at_10 value: 29.7 - type: precision_at_100 value: 9.278 - type: precision_at_1000 value: 2.099 - type: precision_at_3 value: 44.0 - type: precision_at_5 value: 37.6 - type: recall_at_1 value: 7.632999999999999 - type: recall_at_10 value: 22.040000000000003 - type: recall_at_100 value: 44.024 - type: recall_at_1000 value: 67.848 - type: recall_at_3 value: 13.093 - type: recall_at_5 value: 15.973 - task: type: Retrieval dataset: name: MTEB FiQA-PL type: clarin-knext/fiqa-pl config: default split: test revision: 2e535829717f8bf9dc829b7f911cc5bbd4e6608e metrics: - type: map_at_1 value: 15.473 - type: map_at_10 value: 24.579 - type: map_at_100 value: 26.387 - type: map_at_1000 value: 26.57 - type: map_at_3 value: 21.278 - type: map_at_5 value: 23.179 - type: mrr_at_1 value: 30.709999999999997 - type: mrr_at_10 value: 38.994 - type: mrr_at_100 value: 39.993 - type: mrr_at_1000 value: 40.044999999999995 - type: mrr_at_3 value: 36.342999999999996 - type: mrr_at_5 value: 37.846999999999994 - type: ndcg_at_1 value: 30.709999999999997 - type: ndcg_at_10 value: 31.608999999999998 - type: ndcg_at_100 value: 38.807 - type: ndcg_at_1000 value: 42.208 - type: ndcg_at_3 value: 28.086 - type: ndcg_at_5 value: 29.323 - type: precision_at_1 value: 30.709999999999997 - type: precision_at_10 value: 8.688 - type: precision_at_100 value: 1.608 - type: precision_at_1000 value: 0.22100000000000003 - type: precision_at_3 value: 18.724 - type: precision_at_5 value: 13.950999999999999 - type: recall_at_1 value: 15.473 - type: recall_at_10 value: 38.361000000000004 - type: recall_at_100 value: 65.2 - type: recall_at_1000 value: 85.789 - type: recall_at_3 value: 25.401 - type: recall_at_5 value: 30.875999999999998 - task: type: Retrieval dataset: name: MTEB HotpotQA-PL type: clarin-knext/hotpotqa-pl config: default split: test revision: a0bd479ac97b4ccb5bd6ce320c415d0bb4beb907 metrics: - type: map_at_1 value: 38.096000000000004 - type: map_at_10 value: 51.44499999999999 - type: map_at_100 value: 52.325 - type: map_at_1000 value: 52.397000000000006 - type: map_at_3 value: 48.626999999999995 - type: map_at_5 value: 50.342 - type: mrr_at_1 value: 76.19200000000001 - type: mrr_at_10 value: 81.191 - type: mrr_at_100 value: 81.431 - type: mrr_at_1000 value: 81.443 - type: mrr_at_3 value: 80.30199999999999 - type: mrr_at_5 value: 80.85900000000001 - type: ndcg_at_1 value: 76.19200000000001 - type: ndcg_at_10 value: 60.9 - type: ndcg_at_100 value: 64.14699999999999 - type: ndcg_at_1000 value: 65.647 - type: ndcg_at_3 value: 56.818000000000005 - type: ndcg_at_5 value: 59.019999999999996 - type: precision_at_1 value: 76.19200000000001 - type: precision_at_10 value: 12.203 - type: precision_at_100 value: 1.478 - type: precision_at_1000 value: 0.168 - type: precision_at_3 value: 34.616 - type: precision_at_5 value: 22.515 - type: recall_at_1 value: 38.096000000000004 - type: 
recall_at_10 value: 61.013 - type: recall_at_100 value: 73.90299999999999 - type: recall_at_1000 value: 83.91 - type: recall_at_3 value: 51.92400000000001 - type: recall_at_5 value: 56.286 - task: type: Retrieval dataset: name: MTEB MSMARCO-PL type: clarin-knext/msmarco-pl config: default split: test revision: 8634c07806d5cce3a6138e260e59b81760a0a640 metrics: - type: map_at_1 value: 1.548 - type: map_at_10 value: 11.049000000000001 - type: map_at_100 value: 28.874 - type: map_at_1000 value: 34.931 - type: map_at_3 value: 4.162 - type: map_at_5 value: 6.396 - type: mrr_at_1 value: 90.69800000000001 - type: mrr_at_10 value: 92.093 - type: mrr_at_100 value: 92.345 - type: mrr_at_1000 value: 92.345 - type: mrr_at_3 value: 91.86 - type: mrr_at_5 value: 91.86 - type: ndcg_at_1 value: 74.031 - type: ndcg_at_10 value: 63.978 - type: ndcg_at_100 value: 53.101 - type: ndcg_at_1000 value: 60.675999999999995 - type: ndcg_at_3 value: 71.421 - type: ndcg_at_5 value: 68.098 - type: precision_at_1 value: 90.69800000000001 - type: precision_at_10 value: 71.86 - type: precision_at_100 value: 31.395 - type: precision_at_1000 value: 5.981 - type: precision_at_3 value: 84.49600000000001 - type: precision_at_5 value: 79.07 - type: recall_at_1 value: 1.548 - type: recall_at_10 value: 12.149000000000001 - type: recall_at_100 value: 40.794999999999995 - type: recall_at_1000 value: 67.974 - type: recall_at_3 value: 4.244 - type: recall_at_5 value: 6.608 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.55413584398119 - type: f1 value: 69.65610882318181 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.37188971082716 - type: f1 value: 75.64847309941361 - task: type: Retrieval dataset: name: MTEB NFCorpus-PL type: clarin-knext/nfcorpus-pl config: default split: test revision: 9a6f9567fda928260afed2de480d79c98bf0bec0 metrics: - type: map_at_1 value: 4.919 - type: map_at_10 value: 10.834000000000001 - type: map_at_100 value: 13.38 - type: map_at_1000 value: 14.581 - type: map_at_3 value: 8.198 - type: map_at_5 value: 9.428 - type: mrr_at_1 value: 41.176 - type: mrr_at_10 value: 50.083 - type: mrr_at_100 value: 50.559 - type: mrr_at_1000 value: 50.604000000000006 - type: mrr_at_3 value: 47.936 - type: mrr_at_5 value: 49.407000000000004 - type: ndcg_at_1 value: 39.628 - type: ndcg_at_10 value: 30.098000000000003 - type: ndcg_at_100 value: 27.061 - type: ndcg_at_1000 value: 35.94 - type: ndcg_at_3 value: 35.135 - type: ndcg_at_5 value: 33.335 - type: precision_at_1 value: 41.176 - type: precision_at_10 value: 22.259999999999998 - type: precision_at_100 value: 6.712 - type: precision_at_1000 value: 1.9060000000000001 - type: precision_at_3 value: 33.23 - type: precision_at_5 value: 29.04 - type: recall_at_1 value: 4.919 - type: recall_at_10 value: 14.196 - type: recall_at_100 value: 26.948 - type: recall_at_1000 value: 59.211000000000006 - type: recall_at_3 value: 9.44 - type: recall_at_5 value: 11.569 - task: type: Retrieval dataset: name: MTEB NQ-PL type: clarin-knext/nq-pl config: default split: test revision: f171245712cf85dd4700b06bef18001578d0ca8d metrics: - type: map_at_1 value: 25.35 - type: map_at_10 value: 37.884 - type: map_at_100 value: 38.955 - type: map_at_1000 
value: 39.007999999999996 - type: map_at_3 value: 34.239999999999995 - type: map_at_5 value: 36.398 - type: mrr_at_1 value: 28.737000000000002 - type: mrr_at_10 value: 39.973 - type: mrr_at_100 value: 40.844 - type: mrr_at_1000 value: 40.885 - type: mrr_at_3 value: 36.901 - type: mrr_at_5 value: 38.721 - type: ndcg_at_1 value: 28.708 - type: ndcg_at_10 value: 44.204 - type: ndcg_at_100 value: 48.978 - type: ndcg_at_1000 value: 50.33 - type: ndcg_at_3 value: 37.36 - type: ndcg_at_5 value: 40.912 - type: precision_at_1 value: 28.708 - type: precision_at_10 value: 7.367 - type: precision_at_100 value: 1.0030000000000001 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 17.034 - type: precision_at_5 value: 12.293999999999999 - type: recall_at_1 value: 25.35 - type: recall_at_10 value: 61.411 - type: recall_at_100 value: 82.599 - type: recall_at_1000 value: 92.903 - type: recall_at_3 value: 43.728 - type: recall_at_5 value: 51.854 - task: type: Classification dataset: name: MTEB PAC type: laugustyniak/abusive-clauses-pl config: default split: test revision: None metrics: - type: accuracy value: 69.04141326382856 - type: ap value: 77.49422763833996 - type: f1 value: 66.73472657783407 - task: type: PairClassification dataset: name: MTEB PPC type: PL-MTEB/ppc-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 81.0 - type: cos_sim_ap value: 91.47194213011349 - type: cos_sim_f1 value: 84.73767885532592 - type: cos_sim_precision value: 81.49847094801224 - type: cos_sim_recall value: 88.24503311258279 - type: dot_accuracy value: 81.0 - type: dot_ap value: 91.47194213011349 - type: dot_f1 value: 84.73767885532592 - type: dot_precision value: 81.49847094801224 - type: dot_recall value: 88.24503311258279 - type: euclidean_accuracy value: 81.0 - type: euclidean_ap value: 91.47194213011349 - type: euclidean_f1 value: 84.73767885532592 - type: euclidean_precision value: 81.49847094801224 - type: euclidean_recall value: 88.24503311258279 - type: manhattan_accuracy value: 81.0 - type: manhattan_ap value: 91.46464475050571 - type: manhattan_f1 value: 84.48687350835321 - type: manhattan_precision value: 81.31699846860643 - type: manhattan_recall value: 87.91390728476821 - type: max_accuracy value: 81.0 - type: max_ap value: 91.47194213011349 - type: max_f1 value: 84.73767885532592 - task: type: PairClassification dataset: name: MTEB PSC type: PL-MTEB/psc-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 97.6808905380334 - type: cos_sim_ap value: 99.27948611836348 - type: cos_sim_f1 value: 96.15975422427034 - type: cos_sim_precision value: 96.90402476780186 - type: cos_sim_recall value: 95.42682926829268 - type: dot_accuracy value: 97.6808905380334 - type: dot_ap value: 99.2794861183635 - type: dot_f1 value: 96.15975422427034 - type: dot_precision value: 96.90402476780186 - type: dot_recall value: 95.42682926829268 - type: euclidean_accuracy value: 97.6808905380334 - type: euclidean_ap value: 99.2794861183635 - type: euclidean_f1 value: 96.15975422427034 - type: euclidean_precision value: 96.90402476780186 - type: euclidean_recall value: 95.42682926829268 - type: manhattan_accuracy value: 97.6808905380334 - type: manhattan_ap value: 99.28715055268721 - type: manhattan_f1 value: 96.14791987673343 - type: manhattan_precision value: 97.19626168224299 - type: manhattan_recall value: 95.1219512195122 - type: max_accuracy value: 97.6808905380334 - type: max_ap value: 
99.28715055268721 - type: max_f1 value: 96.15975422427034 - task: type: Classification dataset: name: MTEB PolEmo2.0-IN type: PL-MTEB/polemo2_in config: default split: test revision: None metrics: - type: accuracy value: 86.16343490304708 - type: f1 value: 83.3442579486744 - task: type: Classification dataset: name: MTEB PolEmo2.0-OUT type: PL-MTEB/polemo2_out config: default split: test revision: None metrics: - type: accuracy value: 68.40080971659918 - type: f1 value: 53.13720751142237 - task: type: Retrieval dataset: name: MTEB Quora-PL type: clarin-knext/quora-pl config: default split: test revision: 0be27e93455051e531182b85e85e425aba12e9d4 metrics: - type: map_at_1 value: 63.322 - type: map_at_10 value: 76.847 - type: map_at_100 value: 77.616 - type: map_at_1000 value: 77.644 - type: map_at_3 value: 73.624 - type: map_at_5 value: 75.603 - type: mrr_at_1 value: 72.88 - type: mrr_at_10 value: 80.376 - type: mrr_at_100 value: 80.604 - type: mrr_at_1000 value: 80.61 - type: mrr_at_3 value: 78.92 - type: mrr_at_5 value: 79.869 - type: ndcg_at_1 value: 72.89999999999999 - type: ndcg_at_10 value: 81.43 - type: ndcg_at_100 value: 83.394 - type: ndcg_at_1000 value: 83.685 - type: ndcg_at_3 value: 77.62599999999999 - type: ndcg_at_5 value: 79.656 - type: precision_at_1 value: 72.89999999999999 - type: precision_at_10 value: 12.548 - type: precision_at_100 value: 1.4869999999999999 - type: precision_at_1000 value: 0.155 - type: precision_at_3 value: 34.027 - type: precision_at_5 value: 22.654 - type: recall_at_1 value: 63.322 - type: recall_at_10 value: 90.664 - type: recall_at_100 value: 97.974 - type: recall_at_1000 value: 99.636 - type: recall_at_3 value: 80.067 - type: recall_at_5 value: 85.526 - task: type: Retrieval dataset: name: MTEB SCIDOCS-PL type: clarin-knext/scidocs-pl config: default split: test revision: 45452b03f05560207ef19149545f168e596c9337 metrics: - type: map_at_1 value: 3.95 - type: map_at_10 value: 9.658999999999999 - type: map_at_100 value: 11.384 - type: map_at_1000 value: 11.677 - type: map_at_3 value: 7.055 - type: map_at_5 value: 8.244 - type: mrr_at_1 value: 19.5 - type: mrr_at_10 value: 28.777 - type: mrr_at_100 value: 29.936 - type: mrr_at_1000 value: 30.009999999999998 - type: mrr_at_3 value: 25.55 - type: mrr_at_5 value: 27.284999999999997 - type: ndcg_at_1 value: 19.5 - type: ndcg_at_10 value: 16.589000000000002 - type: ndcg_at_100 value: 23.879 - type: ndcg_at_1000 value: 29.279 - type: ndcg_at_3 value: 15.719 - type: ndcg_at_5 value: 13.572000000000001 - type: precision_at_1 value: 19.5 - type: precision_at_10 value: 8.62 - type: precision_at_100 value: 1.924 - type: precision_at_1000 value: 0.322 - type: precision_at_3 value: 14.6 - type: precision_at_5 value: 11.78 - type: recall_at_1 value: 3.95 - type: recall_at_10 value: 17.477999999999998 - type: recall_at_100 value: 38.99 - type: recall_at_1000 value: 65.417 - type: recall_at_3 value: 8.883000000000001 - type: recall_at_5 value: 11.933 - task: type: PairClassification dataset: name: MTEB SICK-E-PL type: PL-MTEB/sicke-pl-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 83.48960456583775 - type: cos_sim_ap value: 76.31522115825375 - type: cos_sim_f1 value: 70.35573122529645 - type: cos_sim_precision value: 70.9934735315446 - type: cos_sim_recall value: 69.72934472934473 - type: dot_accuracy value: 83.48960456583775 - type: dot_ap value: 76.31522115825373 - type: dot_f1 value: 70.35573122529645 - type: dot_precision value: 70.9934735315446 - type: 
dot_recall value: 69.72934472934473 - type: euclidean_accuracy value: 83.48960456583775 - type: euclidean_ap value: 76.31522115825373 - type: euclidean_f1 value: 70.35573122529645 - type: euclidean_precision value: 70.9934735315446 - type: euclidean_recall value: 69.72934472934473 - type: manhattan_accuracy value: 83.46922136159804 - type: manhattan_ap value: 76.18474601388084 - type: manhattan_f1 value: 70.34779490856937 - type: manhattan_precision value: 70.83032490974729 - type: manhattan_recall value: 69.87179487179486 - type: max_accuracy value: 83.48960456583775 - type: max_ap value: 76.31522115825375 - type: max_f1 value: 70.35573122529645 - task: type: STS dataset: name: MTEB SICK-R-PL type: PL-MTEB/sickr-pl-sts config: default split: test revision: None metrics: - type: cos_sim_pearson value: 77.95374883876302 - type: cos_sim_spearman value: 73.77630219171942 - type: euclidean_pearson value: 75.81927069594934 - type: euclidean_spearman value: 73.7763211303831 - type: manhattan_pearson value: 76.03126859057528 - type: manhattan_spearman value: 73.96528138013369 - task: type: STS dataset: name: MTEB STS22 (pl) type: mteb/sts22-crosslingual-sts config: pl split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 37.388282764841826 - type: cos_sim_spearman value: 40.83477184710897 - type: euclidean_pearson value: 26.754737044177805 - type: euclidean_spearman value: 40.83477184710897 - type: manhattan_pearson value: 26.760453110872458 - type: manhattan_spearman value: 41.034477441383856 - task: type: Retrieval dataset: name: MTEB SciFact-PL type: clarin-knext/scifact-pl config: default split: test revision: 47932a35f045ef8ed01ba82bf9ff67f6e109207e metrics: - type: map_at_1 value: 49.15 - type: map_at_10 value: 61.690999999999995 - type: map_at_100 value: 62.348000000000006 - type: map_at_1000 value: 62.38 - type: map_at_3 value: 58.824 - type: map_at_5 value: 60.662000000000006 - type: mrr_at_1 value: 51.333 - type: mrr_at_10 value: 62.731 - type: mrr_at_100 value: 63.245 - type: mrr_at_1000 value: 63.275000000000006 - type: mrr_at_3 value: 60.667 - type: mrr_at_5 value: 61.93300000000001 - type: ndcg_at_1 value: 51.333 - type: ndcg_at_10 value: 67.168 - type: ndcg_at_100 value: 69.833 - type: ndcg_at_1000 value: 70.56700000000001 - type: ndcg_at_3 value: 62.40599999999999 - type: ndcg_at_5 value: 65.029 - type: precision_at_1 value: 51.333 - type: precision_at_10 value: 9.333 - type: precision_at_100 value: 1.0699999999999998 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 25.333 - type: precision_at_5 value: 17.067 - type: recall_at_1 value: 49.15 - type: recall_at_10 value: 82.533 - type: recall_at_100 value: 94.167 - type: recall_at_1000 value: 99.667 - type: recall_at_3 value: 69.917 - type: recall_at_5 value: 76.356 - task: type: Retrieval dataset: name: MTEB TRECCOVID-PL type: clarin-knext/trec-covid-pl config: default split: test revision: 81bcb408f33366c2a20ac54adafad1ae7e877fdd metrics: - type: map_at_1 value: 0.261 - type: map_at_10 value: 2.1260000000000003 - type: map_at_100 value: 12.171999999999999 - type: map_at_1000 value: 26.884999999999998 - type: map_at_3 value: 0.695 - type: map_at_5 value: 1.134 - type: mrr_at_1 value: 96.0 - type: mrr_at_10 value: 96.952 - type: mrr_at_100 value: 96.952 - type: mrr_at_1000 value: 96.952 - type: mrr_at_3 value: 96.667 - type: mrr_at_5 value: 96.667 - type: ndcg_at_1 value: 92.0 - type: ndcg_at_10 value: 81.193 - type: ndcg_at_100 value: 61.129 - type: 
ndcg_at_1000 value: 51.157 - type: ndcg_at_3 value: 85.693 - type: ndcg_at_5 value: 84.129 - type: precision_at_1 value: 96.0 - type: precision_at_10 value: 85.39999999999999 - type: precision_at_100 value: 62.03999999999999 - type: precision_at_1000 value: 22.224 - type: precision_at_3 value: 88.0 - type: precision_at_5 value: 88.0 - type: recall_at_1 value: 0.261 - type: recall_at_10 value: 2.262 - type: recall_at_100 value: 14.981 - type: recall_at_1000 value: 46.837 - type: recall_at_3 value: 0.703 - type: recall_at_5 value: 1.172 - task: type: Clustering dataset: name: MTEB AlloProfClusteringP2P type: lyon-nlp/alloprof config: default split: test revision: 392ba3f5bcc8c51f578786c1fc3dae648662cb9b metrics: - type: v_measure value: 70.55290063940157 - type: v_measure value: 55.41500719337263 - task: type: Reranking dataset: name: MTEB AlloprofReranking type: lyon-nlp/mteb-fr-reranking-alloprof-s2p config: default split: test revision: 666fdacebe0291776e86f29345663dfaf80a0db9 metrics: - type: map value: 73.48697375332002 - type: mrr value: 75.01836585523822 - task: type: Retrieval dataset: name: MTEB AlloprofRetrieval type: lyon-nlp/alloprof config: default split: test revision: 392ba3f5bcc8c51f578786c1fc3dae648662cb9b metrics: - type: map_at_1 value: 38.454 - type: map_at_10 value: 51.605000000000004 - type: map_at_100 value: 52.653000000000006 - type: map_at_1000 value: 52.697 - type: map_at_3 value: 48.304 - type: map_at_5 value: 50.073 - type: mrr_at_1 value: 43.307 - type: mrr_at_10 value: 54.400000000000006 - type: mrr_at_100 value: 55.147999999999996 - type: mrr_at_1000 value: 55.174 - type: mrr_at_3 value: 51.77 - type: mrr_at_5 value: 53.166999999999994 - type: ndcg_at_1 value: 43.307 - type: ndcg_at_10 value: 57.891000000000005 - type: ndcg_at_100 value: 62.161 - type: ndcg_at_1000 value: 63.083 - type: ndcg_at_3 value: 51.851 - type: ndcg_at_5 value: 54.605000000000004 - type: precision_at_1 value: 43.307 - type: precision_at_10 value: 9.033 - type: precision_at_100 value: 1.172 - type: precision_at_1000 value: 0.127 - type: precision_at_3 value: 22.798 - type: precision_at_5 value: 15.492 - type: recall_at_1 value: 38.454 - type: recall_at_10 value: 74.166 - type: recall_at_100 value: 92.43599999999999 - type: recall_at_1000 value: 99.071 - type: recall_at_3 value: 58.087 - type: recall_at_5 value: 64.568 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 53.474 - type: f1 value: 50.38275392350236 - task: type: Retrieval dataset: name: MTEB BSARDRetrieval type: maastrichtlawtech/bsard config: default split: test revision: 5effa1b9b5fa3b0f9e12523e6e43e5f86a6e6d59 metrics: - type: map_at_1 value: 2.252 - type: map_at_10 value: 4.661 - type: map_at_100 value: 5.271 - type: map_at_1000 value: 5.3629999999999995 - type: map_at_3 value: 3.604 - type: map_at_5 value: 4.3020000000000005 - type: mrr_at_1 value: 2.252 - type: mrr_at_10 value: 4.661 - type: mrr_at_100 value: 5.271 - type: mrr_at_1000 value: 5.3629999999999995 - type: mrr_at_3 value: 3.604 - type: mrr_at_5 value: 4.3020000000000005 - type: ndcg_at_1 value: 2.252 - type: ndcg_at_10 value: 6.3020000000000005 - type: ndcg_at_100 value: 10.342 - type: ndcg_at_1000 value: 13.475999999999999 - type: ndcg_at_3 value: 4.0649999999999995 - type: ndcg_at_5 value: 5.344 - type: precision_at_1 value: 2.252 - type: precision_at_10 value: 1.171 - type: 
precision_at_100 value: 0.333 - type: precision_at_1000 value: 0.059000000000000004 - type: precision_at_3 value: 1.802 - type: precision_at_5 value: 1.712 - type: recall_at_1 value: 2.252 - type: recall_at_10 value: 11.712 - type: recall_at_100 value: 33.333 - type: recall_at_1000 value: 59.458999999999996 - type: recall_at_3 value: 5.405 - type: recall_at_5 value: 8.559 - task: type: Clustering dataset: name: MTEB HALClusteringS2S type: lyon-nlp/clustering-hal-s2s config: default split: test revision: e06ebbbb123f8144bef1a5d18796f3dec9ae2915 metrics: - type: v_measure value: 28.301882091023288 - task: type: Clustering dataset: name: MTEB MLSUMClusteringP2P type: mlsum config: default split: test revision: b5d54f8f3b61ae17845046286940f03c6bc79bc7 metrics: - type: v_measure value: 45.26992995191701 - type: v_measure value: 42.773174876871145 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.47635452552458 - type: f1 value: 93.19922617577213 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 80.2317569683683 - type: f1 value: 56.18060418621901 - task: type: Classification dataset: name: MTEB MasakhaNEWSClassification (fra) type: masakhane/masakhanews config: fra split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: accuracy value: 85.18957345971565 - type: f1 value: 80.829981537394 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (fra) type: masakhane/masakhanews config: fra split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: v_measure value: 71.04138999801822 - type: v_measure value: 71.7056263158008 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 76.65097511768661 - type: f1 value: 73.82441070598712 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 79.09885675857431 - type: f1 value: 78.28407777434224 - task: type: Retrieval dataset: name: MTEB MintakaRetrieval (fr) type: jinaai/mintakaqa config: fr split: test revision: efa78cc2f74bbcd21eff2261f9e13aebe40b814e metrics: - type: map_at_1 value: 25.307000000000002 - type: map_at_10 value: 36.723 - type: map_at_100 value: 37.713 - type: map_at_1000 value: 37.769000000000005 - type: map_at_3 value: 33.77 - type: map_at_5 value: 35.463 - type: mrr_at_1 value: 25.307000000000002 - type: mrr_at_10 value: 36.723 - type: mrr_at_100 value: 37.713 - type: mrr_at_1000 value: 37.769000000000005 - type: mrr_at_3 value: 33.77 - type: mrr_at_5 value: 35.463 - type: ndcg_at_1 value: 25.307000000000002 - type: ndcg_at_10 value: 42.559999999999995 - type: ndcg_at_100 value: 47.457 - type: ndcg_at_1000 value: 49.162 - type: ndcg_at_3 value: 36.461 - type: ndcg_at_5 value: 39.504 - type: precision_at_1 value: 25.307000000000002 - type: precision_at_10 value: 6.106 - type: precision_at_100 value: 0.8420000000000001 - type: precision_at_1000 value: 0.098 - type: precision_at_3 value: 14.741999999999999 - type: precision_at_5 
value: 10.319 - type: recall_at_1 value: 25.307000000000002 - type: recall_at_10 value: 61.056999999999995 - type: recall_at_100 value: 84.152 - type: recall_at_1000 value: 98.03399999999999 - type: recall_at_3 value: 44.226 - type: recall_at_5 value: 51.597 - task: type: PairClassification dataset: name: MTEB OpusparcusPC (fr) type: GEM/opusparcus config: fr split: test revision: 9e9b1f8ef51616073f47f306f7f47dd91663f86a metrics: - type: cos_sim_accuracy value: 99.90069513406156 - type: cos_sim_ap value: 100.0 - type: cos_sim_f1 value: 99.95032290114257 - type: cos_sim_precision value: 100.0 - type: cos_sim_recall value: 99.90069513406156 - type: dot_accuracy value: 99.90069513406156 - type: dot_ap value: 100.0 - type: dot_f1 value: 99.95032290114257 - type: dot_precision value: 100.0 - type: dot_recall value: 99.90069513406156 - type: euclidean_accuracy value: 99.90069513406156 - type: euclidean_ap value: 100.0 - type: euclidean_f1 value: 99.95032290114257 - type: euclidean_precision value: 100.0 - type: euclidean_recall value: 99.90069513406156 - type: manhattan_accuracy value: 99.90069513406156 - type: manhattan_ap value: 100.0 - type: manhattan_f1 value: 99.95032290114257 - type: manhattan_precision value: 100.0 - type: manhattan_recall value: 99.90069513406156 - type: max_accuracy value: 99.90069513406156 - type: max_ap value: 100.0 - type: max_f1 value: 99.95032290114257 - task: type: PairClassification dataset: name: MTEB PawsX (fr) type: paws-x config: fr split: test revision: 8a04d940a42cd40658986fdd8e3da561533a3646 metrics: - type: cos_sim_accuracy value: 70.8 - type: cos_sim_ap value: 73.7671529695957 - type: cos_sim_f1 value: 68.80964339527875 - type: cos_sim_precision value: 62.95955882352941 - type: cos_sim_recall value: 75.85825027685493 - type: dot_accuracy value: 70.8 - type: dot_ap value: 73.78345265366947 - type: dot_f1 value: 68.80964339527875 - type: dot_precision value: 62.95955882352941 - type: dot_recall value: 75.85825027685493 - type: euclidean_accuracy value: 70.8 - type: euclidean_ap value: 73.7671529695957 - type: euclidean_f1 value: 68.80964339527875 - type: euclidean_precision value: 62.95955882352941 - type: euclidean_recall value: 75.85825027685493 - type: manhattan_accuracy value: 70.75 - type: manhattan_ap value: 73.78996383615953 - type: manhattan_f1 value: 68.79432624113475 - type: manhattan_precision value: 63.39869281045751 - type: manhattan_recall value: 75.1937984496124 - type: max_accuracy value: 70.8 - type: max_ap value: 73.78996383615953 - type: max_f1 value: 68.80964339527875 - task: type: STS dataset: name: MTEB SICKFr type: Lajavaness/SICK-fr config: default split: test revision: e077ab4cf4774a1e36d86d593b150422fafd8e8a metrics: - type: cos_sim_pearson value: 84.03253762760392 - type: cos_sim_spearman value: 79.68280105762004 - type: euclidean_pearson value: 80.98265050044444 - type: euclidean_spearman value: 79.68233242682867 - type: manhattan_pearson value: 80.9678911810704 - type: manhattan_spearman value: 79.70264097683109 - task: type: STS dataset: name: MTEB STS22 (fr) type: mteb/sts22-crosslingual-sts config: fr split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 80.56896987572884 - type: cos_sim_spearman value: 81.84352499523287 - type: euclidean_pearson value: 80.40831759421305 - type: euclidean_spearman value: 81.84352499523287 - type: manhattan_pearson value: 80.74333857561238 - type: manhattan_spearman value: 82.41503246733892 - task: type: STS dataset: name: MTEB 
STSBenchmarkMultilingualSTS (fr) type: stsb_multi_mt config: fr split: test revision: 93d57ef91790589e3ce9c365164337a8a78b7632 metrics: - type: cos_sim_pearson value: 82.71826762276979 - type: cos_sim_spearman value: 82.25433354916042 - type: euclidean_pearson value: 81.87115571724316 - type: euclidean_spearman value: 82.25322342890107 - type: manhattan_pearson value: 82.11174867527224 - type: manhattan_spearman value: 82.55905365203084 - task: type: Summarization dataset: name: MTEB SummEvalFr type: lyon-nlp/summarization-summeval-fr-p2p config: default split: test revision: b385812de6a9577b6f4d0f88c6a6e35395a94054 metrics: - type: cos_sim_pearson value: 30.659441623392887 - type: cos_sim_spearman value: 30.501134097353315 - type: dot_pearson value: 30.659444768851056 - type: dot_spearman value: 30.501134097353315 - task: type: Reranking dataset: name: MTEB SyntecReranking type: lyon-nlp/mteb-fr-reranking-syntec-s2p config: default split: test revision: b205c5084a0934ce8af14338bf03feb19499c84d metrics: - type: map value: 94.03333333333333 - type: mrr value: 94.03333333333333 - task: type: Retrieval dataset: name: MTEB SyntecRetrieval type: lyon-nlp/mteb-fr-retrieval-syntec-s2p config: default split: test revision: 77f7e271bf4a92b24fce5119f3486b583ca016ff metrics: - type: map_at_1 value: 79.0 - type: map_at_10 value: 87.61 - type: map_at_100 value: 87.655 - type: map_at_1000 value: 87.655 - type: map_at_3 value: 87.167 - type: map_at_5 value: 87.36699999999999 - type: mrr_at_1 value: 79.0 - type: mrr_at_10 value: 87.61 - type: mrr_at_100 value: 87.655 - type: mrr_at_1000 value: 87.655 - type: mrr_at_3 value: 87.167 - type: mrr_at_5 value: 87.36699999999999 - type: ndcg_at_1 value: 79.0 - type: ndcg_at_10 value: 90.473 - type: ndcg_at_100 value: 90.694 - type: ndcg_at_1000 value: 90.694 - type: ndcg_at_3 value: 89.464 - type: ndcg_at_5 value: 89.851 - type: precision_at_1 value: 79.0 - type: precision_at_10 value: 9.9 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 32.0 - type: precision_at_5 value: 19.400000000000002 - type: recall_at_1 value: 79.0 - type: recall_at_10 value: 99.0 - type: recall_at_100 value: 100.0 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 96.0 - type: recall_at_5 value: 97.0 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (fr) type: jinaai/xpqa config: fr split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: map_at_1 value: 39.395 - type: map_at_10 value: 59.123999999999995 - type: map_at_100 value: 60.704 - type: map_at_1000 value: 60.760000000000005 - type: map_at_3 value: 53.187 - type: map_at_5 value: 56.863 - type: mrr_at_1 value: 62.083 - type: mrr_at_10 value: 68.87299999999999 - type: mrr_at_100 value: 69.46900000000001 - type: mrr_at_1000 value: 69.48299999999999 - type: mrr_at_3 value: 66.8 - type: mrr_at_5 value: 67.928 - type: ndcg_at_1 value: 62.083 - type: ndcg_at_10 value: 65.583 - type: ndcg_at_100 value: 70.918 - type: ndcg_at_1000 value: 71.72800000000001 - type: ndcg_at_3 value: 60.428000000000004 - type: ndcg_at_5 value: 61.853 - type: precision_at_1 value: 62.083 - type: precision_at_10 value: 15.033 - type: precision_at_100 value: 1.9529999999999998 - type: precision_at_1000 value: 0.207 - type: precision_at_3 value: 36.315 - type: precision_at_5 value: 25.955000000000002 - type: recall_at_1 value: 39.395 - type: recall_at_10 value: 74.332 - type: recall_at_100 value: 94.729 - type: recall_at_1000 value: 99.75500000000001 - type: 
recall_at_3 value: 57.679 - type: recall_at_5 value: 65.036 --- <div style="width: auto; margin-left: auto; margin-right: auto"> <img src="https://i.imgur.com/jC7kdl8.jpeg" alt="TensorBlock" style="width: 100%; min-width: 400px; display: block; margin: auto;"> </div> <div style="display: flex; justify-content: space-between; width: 100%;"> <div style="display: flex; flex-direction: column; align-items: flex-start;"> <p style="margin-top: 0.5em; margin-bottom: 0em;"> Feedback and support: TensorBlock's <a href="https://x.com/tensorblock_aoi">Twitter/X</a>, <a href="https://t.me/TensorBlock">Telegram Group</a> and <a href="https://x.com/tensorblock_aoi">Discord server</a> </p> </div> </div> ## Alibaba-NLP/gte-Qwen2-1.5B-instruct - GGUF This repo contains GGUF format model files for [Alibaba-NLP/gte-Qwen2-1.5B-instruct](https://huggingface.co/Alibaba-NLP/gte-Qwen2-1.5B-instruct). The files were quantized using machines provided by [TensorBlock](https://tensorblock.co/), and they are compatible with llama.cpp as of [commit b4011](https://github.com/ggerganov/llama.cpp/commit/a6744e43e80f4be6398fc7733a01642c846dce1d). <div style="text-align: left; margin: 20px 0;"> <a href="https://tensorblock.co/waitlist/client" style="display: inline-block; padding: 10px 20px; background-color: #007bff; color: white; text-decoration: none; border-radius: 5px; font-weight: bold;"> Run them on the TensorBlock client using your local machine ↗ </a> </div> ## Prompt template ``` <|im_start|>system {system_prompt}<|im_end|> <|im_start|>user {prompt}<|im_end|> <|im_start|>assistant ``` ## Model file specification | Filename | Quant type | File Size | Description | | -------- | ---------- | --------- | ----------- | | [gte-Qwen2-1.5B-instruct-Q2_K.gguf](https://huggingface.co/tensorblock/gte-Qwen2-1.5B-instruct-GGUF/blob/main/gte-Qwen2-1.5B-instruct-Q2_K.gguf) | Q2_K | 0.701 GB | smallest, significant quality loss - not recommended for most purposes | | [gte-Qwen2-1.5B-instruct-Q3_K_S.gguf](https://huggingface.co/tensorblock/gte-Qwen2-1.5B-instruct-GGUF/blob/main/gte-Qwen2-1.5B-instruct-Q3_K_S.gguf) | Q3_K_S | 0.802 GB | very small, high quality loss | | [gte-Qwen2-1.5B-instruct-Q3_K_M.gguf](https://huggingface.co/tensorblock/gte-Qwen2-1.5B-instruct-GGUF/blob/main/gte-Qwen2-1.5B-instruct-Q3_K_M.gguf) | Q3_K_M | 0.860 GB | very small, high quality loss | | [gte-Qwen2-1.5B-instruct-Q3_K_L.gguf](https://huggingface.co/tensorblock/gte-Qwen2-1.5B-instruct-GGUF/blob/main/gte-Qwen2-1.5B-instruct-Q3_K_L.gguf) | Q3_K_L | 0.913 GB | small, substantial quality loss | | [gte-Qwen2-1.5B-instruct-Q4_0.gguf](https://huggingface.co/tensorblock/gte-Qwen2-1.5B-instruct-GGUF/blob/main/gte-Qwen2-1.5B-instruct-Q4_0.gguf) | Q4_0 | 0.992 GB | legacy; small, very high quality loss - prefer using Q3_K_M | | [gte-Qwen2-1.5B-instruct-Q4_K_S.gguf](https://huggingface.co/tensorblock/gte-Qwen2-1.5B-instruct-GGUF/blob/main/gte-Qwen2-1.5B-instruct-Q4_K_S.gguf) | Q4_K_S | 0.997 GB | small, greater quality loss | | [gte-Qwen2-1.5B-instruct-Q4_K_M.gguf](https://huggingface.co/tensorblock/gte-Qwen2-1.5B-instruct-GGUF/blob/main/gte-Qwen2-1.5B-instruct-Q4_K_M.gguf) | Q4_K_M | 1.040 GB | medium, balanced quality - recommended | | [gte-Qwen2-1.5B-instruct-Q5_0.gguf](https://huggingface.co/tensorblock/gte-Qwen2-1.5B-instruct-GGUF/blob/main/gte-Qwen2-1.5B-instruct-Q5_0.gguf) | Q5_0 | 1.172 GB | legacy; medium, balanced quality - prefer using Q4_K_M | | 
[gte-Qwen2-1.5B-instruct-Q5_K_S.gguf](https://huggingface.co/tensorblock/gte-Qwen2-1.5B-instruct-GGUF/blob/main/gte-Qwen2-1.5B-instruct-Q5_K_S.gguf) | Q5_K_S | 1.172 GB | large, low quality loss - recommended | | [gte-Qwen2-1.5B-instruct-Q5_K_M.gguf](https://huggingface.co/tensorblock/gte-Qwen2-1.5B-instruct-GGUF/blob/main/gte-Qwen2-1.5B-instruct-Q5_K_M.gguf) | Q5_K_M | 1.197 GB | large, very low quality loss - recommended | | [gte-Qwen2-1.5B-instruct-Q6_K.gguf](https://huggingface.co/tensorblock/gte-Qwen2-1.5B-instruct-GGUF/blob/main/gte-Qwen2-1.5B-instruct-Q6_K.gguf) | Q6_K | 1.363 GB | very large, extremely low quality loss | | [gte-Qwen2-1.5B-instruct-Q8_0.gguf](https://huggingface.co/tensorblock/gte-Qwen2-1.5B-instruct-GGUF/blob/main/gte-Qwen2-1.5B-instruct-Q8_0.gguf) | Q8_0 | 1.764 GB | very large, extremely low quality loss - not recommended |

## Downloading instruction

### Command line

First, install the Hugging Face Hub CLI:

```shell
pip install -U "huggingface_hub[cli]"
```

Then, download an individual model file to a local directory:

```shell
huggingface-cli download tensorblock/gte-Qwen2-1.5B-instruct-GGUF --include "gte-Qwen2-1.5B-instruct-Q2_K.gguf" --local-dir MY_LOCAL_DIR
```

If you want to download multiple model files matching a pattern (e.g., `*Q4_K*gguf`), you can try:

```shell
huggingface-cli download tensorblock/gte-Qwen2-1.5B-instruct-GGUF --local-dir MY_LOCAL_DIR --local-dir-use-symlinks False --include='*Q4_K*gguf'
```
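If you prefer to script the download instead of using the CLI, the same files can be fetched with the `huggingface_hub` Python API. The snippet below is a small sketch mirroring the commands above; the chosen quant file and `MY_LOCAL_DIR` are placeholders you would swap for your own selection.

```python
# Programmatic download of a single GGUF quant via the huggingface_hub API.
# The filename is one of the quants listed in the table above; change it and
# the local directory to whatever you actually want.
from huggingface_hub import hf_hub_download

local_path = hf_hub_download(
    repo_id="tensorblock/gte-Qwen2-1.5B-instruct-GGUF",
    filename="gte-Qwen2-1.5B-instruct-Q4_K_M.gguf",
    local_dir="MY_LOCAL_DIR",
)
print(local_path)  # absolute path of the downloaded .gguf file
```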
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
abhijithneilabraham/longformer_covid_qa
abhijithneilabraham
question-answering
[ "transformers", "pytorch", "longformer", "question-answering", "endpoints_compatible", "region:us" ]
2022-03-02T23:29:05
2021-05-13T19:09:22
113
0
--- {} --- # Dataset --- --- datasets: - covid_qa_deepset --- --- Covid 19 question answering data obtained from [covid_qa_deepset](https://huggingface.co/datasets/covid_qa_deepset). # Original Repository Repository for the fine tuning, inference and evaluation scripts can be found [here](https://github.com/abhijithneilabraham/Covid-QA). # Model in action ``` import torch from transformers import AutoTokenizer, AutoModelForQuestionAnswering tokenizer = AutoTokenizer.from_pretrained("abhijithneilabraham/longformer_covid_qa") model = AutoModelForQuestionAnswering.from_pretrained("abhijithneilabraham/longformer_covid_qa") question = "In this way, what do the mRNA-destabilising RBPs constitute ?" text = """ In this way, mRNA-destabilising RBPs constitute a 'brake' on the immune system, which may ultimately be toggled therapeutically. I anticipate continued efforts in this area will lead to new methods of regaining control over inflammation in autoimmunity, selectively enhancing immunity in immunotherapy, and modulating RNA synthesis and virus replication during infection. Another mRNA under post-transcriptional regulation by Regnase-1 and Roquin is Furin, which encodes a conserved proprotein convertase crucial in human health and disease. Furin, along with other PCSK family members, is widely implicated in immune regulation, cancer and the entry, maturation or release of a broad array of evolutionarily diverse viruses including human papillomavirus (HPV), influenza (IAV), Ebola (EboV), dengue (DenV) and human immunodeficiency virus (HIV). Here, Braun and Sauter review the roles of furin in these processes, as well as the history and future of furin-targeting therapeutics. 7 They also discuss their recent work revealing how two IFN-cinducible factors exhibit broad-spectrum inhibition of IAV, measles (MV), zika (ZikV) and HIV by suppressing furin activity. 8 Over the coming decade, I expect to see an ever-finer spatiotemporal resolution of host-oriented therapies to achieve safe, effective and broad-spectrum yet costeffective therapies for clinical use. The increasing abundance of affordable, sensitive, high-throughput genome sequencing technologies has led to a recent boom in metagenomics and the cataloguing of the microbiome of our world. The MinION nanopore sequencer is one of the latest innovations in this space, enabling direct sequencing in a miniature form factor with only minimal sample preparation and a consumer-grade laptop computer. Nakagawa and colleagues here report on their latest experiments using this system, further improving its performance for use in resource-poor contexts for meningitis diagnoses. 9 While direct sequencing of viral genomic RNA is challenging, this system was recently used to directly sequence an RNA virus genome (IAV) for the first time. 10 I anticipate further improvements in the performance of such devices over the coming decade will transform virus surveillance efforts, the importance of which was underscored by the recent EboV and novel coronavirus (nCoV / COVID-19) outbreaks, enabling rapid deployment of antiviral treatments that take resistance-conferring mutations into account. Decades of basic immunology research have provided a near-complete picture of the main armaments in the human antiviral arsenal. Nevertheless, this focus on mammalian defences and pathologies has sidelined examination of the types and roles of viruses and antiviral defences that exist throughout our biosphere. 
One case in point is the CRISPR/Cas antiviral immune system of prokaryotes, which is now repurposed as a revolutionary gene-editing biotechnology in plants and animals. 11 Another is the ancient lineage of nucleocytosolic large DNA viruses (NCLDVs), which are emerging human pathogens that possess enormous genomes of up to several megabases in size encoding hundreds of proteins with unique and unknown functions. 12 Moreover, hundreds of human-and avian-infective viruses such as IAV strain H5N1 are known, but recent efforts indicate the true number may be in the millions and many harbour zoonotic potential. 13 It is increasingly clear that host-virus interactions have generated truly vast yet poorly understood and untapped biodiversity. Closing this Special Feature, Watanabe and Kawaoka elaborate on neo-virology, an emerging field engaged in cataloguing and characterising this biodiversity through a global consortium. 14 I predict these efforts will unlock a vast wealth of currently unexplored biodiversity, leading to biotechnologies and treatments that leverage the host-virus interactions developed throughout evolution. When biomedical innovations fall into the 'Valley of Death', patients who are therefore not reached all too often fall with them. Being entrusted with the resources and expectation to conceive, deliver and communicate dividends to society is both cherished and eagerly pursued at every stage of our careers. Nevertheless, the road to research translation is winding and is built on a foundation of basic research. Supporting industry-academia collaboration and nurturing talent and skills in the Indo-Pacific region are two of the four pillars of the National Innovation and Science Agenda. 2 These frame Australia's Medical Research and Innovation Priorities, which include antimicrobial resistance, global health and health security, drug repurposing and translational research infrastructure, 15 capturing many of the key elements of this CTI Special Feature. Establishing durable international relationships that integrate diverse expertise is essential to delivering these outcomes. To this end, NHMRC has recently taken steps under the International Engagement Strategy 16 to increase cooperation with its counterparts overseas. These include the Japan Agency for Medical Research and Development (AMED), tasked with translating the biomedical research output of that country. Given the reciprocal efforts at accelerating bilateral engagement currently underway, 17 the prospects for new areas of international cooperation and mobility have never been more exciting nor urgent. With the above in mind, all contributions to this CTI Special Feature I have selected from research presented by fellow invitees to the 2018 Awaji International Forum on Infection and Immunity (AIFII) and 2017 Consortium of Biological Sciences (ConBio) conferences in Japan. Both Australia and Japan have strong traditions in immunology and related disciplines, and I predict that the quantity, quality and importance of our bilateral cooperation will accelerate rapidly over the short to medium term. By expanding and cooperatively leveraging our respective research strengths, our efforts may yet solve the many pressing disease, cost and other sustainability issues of our time. 
""" encoding = tokenizer(question, text, return_tensors="pt") input_ids = encoding["input_ids"] # default is local attention everywhere # the forward method will automatically set global attention on question tokens attention_mask = encoding["attention_mask"] start_scores, end_scores = model(input_ids, attention_mask=attention_mask) all_tokens = tokenizer.convert_ids_to_tokens(input_ids[0].tolist()) answer_tokens = all_tokens[torch.argmax(start_scores) :torch.argmax(end_scores)+1] answer = tokenizer.decode(tokenizer.convert_tokens_to_ids(answer_tokens)) # output => a 'brake' on the immune system ```
[ "QUESTION_ANSWERING", "TRANSLATION" ]
[ "CAS" ]
ayjays132/QNetworkGPT2Large
ayjays132
text-generation
[ "transformers", "pytorch", "gpt2", "text-generation", "en", "dataset:vicgalle/alpaca-gpt4", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
2024-01-03T06:03:18
2024-03-28T10:15:20
113
1
--- datasets: - vicgalle/alpaca-gpt4 language: - en library_name: transformers license: apache-2.0 metrics: - bleu - accuracy pipeline_tag: text-generation model_type: GPT2LMHeadModel architectures: - GPT2LMHeadModel model_filename: pytorch_model.bin config: activation_function: gelu_new attn_pdrop: 0.1 bos_token_id: 50256 embd_pdrop: 0.1 eos_token_id: 50256 initializer_range: 0.02 layer_norm_epsilon: 1e-05 n_ctx: 2048 n_embd: 2048 n_head: 16 n_layer: 24 n_positions: 2048 n_special: 0 predict_special_tokens: true resid_pdrop: 0.1 summary_first_dropout: 0.1 summary_proj_to_labels: true summary_type: cls_index summary_use_proj: true task_specific_params: text-generation: do_sample: true max_length: 200 vocab_size: 32101 --- # QNetworkGPT2: Reinventing Text Generation with AI 📝🤖 ![Text Generation](https://static.vecteezy.com/system/resources/previews/023/477/674/non_2x/ai-generative-blue-red-ink-splash-illustration-free-png.png) --- ## Hyperameters used Here's a consolidated list of hyperparameters for your QNetworkGPT2 RL model: - `input_dim`: Input dimension for the RL agent. - `output_dim`: Output dimension for the RL agent. - `hidden_dim`: Hidden dimension for the RL agent. - `num_episodes`: Number of training episodes. - `generate_interval`: Interval for text generation during training. - `load_path`: Path to load a pre-trained model. - `model_name`: GPT-2 model architecture name. - `max_new_tokens`: Maximum new tokens allowed during text generation. - `max_length`: Maximum sequence length for input data. - `sequence_length`: Length of sequences in the dataset. - `batch_size`: Batch size for training. - `learning_rate`: Learning rate for optimization. - `gamma`: Discount factor for rewards. - `clip_epsilon`: Epsilon value for policy loss clipping. - `entropy_beta`: Beta value for entropy regularization. - `epsilon_start`: Initial epsilon for epsilon-greedy exploration. - `epsilon_end`: Minimum epsilon value. - `epsilon_decay`: Epsilon decay rate. - `heuristic_fn`: Heuristic function for action selection. - `max_new_tokens`: Maximum new tokens allowed during text generation. - `save_path`: Path to save the trained model. Researchers can use these hyperparameters to configure and train their QNetworkGPT2 RL models effectively for text generation tasks. --- --- ## Overview QNetworkGPT2 is an extraordinary AI model that marries Reinforcement Learning (RL) with the power of the GPT-2 language model to create impressive text generation experiences. 🚀 ## Capabilities ### 1. Ultimate Flexibility - Craft RL agents for diverse text generation tasks. - Customize hyperparameters effortlessly. - Harness the brilliance of GPT-2 for text generation magic. ### 2. Q-Network for Mastery - Unleash the QNetwork class for Q-learning in text generation. - Revel in its multi-layer neural network architecture with residual connections and strategic dropout rates. - Empower your model with heuristic functions for ingenious action selection. ### 3. PPO Algorithm - Embrace the Proximal Policy Optimization (PPO) algorithm for supreme policy updates. - Sculpt policies with the wisdom of experiences and rewards. ### 4. Tailored RL Environment - Tailor-make your own RL environment for text generation quests. - Reward the AI with BLEU scores and semantic similarity. - Dance through text generation steps with episode-ending conditions. ### 5. Replay Buffer and Memory - Store and summon experiences with grace in a replay buffer. - Command a replay memory class to oversee experiences like a pro. ### 6. 
Epsilon-Greedy Exploration - The agent employs epsilon-greedy exploration for marvelous discoveries. ### 7. Target Network for Rock-Solid Stability - Keep target networks in check for unwavering stability during Q-learning escapades. --- ## How It Operates 1. Birth an RL Agent, fine-tuned to your desires. 2. Train the agent using PPO magic or embrace Q-learning for epic journeys. 3. Birth text from input data with the policy network. 4. Evaluate the text's quality using BLEU and semantic beauty. 5. Commence your custom RL environment for text generation marvels. --- ## Uniqueness and Epicness - The union of RL and GPT-2 for text generation mastery. - Advanced text tasks unfold gracefully with QNetwork and its heuristic powers. - The limitless canvas to create RL agents for every text challenge. - Rewarding text quality and semantic harmony with AI-calculated rewards. - The blueprint for a customizable and adaptable RL text generation paradise. --- ## Get Started Now 1. Forge your QNetworkGPT2 with personalized hyperparameters. 2. Unleash the potential with RL-based training. 3. Conjure text aligned with your task and dream. 4. Assess the text with metrics and demands. 5. Fine-tune and enhance for your text generation quest. --- # Load model directly from transformers import AutoTokenizer, AutoModelForCausalLM tokenizer = AutoTokenizer.from_pretrained("ayjays132/QNetworkGPT2") model = AutoModelForCausalLM.from_pretrained("ayjays132/QNetworkGPT2") # Set the EOS token as the padding token tokenizer.pad_token = tokenizer.eos_token # Initialize a conversation history conversation_history = [] # Start a conversation loop while True: # Get user input user_input = input("You: ") # Add user input to the conversation history conversation_history.append(user_input) # Concatenate the conversation strings conversation_text = " ".join(conversation_history) # Tokenize and pad the input input_ids = tokenizer.encode(conversation_text, return_tensors="pt", padding=True, truncation=True) # Generate a response output_ids = model.generate(input_ids, max_length=150, num_return_sequences=1, pad_token_id=tokenizer.eos_token_id) # Decode the generated response generated_response = tokenizer.decode(output_ids[0], skip_special_tokens=True) # Print the generated response print("Bot:", generated_response) # Add bot's response to the conversation history conversation_history.append(generated_response) --- ## Explore and Create QNetworkGPT2 is your ticket to exploring new horizons in text generation. From chatbots and content creation to storytelling and beyond, it's your AI companion for all text adventures. 🌟 Embrace innovation, adaptation, and expansion to conquer your unique text generation challenges. Your text generation revolution starts here! 📚🤖
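The card describes the QNetwork and its epsilon-greedy action selection only in prose. The snippet below is purely an illustrative sketch of what such a component could look like; the layer layout, names, and defaults are assumptions for clarity, not the author's actual implementation.

```python
# Illustrative only: a small Q-network with dropout plus epsilon-greedy action
# selection, loosely mirroring the hyperparameters named in the card
# (input_dim, output_dim, hidden_dim, epsilon_*). Not the author's original code.
import random
import torch
import torch.nn as nn

class QNetwork(nn.Module):
    def __init__(self, input_dim: int, output_dim: int, hidden_dim: int = 256, dropout: float = 0.1):
        super().__init__()
        self.net = nn.Sequential(
            nn.Linear(input_dim, hidden_dim),
            nn.ReLU(),
            nn.Dropout(dropout),
            nn.Linear(hidden_dim, hidden_dim),
            nn.ReLU(),
            nn.Dropout(dropout),
            nn.Linear(hidden_dim, output_dim),
        )

    def forward(self, state: torch.Tensor) -> torch.Tensor:
        return self.net(state)  # one Q-value per action

def select_action(q_net: QNetwork, state: torch.Tensor, epsilon: float) -> int:
    # Epsilon-greedy: explore with probability epsilon, otherwise exploit argmax Q.
    if random.random() < epsilon:
        return random.randrange(q_net.net[-1].out_features)
    with torch.no_grad():
        return int(q_net(state).argmax(dim=-1).item())
```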
[ "SEMANTIC_SIMILARITY" ]
[ "CRAFT" ]
GoToCompany/llama3-8b-cpt-sahabatai-v1-base
GoToCompany
null
[ "safetensors", "llama", "en", "id", "jv", "su", "arxiv:2309.06085", "base_model:aisingapore/llama3-8b-cpt-sea-lionv2.1-instruct", "base_model:finetune:aisingapore/llama3-8b-cpt-sea-lionv2.1-instruct", "license:llama3", "region:us" ]
2024-11-06T05:28:23
2024-11-06T05:28:23
113
2
--- base_model: - aisingapore/llama3-8b-cpt-sea-lionv2.1-instruct language: - en - id - jv - su license: llama3 --- # Llama3 8B CPT Sahabat-AI v1 **Sahabat-AI** (Indonesian language for “close friends”) is a collection of Large Language Models (LLMs) which has been pretrained and instruct-tuned for Indonesian language and its various dialects. Sahabat-AI ecosystem is co-initiated by Indonesian tech and telecommunication companies: GoTo Group and Indosat Ooredoo Hutchison. This is the card for the Llama3 8B CPT Sahabat-AI v1 base model which has undergone continued pre-training from the [AI Singapore-Llama-3-8B-Sea-Lion v2.1-Instruct](https://huggingface.co/aisingapore/llama3-8b-cpt-sea-lionv2.1-instruct) model. ## Model Details ### Model Description The continued pre-training data for Llama3 8B CPT Sahabat-AI v1 base model encompasses approximately 50B tokens. - **Co-initiated by:** PT GoTo Gojek Tokopedia Tbk, Indosat Ooredoo Hutchison - **Developed by:** PT GoTo Gojek Tokopedia Tbk, AI Singapore - **Model type:** Decoder - **Languages:** English, Indonesian, Javanese, Sundanese - **License:** [Llama3 Community License](https://huggingface.co/meta-llama/Meta-Llama-3-8B/blob/main/LICENSE) For tokenisation, the model employs the default tokenizer used in Llama-3-8B. The model has a context length of 8192. ### Benchmark Performance We evaluated Llama 8B CPT Sahabat-AI v1 base model on general language capabilities. #### General Language Capabilities For the evaluation of general language capabilities, we employed the - [SEA HELM (also known as BHASA) evaluation benchmark](https://arxiv.org/abs/2309.06085v2) across a variety of tasks. - These tasks include Question Answering (QA), Sentiment Analysis (Sentiment), Toxicity Detection (Toxicity), Translation in both directions (Eng>Lang & Lang>Eng), Abstractive Summarization (Summ), Causal Reasoning (Causal) and Natural Language Inference (NLI). - We also added support for Javanese and Sundanese for the BHASA tasks whenever applicable - and the common English tasks from the [HuggingFace LLM Leaderboard](https://huggingface.co/spaces/open-llm-leaderboard/open_llm_leaderboard). - These tasks consist of [IFEval, BBH, Math Lvl 5, GPQA, MuSR, and MMLU-PRO.](https://huggingface.co/docs/leaderboards/open_llm_leaderboard/about) - **Caveat**: Our results differ from the HuggingFace LLM Leaderboard because we have used [VLLM](https://docs.vllm.ai/en/latest/) as our inference platform. VLLM caps the context size at **4096 tokens** while HuggingFace was set to **8192 tokens**. Note: SEA HELM is implemented using prompts to elicit answers in a strict format. For all tasks, the model is expected to provide an answer tag from which the answer is automatically extracted. For tasks where options are provided, the answer should comprise one of the pre-defined options. The scores for each task is normalised to account for baseline performance due to random chance. The evaluation was done **five-shot** with native prompts on a sample of 100-1000 instances for each dataset. 
#### Results #### SEA HELM (also known as BHASA) <table style="border-collapse: collapse; width: 100%; font-size: 10px"> <tr> <th style="border: 2px solid black; padding: 8px; font-weight: bold;">Language / Model Name [Base]</th> <th style="border: 1px solid gray; padding: 8px;">Qwen2-7B</th> <th style="border: 1px solid gray; padding: 8px;">Qwen2.5-7B</th> <th style="border: 1px solid gray; padding: 8px;">Llama-3-8B</th> <th style="border: 1px solid gray; padding: 8px;">Llama-3.1-8B</th> <th style="border: 1px solid gray; padding: 8px;">sea-lionv2.1-8B</th> <th style="border: 1px solid gray; padding: 8px;">gemma-2-9B</th> <th style="border: 1px solid gray; padding: 8px;">sea-lionv3-9B</th> <th style="border: 2px solid black; padding: 8px;">sahabatai-v1-8B</th> <th style="border: 1px solid gray; padding: 8px;">sahabatai-v1-9B</th> </tr> <tr> <td style="border: 2px solid black; padding: 8px; font-weight: bold;">Overall (Bahasa Indonesia + Javanese + Sundanese)</td> <td style="border: 1px solid gray; padding: 8px;">42.776</td> <td style="border: 1px solid gray; padding: 8px;">46.245</td> <td style="border: 1px solid gray; padding: 8px;">49.160</td> <td style="border: 1px solid gray; padding: 8px;">49.577</td> <td style="border: 1px solid gray; padding: 8px;">48.602</td> <td style="border: 1px solid gray; padding: 8px;">58.972</td> <td style="border: 1px solid gray; padding: 8px;">60.913</td> <td style="border: 2px solid black; padding: 8px;">59.437</td> <td style="border: 1px solid gray; padding: 8px; background-color: lightgreen;">64.123</td> </tr> <tr> <td style="border: 2px solid black; padding: 8px; font-weight: bold;">Bahasa Indonesia</td> <td style="border: 1px solid gray; padding: 8px;">49.341</td> <td style="border: 1px solid gray; padding: 8px;">55.913</td> <td style="border: 1px solid gray; padding: 8px;">47.865</td> <td style="border: 1px solid gray; padding: 8px;">48.110</td> <td style="border: 1px solid gray; padding: 8px;">49.154</td> <td style="border: 1px solid gray; padding: 8px;">58.572</td> <td style="border: 1px solid gray; padding: 8px; background-color: lightgreen;">62.437</td> <td style="border: 2px solid black; padding: 8px;">53.454</td> <td style="border: 1px solid gray; padding: 8px;">60.040</td> </tr> <tr> <td style="border: 2px solid black; padding: 8px; font-weight: bold;">Javanese</td> <td style="border: 1px solid gray; padding: 8px;">42.774</td> <td style="border: 1px solid gray; padding: 8px;">45.917</td> <td style="border: 1px solid gray; padding: 8px;">54.627</td> <td style="border: 1px solid gray; padding: 8px;">55.215</td> <td style="border: 1px solid gray; padding: 8px;">52.728</td> <td style="border: 1px solid gray; padding: 8px;">63.760</td> <td style="border: 1px solid gray; padding: 8px;">63.363</td> <td style="border: 2px solid black; padding: 8px;">65.048</td> <td style="border: 1px solid gray; padding: 8px; background-color: lightgreen;">69.882</td> </tr> <tr> <td style="border: 2px solid black; padding: 8px; font-weight: bold;">Sundanese</td> <td style="border: 1px solid gray; padding: 8px;">36.213</td> <td style="border: 1px solid gray; padding: 8px;">36.905</td> <td style="border: 1px solid gray; padding: 8px;">44.988</td> <td style="border: 1px solid gray; padding: 8px;">45.407</td> <td style="border: 1px solid gray; padding: 8px;">43.925</td> <td style="border: 1px solid gray; padding: 8px;">54.583</td> <td style="border: 1px solid gray; padding: 8px;">56.939</td> <td style="border: 2px solid black; padding: 8px;">59.809</td> <td 
style="border: 1px solid gray; padding: 8px; background-color: lightgreen;">62.446</td> </tr> </table> #### English Results <table style="border-collapse: collapse; width: 100%; font-size: 10px"> <tr> <th style="border: 2px solid black; padding: 8px; font-weight: bold;">Model Name [BASE]</th> <th style="border: 1px solid gray; padding: 8px;">Qwen2-7B</th> <th style="border: 1px solid gray; padding: 8px;">Qwen2.5-7B</th> <th style="border: 1px solid gray; padding: 8px;">Llama-3-8B</th> <th style="border: 1px solid gray; padding: 8px;">Llama-3.1-8B</th> <th style="border: 1px solid gray; padding: 8px;">sea-lionv2.1-8B</th> <th style="border: 1px solid gray; padding: 8px;">gemma-2-9B</th> <th style="border: 1px solid gray; padding: 8px;">sea-lionv3-9B</th> <th style="border: 2px solid black; padding: 8px;">sahabatai-v1-8B</th> <th style="border: 1px solid gray; padding: 8px;">sahabatai-v1-9B</th> </tr> <tr> <td style="border: 2px solid black; padding: 8px; font-weight: bold;">Average</td> <td style="border: 1px solid gray; padding: 8px;">23.68</td> <td style="border: 1px solid gray; padding: 8px; background-color: lightgreen;">24.65</td> <td style="border: 1px solid gray; padding: 8px;">13.56</td> <td style="border: 1px solid gray; padding: 8px;">13.69</td> <td style="border: 1px solid gray; padding: 8px;">12.77</td> <td style="border: 1px solid gray; padding: 8px;">13.34</td> <td style="border: 1px solid gray; padding: 8px;">21.99</td> <td style="border: 2px solid black; padding: 8px;">13.92</td> <td style="border: 1px solid gray; padding: 8px;">19.62</td> </tr> </table> ## Training Details ### Data Llama3 8B CPT Sahabat-AI v1 base model was continued pre-trained on 50B tokens of the following data: | Data Source | Unique Tokens (B) | Multiplier | Total Tokens (B) | Percentage (%)| |---------------------------------------|:-----------------:|:----------:|:----------------:|:-------------:| | Dolma Refined Web | 9.5 | 1 | 9.5 | 19.20 | | Dolma arXiv | 0.6 | 1 | 0.6 | 1.20 | | Dolma Star Coder | 5.5 | 1 | 5.5 | 11.0 | | Dolma Semantic Scholar | 1.2 | 1 | 1.2 | 2.40 | | Dolma Reddit | 1.7 | 1 | 1.7 | 3.40 | | Dolma C4 | 1.4 | 1 | 1.4 | 2.80 | | Wiki* + News* - Indonesian | 1.0 | 1 | 1.0 | 2.00 | | SEA-LION Pile - Indonesian | 27.5 | 1 | 27.5 | 55.0 | | JV Pile - Javanese | 0.40 | 3.8 | 1.5 | 3.00 | | SU Pile - Sundanese | 0.20 | 3.8 | 0.75 | 1.50 | Note: - All token counts are counted using Llama3 tokenizer - Wiki* sources includes Wikipedia, Wiki Books, Wiki Source, Wiki Voyage and Fandom Wiki - News* sources includes VOA, Global Voices ### Infrastructure Llama 8B CPT Sahabat-AI v1 was trained using [MosaicML Composer](https://github.com/mosaicml/composer) on the following hardware: | Training Details | Llama3 8B CPT Sahabat-AI v1| |----------------------|:----------------------------:| | Nvidia H100 80GB GPU | 32 | | Training Duration | 5 days | ### Configuration | HyperParameter | Llama3 8B CPT Sahabat-AI v1| |-------------------|:----------------------------:| | Precision | bfloat16 | | Optimizer | decoupled_adamw | | Scheduler | weight_stable_decay | | Learning Rate | 1.0e-5 | | Global Batch Size | 256 | | Micro Batch Size | 1 | ## Call for Collaboration Sahabat-AI (Indonesian language for “close friends”) a **local open source Large Language Model (LLM) ecosystem in Indonesian language**, co-initiated by Indonesian tech and telecommunication companies: GoTo Group and Indosat Ooredoo Hutchison. 
### Infrastructure
Llama3 8B CPT Sahabat-AI v1 was trained using [MosaicML Composer](https://github.com/mosaicml/composer) on the following hardware:

| Training Details     | Llama3 8B CPT Sahabat-AI v1|
|----------------------|:----------------------------:|
| Nvidia H100 80GB GPU | 32                           |
| Training Duration    | 5 days                       |

### Configuration
| HyperParameter    | Llama3 8B CPT Sahabat-AI v1|
|-------------------|:----------------------------:|
| Precision         | bfloat16                     |
| Optimizer         | decoupled_adamw              |
| Scheduler         | weight_stable_decay          |
| Learning Rate     | 1.0e-5                       |
| Global Batch Size | 256                          |
| Micro Batch Size  | 1                            |

## Call for Collaboration

Sahabat-AI (Indonesian language for "close friends") is a **local open-source Large Language Model (LLM) ecosystem in the Indonesian language**, co-initiated by the Indonesian tech and telecommunication companies GoTo Group and Indosat Ooredoo Hutchison.

The Sahabat-AI ecosystem aims to empower Indonesians who want to develop AI-based services and applications in Bahasa Indonesia and its various local dialects.

We are supported by research centers and global tech experts such as AI Singapore and Tech Mahindra in training the model to gain general language understanding.

We also collaborate with leading Indonesian universities, such as the University of Indonesia, Gadjah Mada University, Bogor Institute of Agriculture, and Bandung Institute of Technology, as well as top Indonesian media groups, such as Kompas Gramedia Group and Republika, to train and enrich the model in Bahasa Indonesia, ensuring optimal provision of local context and cultural relevance.

We would like to invite **researchers, developers, and language enthusiasts** to actively contribute to the enhancement and expansion of Sahabat-AI. Your contributions can include:
- Identifying and reporting technical issues
- Sharing pre-training, instruction, and preference data
- Improving documentation usability
- Proposing and implementing new model evaluation tasks and metrics

Join us in shaping the future of Sahabat-AI by sharing your expertise and insights to make these models more accessible, accurate, and versatile. You can contribute your ideas through [this form](https://docs.google.com/forms/d/1_us969eQtEooYOn4XkvGkdP5VHOyCbO6L_sd9kTMnaA/edit).

## The Development Team (in ascending alphabetical order)

### AI Singapore
Chan Adwin<br>
Cheng Nicholas<br>
Choa Esther<br>
Huang Yuli<br>
Lau Wayne<br>
Lee Chwan Ren<br>
Leong Wai Yi<br>
Leong Wei Qi<br>
Limkonchotiwat Peerat<br>
Liu Bing Jie Darius<br>
Montalan Jann Railey<br>
Ng Boon Cheong Raymond<br>
Ngui Jian Gang<br>
Nguyen Thanh Ngan<br>
Ong Brandon<br>
Ong Tat-Wee David<br>
Ong Zhi Hao<br>
Rengarajan Hamsawardhini<br>
Siow Bryan<br>
Susanto Yosephine<br>
Tai Ngee Chia<br>
Tan Choon Meng<br>
Teng Walter<br>
Teo Eng Sipp Leslie<br>
Teo Wei Yi<br>
Tjhi William<br>
Yeo Yeow Tong<br>
Yong Xianbin<br>

### PT GoTo Gojek Tokopedia Tbk
Anissa Dininta<br>
Chau Shiau Ching<br>
Choiri Hendra Hadhil<br>
Goel Priyank<br>
Saini Ajay Kumar<br>
Shalev Ofir<br>
Tan Daryl<br>
Tep Kilian Rithi<br>
Tiwari Anupam<br>
Widjojo Daniel<br>

## Acknowledgements

AI Singapore is a national programme supported by the National Research Foundation, Singapore and hosted by the National University of Singapore. Any opinions, findings and conclusions or recommendations expressed in this material are those of the author(s) and do not reflect the views of the National Research Foundation, Singapore.

## Contact

For more info, please contact us using this [Sahabat-AI Inquiry Form](https://docs.google.com/forms/d/1_us969eQtEooYOn4XkvGkdP5VHOyCbO6L_sd9kTMnaA/edit).

## Disclaimer

This is the repository for the base model. The model has _not_ been aligned for safety. Developers and users should perform their own safety fine-tuning and related security measures. In no event shall the authors be held liable for any claim, damages, or other liability arising from the use of the released weights and code.
[ "QUESTION_ANSWERING", "TRANSLATION", "SUMMARIZATION" ]
[ "CHIA" ]
jburmeister/stella_en_400M_v5
jburmeister
sentence-similarity
[ "sentence-transformers", "pytorch", "safetensors", "new", "feature-extraction", "mteb", "transformers", "sentence-similarity", "custom_code", "arxiv:2205.13147", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
2025-01-06T19:04:16
2025-01-06T19:06:54
113
0
--- license: mit tags: - mteb - sentence-transformers - transformers - sentence-similarity model-index: - name: stella_en_400M_v5 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 92.35820895522387 - type: ap value: 70.81322736988783 - type: ap_weighted value: 70.81322736988783 - type: f1 value: 88.9505466159595 - type: f1_weighted value: 92.68630932872613 - type: main_score value: 92.35820895522387 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 97.1945 - type: ap value: 96.08192192244094 - type: ap_weighted value: 96.08192192244094 - type: f1 value: 97.1936887167346 - type: f1_weighted value: 97.1936887167346 - type: main_score value: 97.1945 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 59.528000000000006 - type: f1 value: 59.21016819840188 - type: f1_weighted value: 59.21016819840188 - type: main_score value: 59.528000000000006 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: main_score value: 64.24 - type: map_at_1 value: 40.398 - type: map_at_10 value: 56.215 - type: map_at_100 value: 56.833999999999996 - type: map_at_1000 value: 56.835 - type: map_at_20 value: 56.747 - type: map_at_3 value: 52.181 - type: map_at_5 value: 54.628 - type: mrr_at_1 value: 41.25177809388336 - type: mrr_at_10 value: 56.570762491815216 - type: mrr_at_100 value: 57.17548614361504 - type: mrr_at_1000 value: 57.176650626377466 - type: mrr_at_20 value: 57.08916253512566 - type: mrr_at_3 value: 52.47747747747754 - type: mrr_at_5 value: 54.94547178757718 - type: nauc_map_at_1000_diff1 value: 22.408086887100158 - type: nauc_map_at_1000_max value: -8.730419096847543 - type: nauc_map_at_1000_std value: -17.789262741255737 - type: nauc_map_at_100_diff1 value: 22.407371684274025 - type: nauc_map_at_100_max value: -8.732263549026266 - type: nauc_map_at_100_std value: -17.79550515579994 - type: nauc_map_at_10_diff1 value: 21.925005073301246 - type: nauc_map_at_10_max value: -8.990323944492134 - type: nauc_map_at_10_std value: -18.199246301671458 - type: nauc_map_at_1_diff1 value: 26.23276644969203 - type: nauc_map_at_1_max value: -12.376511389571245 - type: nauc_map_at_1_std value: -18.11411715207284 - type: nauc_map_at_20_diff1 value: 22.32455790850922 - type: nauc_map_at_20_max value: -8.664671547236034 - type: nauc_map_at_20_std value: -17.8290016125137 - type: nauc_map_at_3_diff1 value: 22.395462147465064 - type: nauc_map_at_3_max value: -8.206580750918844 - type: nauc_map_at_3_std value: -17.604490446911484 - type: nauc_map_at_5_diff1 value: 21.95307379904799 - type: nauc_map_at_5_max value: -8.03958102978443 - type: nauc_map_at_5_std value: -17.36578866595004 - type: nauc_mrr_at_1000_diff1 value: 20.124236798365587 - type: nauc_mrr_at_1000_max value: -9.587376069575898 - type: nauc_mrr_at_1000_std value: -17.79191612151833 - type: nauc_mrr_at_100_diff1 value: 20.123612603474033 - type: nauc_mrr_at_100_max value: -9.589187218607831 - type: nauc_mrr_at_100_std value: 
-17.7981617777748 - type: nauc_mrr_at_10_diff1 value: 19.723683875738075 - type: nauc_mrr_at_10_max value: -9.774151729178815 - type: nauc_mrr_at_10_std value: -18.168668675495162 - type: nauc_mrr_at_1_diff1 value: 23.945332059908132 - type: nauc_mrr_at_1_max value: -12.260461466152819 - type: nauc_mrr_at_1_std value: -18.007194922921148 - type: nauc_mrr_at_20_diff1 value: 20.04819461810257 - type: nauc_mrr_at_20_max value: -9.518368283588936 - type: nauc_mrr_at_20_std value: -17.831608149836136 - type: nauc_mrr_at_3_diff1 value: 19.8571785245832 - type: nauc_mrr_at_3_max value: -9.464375021240478 - type: nauc_mrr_at_3_std value: -17.728533927330453 - type: nauc_mrr_at_5_diff1 value: 19.670313652167827 - type: nauc_mrr_at_5_max value: -8.966372585728434 - type: nauc_mrr_at_5_std value: -17.468955834324817 - type: nauc_ndcg_at_1000_diff1 value: 21.863049281767417 - type: nauc_ndcg_at_1000_max value: -8.18698520924057 - type: nauc_ndcg_at_1000_std value: -17.634483364794804 - type: nauc_ndcg_at_100_diff1 value: 21.849924385738586 - type: nauc_ndcg_at_100_max value: -8.226437560889345 - type: nauc_ndcg_at_100_std value: -17.774648478087002 - type: nauc_ndcg_at_10_diff1 value: 19.888395590413573 - type: nauc_ndcg_at_10_max value: -8.968706085632382 - type: nauc_ndcg_at_10_std value: -19.31386964628115 - type: nauc_ndcg_at_1_diff1 value: 26.23276644969203 - type: nauc_ndcg_at_1_max value: -12.376511389571245 - type: nauc_ndcg_at_1_std value: -18.11411715207284 - type: nauc_ndcg_at_20_diff1 value: 21.38413342416933 - type: nauc_ndcg_at_20_max value: -7.636238194084164 - type: nauc_ndcg_at_20_std value: -17.946390844693028 - type: nauc_ndcg_at_3_diff1 value: 21.29169165029195 - type: nauc_ndcg_at_3_max value: -6.793840499730093 - type: nauc_ndcg_at_3_std value: -17.52359001586737 - type: nauc_ndcg_at_5_diff1 value: 20.238297656671364 - type: nauc_ndcg_at_5_max value: -6.424992706950072 - type: nauc_ndcg_at_5_std value: -17.082391132291356 - type: nauc_precision_at_1000_diff1 value: -7.05195108528572 - type: nauc_precision_at_1000_max value: 34.439879624882145 - type: nauc_precision_at_1000_std value: 68.72436351659353 - type: nauc_precision_at_100_diff1 value: -2.769464113932605 - type: nauc_precision_at_100_max value: 9.89562961226698 - type: nauc_precision_at_100_std value: -0.5880967482224028 - type: nauc_precision_at_10_diff1 value: 2.1371544726832323 - type: nauc_precision_at_10_max value: -11.93051325147756 - type: nauc_precision_at_10_std value: -30.83144187392059 - type: nauc_precision_at_1_diff1 value: 26.23276644969203 - type: nauc_precision_at_1_max value: -12.376511389571245 - type: nauc_precision_at_1_std value: -18.11411715207284 - type: nauc_precision_at_20_diff1 value: 3.780146814257504 - type: nauc_precision_at_20_max value: 17.06527540214615 - type: nauc_precision_at_20_std value: -20.36832563035565 - type: nauc_precision_at_3_diff1 value: 17.63894384012077 - type: nauc_precision_at_3_max value: -2.0220490624638887 - type: nauc_precision_at_3_std value: -17.285601413493918 - type: nauc_precision_at_5_diff1 value: 12.557855071944601 - type: nauc_precision_at_5_max value: 0.5840236463956658 - type: nauc_precision_at_5_std value: -15.827224420217846 - type: nauc_recall_at_1000_diff1 value: -7.051951085286463 - type: nauc_recall_at_1000_max value: 34.43987962487738 - type: nauc_recall_at_1000_std value: 68.724363516591 - type: nauc_recall_at_100_diff1 value: -2.769464113930314 - type: nauc_recall_at_100_max value: 9.895629612270017 - type: nauc_recall_at_100_std value: 
-0.58809674821745 - type: nauc_recall_at_10_diff1 value: 2.1371544726834495 - type: nauc_recall_at_10_max value: -11.930513251477253 - type: nauc_recall_at_10_std value: -30.83144187392047 - type: nauc_recall_at_1_diff1 value: 26.23276644969203 - type: nauc_recall_at_1_max value: -12.376511389571245 - type: nauc_recall_at_1_std value: -18.11411715207284 - type: nauc_recall_at_20_diff1 value: 3.7801468142575922 - type: nauc_recall_at_20_max value: 17.0652754021456 - type: nauc_recall_at_20_std value: -20.36832563035559 - type: nauc_recall_at_3_diff1 value: 17.63894384012074 - type: nauc_recall_at_3_max value: -2.02204906246383 - type: nauc_recall_at_3_std value: -17.28560141349386 - type: nauc_recall_at_5_diff1 value: 12.55785507194463 - type: nauc_recall_at_5_max value: 0.5840236463957296 - type: nauc_recall_at_5_std value: -15.827224420217856 - type: ndcg_at_1 value: 40.398 - type: ndcg_at_10 value: 64.24 - type: ndcg_at_100 value: 66.631 - type: ndcg_at_1000 value: 66.65100000000001 - type: ndcg_at_20 value: 66.086 - type: ndcg_at_3 value: 55.938 - type: ndcg_at_5 value: 60.370000000000005 - type: precision_at_1 value: 40.398 - type: precision_at_10 value: 8.962 - type: precision_at_100 value: 0.9950000000000001 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.836 - type: precision_at_3 value: 22.262 - type: precision_at_5 value: 15.519 - type: recall_at_1 value: 40.398 - type: recall_at_10 value: 89.616 - type: recall_at_100 value: 99.502 - type: recall_at_1000 value: 99.644 - type: recall_at_20 value: 96.72800000000001 - type: recall_at_3 value: 66.78500000000001 - type: recall_at_5 value: 77.596 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: main_score value: 55.1564333205451 - type: v_measure value: 55.1564333205451 - type: v_measure_std value: 14.696883012214512 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: main_score value: 49.823698316694795 - type: v_measure value: 49.823698316694795 - type: v_measure_std value: 14.951660654298186 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: main_score value: 66.15294503553424 - type: map value: 66.15294503553424 - type: mrr value: 78.53438420612935 - type: nAUC_map_diff1 value: 12.569697092717997 - type: nAUC_map_max value: 21.50670312412572 - type: nAUC_map_std value: 16.943786429229064 - type: nAUC_mrr_diff1 value: 15.590272897361238 - type: nAUC_mrr_max value: 34.96072022474653 - type: nAUC_mrr_std value: 21.649217605241045 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cosine_pearson value: 85.7824546319275 - type: cosine_spearman value: 83.29587385660628 - type: euclidean_pearson value: 84.58764190565167 - type: euclidean_spearman value: 83.30069324352772 - type: main_score value: 83.29587385660628 - type: manhattan_pearson value: 84.95996839947179 - type: manhattan_spearman value: 83.87480271054358 - type: pearson value: 85.7824546319275 - type: spearman value: 83.29587385660628 - task: type: Classification dataset: name: MTEB Banking77Classification type: 
mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 89.30194805194806 - type: f1 value: 89.26182507266391 - type: f1_weighted value: 89.26182507266391 - type: main_score value: 89.30194805194806 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: main_score value: 50.67972171889736 - type: v_measure value: 50.67972171889736 - type: v_measure_std value: 0.7687409980036303 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: main_score value: 45.80539715556144 - type: v_measure value: 45.80539715556144 - type: v_measure_std value: 0.9601346216579142 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: mteb/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: main_score value: 44.361250000000005 - type: map_at_1 value: 28.304499999999997 - type: map_at_10 value: 38.54841666666666 - type: map_at_100 value: 39.83141666666667 - type: map_at_1000 value: 39.944750000000006 - type: map_at_20 value: 39.25341666666667 - type: map_at_3 value: 35.406749999999995 - type: map_at_5 value: 37.15558333333333 - type: mrr_at_1 value: 34.09077232860122 - type: mrr_at_10 value: 43.15445393211421 - type: mrr_at_100 value: 43.98645286848257 - type: mrr_at_1000 value: 44.037631313469404 - type: mrr_at_20 value: 43.64045813249614 - type: mrr_at_3 value: 40.674138648480486 - type: mrr_at_5 value: 42.106251182620255 - type: nauc_map_at_1000_diff1 value: 46.250011739434996 - type: nauc_map_at_1000_max value: 30.13664446260598 - type: nauc_map_at_1000_std value: 5.422301791618935 - type: nauc_map_at_100_diff1 value: 46.253631351999395 - type: nauc_map_at_100_max value: 30.12612918885181 - type: nauc_map_at_100_std value: 5.367077019987172 - type: nauc_map_at_10_diff1 value: 46.328171341741346 - type: nauc_map_at_10_max value: 29.80274612581464 - type: nauc_map_at_10_std value: 4.62996685176396 - type: nauc_map_at_1_diff1 value: 51.56118117729493 - type: nauc_map_at_1_max value: 27.94885243863768 - type: nauc_map_at_1_std value: 1.700366508927356 - type: nauc_map_at_20_diff1 value: 46.286750260299094 - type: nauc_map_at_20_max value: 29.979205290353278 - type: nauc_map_at_20_std value: 5.010588412441873 - type: nauc_map_at_3_diff1 value: 47.10018183619064 - type: nauc_map_at_3_max value: 29.062318206078753 - type: nauc_map_at_3_std value: 3.2235696254694197 - type: nauc_map_at_5_diff1 value: 46.41971733050039 - type: nauc_map_at_5_max value: 29.456798617695657 - type: nauc_map_at_5_std value: 4.0921691023077145 - type: nauc_mrr_at_1000_diff1 value: 45.88888977975723 - type: nauc_mrr_at_1000_max value: 32.162138978089544 - type: nauc_mrr_at_1000_std value: 6.2811943424217915 - type: nauc_mrr_at_100_diff1 value: 45.87480433011124 - type: nauc_mrr_at_100_max value: 32.16011334212834 - type: nauc_mrr_at_100_std value: 6.2865717772421785 - type: nauc_mrr_at_10_diff1 value: 45.849652904658825 - type: nauc_mrr_at_10_max value: 32.13847916232293 - type: nauc_mrr_at_10_std value: 6.105718728141999 - type: nauc_mrr_at_1_diff1 value: 51.013730325062156 - type: nauc_mrr_at_1_max value: 32.77457396492779 - type: nauc_mrr_at_1_std value: 4.415684893471724 - type: nauc_mrr_at_20_diff1 value: 
45.86663046255274 - type: nauc_mrr_at_20_max value: 32.15219360697865 - type: nauc_mrr_at_20_std value: 6.19603046412763 - type: nauc_mrr_at_3_diff1 value: 46.522376582423185 - type: nauc_mrr_at_3_max value: 32.18259009733714 - type: nauc_mrr_at_3_std value: 5.288000648220897 - type: nauc_mrr_at_5_diff1 value: 45.86611481369745 - type: nauc_mrr_at_5_max value: 32.14261639054921 - type: nauc_mrr_at_5_std value: 5.8811238177073735 - type: nauc_ndcg_at_1000_diff1 value: 44.5055097547565 - type: nauc_ndcg_at_1000_max value: 31.149682057975458 - type: nauc_ndcg_at_1000_std value: 8.157937194901333 - type: nauc_ndcg_at_100_diff1 value: 44.12398363638596 - type: nauc_ndcg_at_100_max value: 30.878064321409994 - type: nauc_ndcg_at_100_std value: 8.40493441452808 - type: nauc_ndcg_at_10_diff1 value: 44.200093505221474 - type: nauc_ndcg_at_10_max value: 30.15267107733158 - type: nauc_ndcg_at_10_std value: 6.407495361566107 - type: nauc_ndcg_at_1_diff1 value: 51.013730325062156 - type: nauc_ndcg_at_1_max value: 32.77457396492779 - type: nauc_ndcg_at_1_std value: 4.415684893471724 - type: nauc_ndcg_at_20_diff1 value: 44.16988321564116 - type: nauc_ndcg_at_20_max value: 30.333532500651213 - type: nauc_ndcg_at_20_std value: 7.10024701386895 - type: nauc_ndcg_at_3_diff1 value: 45.35982873879988 - type: nauc_ndcg_at_3_max value: 30.288312457948702 - type: nauc_ndcg_at_3_std value: 4.653900898293395 - type: nauc_ndcg_at_5_diff1 value: 44.324558115380185 - type: nauc_ndcg_at_5_max value: 30.048149698941373 - type: nauc_ndcg_at_5_std value: 5.6684459618413205 - type: nauc_precision_at_1000_diff1 value: -7.282175798304458 - type: nauc_precision_at_1000_max value: 7.820142031765352 - type: nauc_precision_at_1000_std value: 11.736131836431172 - type: nauc_precision_at_100_diff1 value: 1.0222940256506976 - type: nauc_precision_at_100_max value: 16.12346497070298 - type: nauc_precision_at_100_std value: 18.202607395247874 - type: nauc_precision_at_10_diff1 value: 18.289439185857837 - type: nauc_precision_at_10_max value: 26.116517399154375 - type: nauc_precision_at_10_std value: 13.921214069982302 - type: nauc_precision_at_1_diff1 value: 51.013730325062156 - type: nauc_precision_at_1_max value: 32.77457396492779 - type: nauc_precision_at_1_std value: 4.415684893471724 - type: nauc_precision_at_20_diff1 value: 12.365165405210886 - type: nauc_precision_at_20_max value: 22.946297258937367 - type: nauc_precision_at_20_std value: 16.13862870358933 - type: nauc_precision_at_3_diff1 value: 32.063423642849685 - type: nauc_precision_at_3_max value: 30.140965811989407 - type: nauc_precision_at_3_std value: 8.501746262550146 - type: nauc_precision_at_5_diff1 value: 24.777203357717948 - type: nauc_precision_at_5_max value: 28.401579566848472 - type: nauc_precision_at_5_std value: 11.643246774390914 - type: nauc_recall_at_1000_diff1 value: 30.04216463401409 - type: nauc_recall_at_1000_max value: 34.98067760563842 - type: nauc_recall_at_1000_std value: 48.01453905250591 - type: nauc_recall_at_100_diff1 value: 31.193415507513972 - type: nauc_recall_at_100_max value: 28.69740149270981 - type: nauc_recall_at_100_std value: 25.20960758920368 - type: nauc_recall_at_10_diff1 value: 36.18870823636506 - type: nauc_recall_at_10_max value: 26.005625231341238 - type: nauc_recall_at_10_std value: 8.891983977041376 - type: nauc_recall_at_1_diff1 value: 51.56118117729493 - type: nauc_recall_at_1_max value: 27.94885243863768 - type: nauc_recall_at_1_std value: 1.700366508927356 - type: nauc_recall_at_20_diff1 value: 34.93996118564803 - type: 
nauc_recall_at_20_max value: 26.149961715956138 - type: nauc_recall_at_20_std value: 12.0657502367633 - type: nauc_recall_at_3_diff1 value: 40.80743946709512 - type: nauc_recall_at_3_max value: 26.443127773025783 - type: nauc_recall_at_3_std value: 3.7011448604241477 - type: nauc_recall_at_5_diff1 value: 37.608535157055776 - type: nauc_recall_at_5_max value: 26.168016189725822 - type: nauc_recall_at_5_std value: 6.344191564595316 - type: ndcg_at_1 value: 34.09083333333333 - type: ndcg_at_10 value: 44.361250000000005 - type: ndcg_at_100 value: 49.586166666666664 - type: ndcg_at_1000 value: 51.623583333333336 - type: ndcg_at_20 value: 46.40158333333333 - type: ndcg_at_3 value: 39.27733333333333 - type: ndcg_at_5 value: 41.662333333333336 - type: precision_at_1 value: 34.09083333333333 - type: precision_at_10 value: 7.957000000000002 - type: precision_at_100 value: 1.2521666666666669 - type: precision_at_1000 value: 0.16125 - type: precision_at_20 value: 4.6755 - type: precision_at_3 value: 18.402083333333334 - type: precision_at_5 value: 13.104333333333335 - type: recall_at_1 value: 28.304499999999997 - type: recall_at_10 value: 56.80666666666667 - type: recall_at_100 value: 79.66208333333334 - type: recall_at_1000 value: 93.6455 - type: recall_at_20 value: 64.2495 - type: recall_at_3 value: 42.431333333333335 - type: recall_at_5 value: 48.665416666666665 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: main_score value: 43.525999999999996 - type: map_at_1 value: 19.291 - type: map_at_10 value: 33.471000000000004 - type: map_at_100 value: 35.388999999999996 - type: map_at_1000 value: 35.568 - type: map_at_20 value: 34.496 - type: map_at_3 value: 28.713 - type: map_at_5 value: 31.384 - type: mrr_at_1 value: 43.77850162866449 - type: mrr_at_10 value: 56.28576598934912 - type: mrr_at_100 value: 56.8588518168194 - type: mrr_at_1000 value: 56.878236725973544 - type: mrr_at_20 value: 56.6409328120183 - type: mrr_at_3 value: 53.56134636264935 - type: mrr_at_5 value: 55.27795874049956 - type: nauc_map_at_1000_diff1 value: 27.262513153363876 - type: nauc_map_at_1000_max value: 40.099398684385584 - type: nauc_map_at_1000_std value: 18.847812394005512 - type: nauc_map_at_100_diff1 value: 27.238993503030745 - type: nauc_map_at_100_max value: 40.07730434492169 - type: nauc_map_at_100_std value: 18.795349250833684 - type: nauc_map_at_10_diff1 value: 27.70929180366227 - type: nauc_map_at_10_max value: 39.55987024970173 - type: nauc_map_at_10_std value: 17.214881544648996 - type: nauc_map_at_1_diff1 value: 43.34155892182403 - type: nauc_map_at_1_max value: 38.23324890148018 - type: nauc_map_at_1_std value: 6.0781444393516075 - type: nauc_map_at_20_diff1 value: 27.311577477800103 - type: nauc_map_at_20_max value: 39.624414083413456 - type: nauc_map_at_20_std value: 18.149811054163287 - type: nauc_map_at_3_diff1 value: 30.475965062734367 - type: nauc_map_at_3_max value: 38.49324825043695 - type: nauc_map_at_3_std value: 13.357656038648487 - type: nauc_map_at_5_diff1 value: 28.425110095017747 - type: nauc_map_at_5_max value: 39.017894870747796 - type: nauc_map_at_5_std value: 15.543817194122564 - type: nauc_mrr_at_1000_diff1 value: 33.16689354701644 - type: nauc_mrr_at_1000_max value: 41.70755363247148 - type: nauc_mrr_at_1000_std value: 24.61667417463176 - type: nauc_mrr_at_100_diff1 value: 33.147229262917506 - type: nauc_mrr_at_100_max value: 41.712455697170725 - type: 
nauc_mrr_at_100_std value: 24.6418922043652 - type: nauc_mrr_at_10_diff1 value: 32.94185191112572 - type: nauc_mrr_at_10_max value: 41.64272730141954 - type: nauc_mrr_at_10_std value: 24.663391015702707 - type: nauc_mrr_at_1_diff1 value: 39.571969559016395 - type: nauc_mrr_at_1_max value: 39.396249211263495 - type: nauc_mrr_at_1_std value: 16.984149923258357 - type: nauc_mrr_at_20_diff1 value: 33.10040770334742 - type: nauc_mrr_at_20_max value: 41.807565560083034 - type: nauc_mrr_at_20_std value: 24.8064180365271 - type: nauc_mrr_at_3_diff1 value: 33.065406161485704 - type: nauc_mrr_at_3_max value: 41.049510969934694 - type: nauc_mrr_at_3_std value: 23.18371458928609 - type: nauc_mrr_at_5_diff1 value: 33.2389593543916 - type: nauc_mrr_at_5_max value: 41.629486918949915 - type: nauc_mrr_at_5_std value: 24.5777253036149 - type: nauc_ndcg_at_1000_diff1 value: 25.868840609197637 - type: nauc_ndcg_at_1000_max value: 42.79564910784761 - type: nauc_ndcg_at_1000_std value: 27.035091271680113 - type: nauc_ndcg_at_100_diff1 value: 25.019789319579942 - type: nauc_ndcg_at_100_max value: 42.482345143533735 - type: nauc_ndcg_at_100_std value: 26.76872010731345 - type: nauc_ndcg_at_10_diff1 value: 25.949464660653238 - type: nauc_ndcg_at_10_max value: 40.79769544643906 - type: nauc_ndcg_at_10_std value: 22.486116508973204 - type: nauc_ndcg_at_1_diff1 value: 39.571969559016395 - type: nauc_ndcg_at_1_max value: 39.396249211263495 - type: nauc_ndcg_at_1_std value: 16.984149923258357 - type: nauc_ndcg_at_20_diff1 value: 25.173455685962214 - type: nauc_ndcg_at_20_max value: 40.88873540662413 - type: nauc_ndcg_at_20_std value: 24.4451041955519 - type: nauc_ndcg_at_3_diff1 value: 28.185416070726333 - type: nauc_ndcg_at_3_max value: 39.10600031163912 - type: nauc_ndcg_at_3_std value: 18.42694044215541 - type: nauc_ndcg_at_5_diff1 value: 27.112647584005583 - type: nauc_ndcg_at_5_max value: 40.154045682322526 - type: nauc_ndcg_at_5_std value: 20.26822517176828 - type: nauc_precision_at_1000_diff1 value: -16.42087927044017 - type: nauc_precision_at_1000_max value: 3.5326295053913 - type: nauc_precision_at_1000_std value: 24.406810708493197 - type: nauc_precision_at_100_diff1 value: -12.17648135724982 - type: nauc_precision_at_100_max value: 15.895489260126183 - type: nauc_precision_at_100_std value: 32.48346122610907 - type: nauc_precision_at_10_diff1 value: -1.2493131347748072 - type: nauc_precision_at_10_max value: 26.409459305604376 - type: nauc_precision_at_10_std value: 31.115432019300016 - type: nauc_precision_at_1_diff1 value: 39.571969559016395 - type: nauc_precision_at_1_max value: 39.396249211263495 - type: nauc_precision_at_1_std value: 16.984149923258357 - type: nauc_precision_at_20_diff1 value: -6.597509397240593 - type: nauc_precision_at_20_max value: 21.461984620659695 - type: nauc_precision_at_20_std value: 32.9450259748889 - type: nauc_precision_at_3_diff1 value: 9.46378764865453 - type: nauc_precision_at_3_max value: 32.03650819375425 - type: nauc_precision_at_3_std value: 26.489382638510765 - type: nauc_precision_at_5_diff1 value: 3.5987036728169537 - type: nauc_precision_at_5_max value: 30.633955978579703 - type: nauc_precision_at_5_std value: 30.532430088014443 - type: nauc_recall_at_1000_diff1 value: 10.714633106872254 - type: nauc_recall_at_1000_max value: 43.94958623961 - type: nauc_recall_at_1000_std value: 51.78914468954123 - type: nauc_recall_at_100_diff1 value: 9.63781472255557 - type: nauc_recall_at_100_max value: 38.50917465255336 - type: nauc_recall_at_100_std value: 37.78623984642377 
- type: nauc_recall_at_10_diff1 value: 16.480342820841688 - type: nauc_recall_at_10_max value: 35.982566867357406 - type: nauc_recall_at_10_std value: 23.30688188788895 - type: nauc_recall_at_1_diff1 value: 43.34155892182403 - type: nauc_recall_at_1_max value: 38.23324890148018 - type: nauc_recall_at_1_std value: 6.0781444393516075 - type: nauc_recall_at_20_diff1 value: 13.521048985146367 - type: nauc_recall_at_20_max value: 34.62462209239834 - type: nauc_recall_at_20_std value: 27.85924191501618 - type: nauc_recall_at_3_diff1 value: 23.57032748533523 - type: nauc_recall_at_3_max value: 36.32703197635613 - type: nauc_recall_at_3_std value: 15.730238734014337 - type: nauc_recall_at_5_diff1 value: 19.61387036368584 - type: nauc_recall_at_5_max value: 36.22030835529556 - type: nauc_recall_at_5_std value: 19.76310648649897 - type: ndcg_at_1 value: 43.779 - type: ndcg_at_10 value: 43.525999999999996 - type: ndcg_at_100 value: 50.138000000000005 - type: ndcg_at_1000 value: 52.991 - type: ndcg_at_20 value: 46.083 - type: ndcg_at_3 value: 38.002 - type: ndcg_at_5 value: 39.842 - type: precision_at_1 value: 43.779 - type: precision_at_10 value: 13.205 - type: precision_at_100 value: 2.051 - type: precision_at_1000 value: 0.259 - type: precision_at_20 value: 7.722999999999999 - type: precision_at_3 value: 28.903000000000002 - type: precision_at_5 value: 21.368000000000002 - type: recall_at_1 value: 19.291 - type: recall_at_10 value: 48.754 - type: recall_at_100 value: 70.97200000000001 - type: recall_at_1000 value: 86.611 - type: recall_at_20 value: 55.884 - type: recall_at_3 value: 34.101 - type: recall_at_5 value: 40.784 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: main_score value: 49.884 - type: map_at_1 value: 9.913 - type: map_at_10 value: 23.186999999999998 - type: map_at_100 value: 34.207 - type: map_at_1000 value: 36.318 - type: map_at_20 value: 27.419 - type: map_at_3 value: 15.656 - type: map_at_5 value: 18.945999999999998 - type: mrr_at_1 value: 75.75 - type: mrr_at_10 value: 82.16279761904761 - type: mrr_at_100 value: 82.48445635330299 - type: mrr_at_1000 value: 82.4870246719901 - type: mrr_at_20 value: 82.36203632968338 - type: mrr_at_3 value: 81.29166666666666 - type: mrr_at_5 value: 82.02916666666667 - type: nauc_map_at_1000_diff1 value: 17.0739966990996 - type: nauc_map_at_1000_max value: 28.440065298437133 - type: nauc_map_at_1000_std value: 20.83498154003865 - type: nauc_map_at_100_diff1 value: 17.75982086107111 - type: nauc_map_at_100_max value: 26.87850835673573 - type: nauc_map_at_100_std value: 18.350282298599275 - type: nauc_map_at_10_diff1 value: 17.15984258564116 - type: nauc_map_at_10_max value: 10.846179132675553 - type: nauc_map_at_10_std value: -6.263534464094614 - type: nauc_map_at_1_diff1 value: 24.014897777973694 - type: nauc_map_at_1_max value: -4.556638938723358 - type: nauc_map_at_1_std value: -22.7844467526989 - type: nauc_map_at_20_diff1 value: 16.3179372493187 - type: nauc_map_at_20_max value: 17.176378915498915 - type: nauc_map_at_20_std value: 1.9378637630340372 - type: nauc_map_at_3_diff1 value: 19.12786794046792 - type: nauc_map_at_3_max value: 0.09063919305677291 - type: nauc_map_at_3_std value: -16.713143158330492 - type: nauc_map_at_5_diff1 value: 18.76504725420023 - type: nauc_map_at_5_max value: 5.040867712207419 - type: nauc_map_at_5_std value: -12.382578318931165 - type: nauc_mrr_at_1000_diff1 value: 54.61266255011247 - type: 
nauc_mrr_at_1000_max value: 60.83961280977112 - type: nauc_mrr_at_1000_std value: 32.70429260443016 - type: nauc_mrr_at_100_diff1 value: 54.61346236538542 - type: nauc_mrr_at_100_max value: 60.8407974416647 - type: nauc_mrr_at_100_std value: 32.69272843993462 - type: nauc_mrr_at_10_diff1 value: 54.74633685810871 - type: nauc_mrr_at_10_max value: 61.084525933097865 - type: nauc_mrr_at_10_std value: 33.001220210025565 - type: nauc_mrr_at_1_diff1 value: 56.12708423835806 - type: nauc_mrr_at_1_max value: 58.9314540998289 - type: nauc_mrr_at_1_std value: 27.39422607651012 - type: nauc_mrr_at_20_diff1 value: 54.58896150245695 - type: nauc_mrr_at_20_max value: 60.890929983464815 - type: nauc_mrr_at_20_std value: 32.65559641276393 - type: nauc_mrr_at_3_diff1 value: 54.38229071443791 - type: nauc_mrr_at_3_max value: 59.987849044098596 - type: nauc_mrr_at_3_std value: 33.439813880719974 - type: nauc_mrr_at_5_diff1 value: 54.961790262449824 - type: nauc_mrr_at_5_max value: 61.17705173908951 - type: nauc_mrr_at_5_std value: 33.30939850734856 - type: nauc_ndcg_at_1000_diff1 value: 29.27465932507067 - type: nauc_ndcg_at_1000_max value: 47.952543312315214 - type: nauc_ndcg_at_1000_std value: 36.17132236391485 - type: nauc_ndcg_at_100_diff1 value: 28.63072328980134 - type: nauc_ndcg_at_100_max value: 41.460833419186564 - type: nauc_ndcg_at_100_std value: 27.157100358988135 - type: nauc_ndcg_at_10_diff1 value: 23.41488013023301 - type: nauc_ndcg_at_10_max value: 39.27798133072349 - type: nauc_ndcg_at_10_std value: 21.979241438928312 - type: nauc_ndcg_at_1_diff1 value: 46.12120543657642 - type: nauc_ndcg_at_1_max value: 47.28452124039853 - type: nauc_ndcg_at_1_std value: 19.799884708952543 - type: nauc_ndcg_at_20_diff1 value: 23.627669045115574 - type: nauc_ndcg_at_20_max value: 35.88225062457673 - type: nauc_ndcg_at_20_std value: 18.218628030529498 - type: nauc_ndcg_at_3_diff1 value: 25.37309228946118 - type: nauc_ndcg_at_3_max value: 40.64426332992231 - type: nauc_ndcg_at_3_std value: 24.608330645901482 - type: nauc_ndcg_at_5_diff1 value: 24.055798594999654 - type: nauc_ndcg_at_5_max value: 41.16180524175431 - type: nauc_ndcg_at_5_std value: 24.048305528761315 - type: nauc_precision_at_1000_diff1 value: -18.234943251015576 - type: nauc_precision_at_1000_max value: 0.48708502364659184 - type: nauc_precision_at_1000_std value: 2.4473601543134027 - type: nauc_precision_at_100_diff1 value: -3.0077810947381227 - type: nauc_precision_at_100_max value: 25.27249321108913 - type: nauc_precision_at_100_std value: 37.36575792126928 - type: nauc_precision_at_10_diff1 value: -0.2393778190297635 - type: nauc_precision_at_10_max value: 36.40513293547299 - type: nauc_precision_at_10_std value: 37.4827885766009 - type: nauc_precision_at_1_diff1 value: 56.12708423835806 - type: nauc_precision_at_1_max value: 58.9314540998289 - type: nauc_precision_at_1_std value: 27.39422607651012 - type: nauc_precision_at_20_diff1 value: -1.2010133229402933 - type: nauc_precision_at_20_max value: 34.117541814385966 - type: nauc_precision_at_20_std value: 39.13273254177449 - type: nauc_precision_at_3_diff1 value: 11.757378092198486 - type: nauc_precision_at_3_max value: 42.637962482588875 - type: nauc_precision_at_3_std value: 37.42465077352342 - type: nauc_precision_at_5_diff1 value: 7.233177203405101 - type: nauc_precision_at_5_max value: 43.1663582897407 - type: nauc_precision_at_5_std value: 38.848449220750055 - type: nauc_recall_at_1000_diff1 value: 27.33938551969145 - type: nauc_recall_at_1000_max value: 45.5614254479334 - type: 
nauc_recall_at_1000_std value: 50.58528916250458 - type: nauc_recall_at_100_diff1 value: 23.610383761920097 - type: nauc_recall_at_100_max value: 31.422168485847184 - type: nauc_recall_at_100_std value: 25.58649926458304 - type: nauc_recall_at_10_diff1 value: 14.62495111808408 - type: nauc_recall_at_10_max value: 7.4295041277681095 - type: nauc_recall_at_10_std value: -9.32297089600654 - type: nauc_recall_at_1_diff1 value: 24.014897777973694 - type: nauc_recall_at_1_max value: -4.556638938723358 - type: nauc_recall_at_1_std value: -22.7844467526989 - type: nauc_recall_at_20_diff1 value: 14.027862330014662 - type: nauc_recall_at_20_max value: 12.437478731690844 - type: nauc_recall_at_20_std value: -3.0740743798103676 - type: nauc_recall_at_3_diff1 value: 16.354018356566712 - type: nauc_recall_at_3_max value: -2.9812231240997917 - type: nauc_recall_at_3_std value: -18.27746460743442 - type: nauc_recall_at_5_diff1 value: 16.81486583473587 - type: nauc_recall_at_5_max value: 2.420128513974744 - type: nauc_recall_at_5_std value: -14.441820321214108 - type: ndcg_at_1 value: 63.87500000000001 - type: ndcg_at_10 value: 49.884 - type: ndcg_at_100 value: 54.738 - type: ndcg_at_1000 value: 61.635 - type: ndcg_at_20 value: 48.894999999999996 - type: ndcg_at_3 value: 54.287 - type: ndcg_at_5 value: 52.40899999999999 - type: precision_at_1 value: 75.75 - type: precision_at_10 value: 40.9 - type: precision_at_100 value: 13.139999999999999 - type: precision_at_1000 value: 2.533 - type: precision_at_20 value: 30.8 - type: precision_at_3 value: 57.667 - type: precision_at_5 value: 51.05 - type: recall_at_1 value: 9.913 - type: recall_at_10 value: 28.591 - type: recall_at_100 value: 61.017999999999994 - type: recall_at_1000 value: 83.383 - type: recall_at_20 value: 37.834 - type: recall_at_3 value: 17.049 - type: recall_at_5 value: 21.685 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 78.77499999999999 - type: f1 value: 73.74058240799386 - type: f1_weighted value: 79.78804377638227 - type: main_score value: 78.77499999999999 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: main_score value: 90.986 - type: map_at_1 value: 81.601 - type: map_at_10 value: 88.242 - type: map_at_100 value: 88.46000000000001 - type: map_at_1000 value: 88.472 - type: map_at_20 value: 88.375 - type: map_at_3 value: 87.237 - type: map_at_5 value: 87.85300000000001 - type: mrr_at_1 value: 87.81878187818782 - type: mrr_at_10 value: 92.20301196786335 - type: mrr_at_100 value: 92.24884236673292 - type: mrr_at_1000 value: 92.2496338899362 - type: mrr_at_20 value: 92.23112073283473 - type: mrr_at_3 value: 91.77417741774165 - type: mrr_at_5 value: 92.03970397039689 - type: nauc_map_at_1000_diff1 value: 56.54670664910505 - type: nauc_map_at_1000_max value: 33.08375749975477 - type: nauc_map_at_1000_std value: 2.7491595418252865 - type: nauc_map_at_100_diff1 value: 56.50887688686924 - type: nauc_map_at_100_max value: 33.075487189958494 - type: nauc_map_at_100_std value: 2.7675869969253375 - type: nauc_map_at_10_diff1 value: 56.08080806610569 - type: nauc_map_at_10_max value: 32.776972098819066 - type: nauc_map_at_10_std value: 2.5904846711290097 - type: nauc_map_at_1_diff1 value: 60.645344065853145 - type: nauc_map_at_1_max value: 31.232776777514797 - type: nauc_map_at_1_std 
value: -1.1946138176109171 - type: nauc_map_at_20_diff1 value: 56.28378454162355 - type: nauc_map_at_20_max value: 32.98207150385811 - type: nauc_map_at_20_std value: 2.8469814040214025 - type: nauc_map_at_3_diff1 value: 55.81958007095375 - type: nauc_map_at_3_max value: 31.602707711038313 - type: nauc_map_at_3_std value: 0.8117019292273401 - type: nauc_map_at_5_diff1 value: 55.706025752316535 - type: nauc_map_at_5_max value: 32.16032683604737 - type: nauc_map_at_5_std value: 1.8853201503498669 - type: nauc_mrr_at_1000_diff1 value: 75.4997173366251 - type: nauc_mrr_at_1000_max value: 41.49117135484116 - type: nauc_mrr_at_1000_std value: -2.0636172883680852 - type: nauc_mrr_at_100_diff1 value: 75.50118860648519 - type: nauc_mrr_at_100_max value: 41.49490161517194 - type: nauc_mrr_at_100_std value: -2.057024385178682 - type: nauc_mrr_at_10_diff1 value: 75.47295153099428 - type: nauc_mrr_at_10_max value: 41.55003304042536 - type: nauc_mrr_at_10_std value: -2.0353663198929253 - type: nauc_mrr_at_1_diff1 value: 76.632058433229 - type: nauc_mrr_at_1_max value: 39.754483718891656 - type: nauc_mrr_at_1_std value: -2.962241058101701 - type: nauc_mrr_at_20_diff1 value: 75.47221882396194 - type: nauc_mrr_at_20_max value: 41.50779280480839 - type: nauc_mrr_at_20_std value: -1.9620212266426307 - type: nauc_mrr_at_3_diff1 value: 75.5682297897137 - type: nauc_mrr_at_3_max value: 41.53543801506081 - type: nauc_mrr_at_3_std value: -3.391681195945978 - type: nauc_mrr_at_5_diff1 value: 75.37562775183947 - type: nauc_mrr_at_5_max value: 41.42028509006753 - type: nauc_mrr_at_5_std value: -2.418698675622726 - type: nauc_ndcg_at_1000_diff1 value: 59.364557011624 - type: nauc_ndcg_at_1000_max value: 35.4112238125149 - type: nauc_ndcg_at_1000_std value: 3.717516193303376 - type: nauc_ndcg_at_100_diff1 value: 58.55706703023122 - type: nauc_ndcg_at_100_max value: 35.352285999934594 - type: nauc_ndcg_at_100_std value: 4.273437944266781 - type: nauc_ndcg_at_10_diff1 value: 56.77422701267037 - type: nauc_ndcg_at_10_max value: 34.24909893882957 - type: nauc_ndcg_at_10_std value: 4.178151434006727 - type: nauc_ndcg_at_1_diff1 value: 76.632058433229 - type: nauc_ndcg_at_1_max value: 39.754483718891656 - type: nauc_ndcg_at_1_std value: -2.962241058101701 - type: nauc_ndcg_at_20_diff1 value: 57.27343398231262 - type: nauc_ndcg_at_20_max value: 34.7416626740278 - type: nauc_ndcg_at_20_std value: 4.955858766014002 - type: nauc_ndcg_at_3_diff1 value: 57.69267803121093 - type: nauc_ndcg_at_3_max value: 33.13744317023105 - type: nauc_ndcg_at_3_std value: 0.40380284030057023 - type: nauc_ndcg_at_5_diff1 value: 56.57461019113917 - type: nauc_ndcg_at_5_max value: 33.244657840804386 - type: nauc_ndcg_at_5_std value: 2.5121440827702046 - type: nauc_precision_at_1000_diff1 value: -14.54492513449718 - type: nauc_precision_at_1000_max value: -5.94552147573623 - type: nauc_precision_at_1000_std value: 1.2446209816057374 - type: nauc_precision_at_100_diff1 value: -15.452676132568344 - type: nauc_precision_at_100_max value: -3.760241749847617 - type: nauc_precision_at_100_std value: 4.623534605290865 - type: nauc_precision_at_10_diff1 value: -12.712908026086176 - type: nauc_precision_at_10_max value: 0.45241316994816805 - type: nauc_precision_at_10_std value: 7.849478570138391 - type: nauc_precision_at_1_diff1 value: 76.632058433229 - type: nauc_precision_at_1_max value: 39.754483718891656 - type: nauc_precision_at_1_std value: -2.962241058101701 - type: nauc_precision_at_20_diff1 value: -14.514618673172041 - type: nauc_precision_at_20_max 
value: -1.113635490621818 - type: nauc_precision_at_20_std value: 8.599811730457576 - type: nauc_precision_at_3_diff1 value: 6.1367799850003815 - type: nauc_precision_at_3_max value: 8.466271950897857 - type: nauc_precision_at_3_std value: 1.7458051543195068 - type: nauc_precision_at_5_diff1 value: -5.804548945783379 - type: nauc_precision_at_5_max value: 3.4060251839074818 - type: nauc_precision_at_5_std value: 5.583410511782371 - type: nauc_recall_at_1000_diff1 value: 19.329432953574095 - type: nauc_recall_at_1000_max value: 43.260442595158736 - type: nauc_recall_at_1000_std value: 53.89644660661804 - type: nauc_recall_at_100_diff1 value: 21.265326296051235 - type: nauc_recall_at_100_max value: 38.573000195373695 - type: nauc_recall_at_100_std value: 42.169391082152785 - type: nauc_recall_at_10_diff1 value: 29.785129558987432 - type: nauc_recall_at_10_max value: 28.379657867558034 - type: nauc_recall_at_10_std value: 21.132574624091973 - type: nauc_recall_at_1_diff1 value: 60.645344065853145 - type: nauc_recall_at_1_max value: 31.232776777514797 - type: nauc_recall_at_1_std value: -1.1946138176109171 - type: nauc_recall_at_20_diff1 value: 25.88845612373954 - type: nauc_recall_at_20_max value: 30.24785945821152 - type: nauc_recall_at_20_std value: 31.73911437468067 - type: nauc_recall_at_3_diff1 value: 42.2968464797395 - type: nauc_recall_at_3_max value: 26.494318009870018 - type: nauc_recall_at_3_std value: 2.6045977160467544 - type: nauc_recall_at_5_diff1 value: 35.81340094401374 - type: nauc_recall_at_5_max value: 25.91082947510634 - type: nauc_recall_at_5_std value: 9.759404930864779 - type: ndcg_at_1 value: 87.819 - type: ndcg_at_10 value: 90.986 - type: ndcg_at_100 value: 91.69 - type: ndcg_at_1000 value: 91.863 - type: ndcg_at_20 value: 91.293 - type: ndcg_at_3 value: 89.621 - type: ndcg_at_5 value: 90.333 - type: precision_at_1 value: 87.819 - type: precision_at_10 value: 10.753 - type: precision_at_100 value: 1.138 - type: precision_at_1000 value: 0.117 - type: precision_at_20 value: 5.4879999999999995 - type: precision_at_3 value: 33.703 - type: precision_at_5 value: 20.831 - type: recall_at_1 value: 81.601 - type: recall_at_10 value: 95.44200000000001 - type: recall_at_100 value: 98.14399999999999 - type: recall_at_1000 value: 99.157 - type: recall_at_20 value: 96.43 - type: recall_at_3 value: 91.729 - type: recall_at_5 value: 93.552 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: main_score value: 56.056 - type: map_at_1 value: 28.666000000000004 - type: map_at_10 value: 47.437000000000005 - type: map_at_100 value: 49.537 - type: map_at_1000 value: 49.665 - type: map_at_20 value: 48.618 - type: map_at_3 value: 41.355 - type: map_at_5 value: 44.525 - type: mrr_at_1 value: 55.55555555555556 - type: mrr_at_10 value: 63.705173427395614 - type: mrr_at_100 value: 64.25449940779741 - type: mrr_at_1000 value: 64.27635581092147 - type: mrr_at_20 value: 64.03796029079103 - type: mrr_at_3 value: 61.49691358024688 - type: mrr_at_5 value: 62.73148148148143 - type: nauc_map_at_1000_diff1 value: 43.24282910397747 - type: nauc_map_at_1000_max value: 28.506093180265644 - type: nauc_map_at_1000_std value: -13.040508386155054 - type: nauc_map_at_100_diff1 value: 43.23650442904607 - type: nauc_map_at_100_max value: 28.470565635459156 - type: nauc_map_at_100_std value: -12.988098780714935 - type: nauc_map_at_10_diff1 value: 43.393840733087686 - type: nauc_map_at_10_max value: 
26.637302062720153 - type: nauc_map_at_10_std value: -14.47500292113762 - type: nauc_map_at_1_diff1 value: 47.705150227211725 - type: nauc_map_at_1_max value: 15.354189686550129 - type: nauc_map_at_1_std value: -14.559819859039067 - type: nauc_map_at_20_diff1 value: 43.14121075706104 - type: nauc_map_at_20_max value: 27.811170590408395 - type: nauc_map_at_20_std value: -13.459413585283583 - type: nauc_map_at_3_diff1 value: 44.33938667720801 - type: nauc_map_at_3_max value: 21.785619884549398 - type: nauc_map_at_3_std value: -15.569980103071593 - type: nauc_map_at_5_diff1 value: 43.39280905665027 - type: nauc_map_at_5_max value: 25.021492190645017 - type: nauc_map_at_5_std value: -14.48856622187443 - type: nauc_mrr_at_1000_diff1 value: 52.971563939946286 - type: nauc_mrr_at_1000_max value: 38.88019486172324 - type: nauc_mrr_at_1000_std value: -12.412991642381616 - type: nauc_mrr_at_100_diff1 value: 52.978468139876945 - type: nauc_mrr_at_100_max value: 38.89751787948751 - type: nauc_mrr_at_100_std value: -12.3677876252269 - type: nauc_mrr_at_10_diff1 value: 52.78507148048174 - type: nauc_mrr_at_10_max value: 38.55079809310022 - type: nauc_mrr_at_10_std value: -12.944127025078755 - type: nauc_mrr_at_1_diff1 value: 55.52626805861546 - type: nauc_mrr_at_1_max value: 40.49306809164979 - type: nauc_mrr_at_1_std value: -12.886607701317681 - type: nauc_mrr_at_20_diff1 value: 52.9592152665678 - type: nauc_mrr_at_20_max value: 38.88514014589964 - type: nauc_mrr_at_20_std value: -12.434464359819444 - type: nauc_mrr_at_3_diff1 value: 52.73696844091174 - type: nauc_mrr_at_3_max value: 38.61018727252859 - type: nauc_mrr_at_3_std value: -13.123989867364166 - type: nauc_mrr_at_5_diff1 value: 53.037110010188 - type: nauc_mrr_at_5_max value: 38.44770729849151 - type: nauc_mrr_at_5_std value: -13.49318771828972 - type: nauc_ndcg_at_1000_diff1 value: 44.73813840091289 - type: nauc_ndcg_at_1000_max value: 33.70113904685389 - type: nauc_ndcg_at_1000_std value: -10.328687058192742 - type: nauc_ndcg_at_100_diff1 value: 44.595174119928835 - type: nauc_ndcg_at_100_max value: 33.4788285112467 - type: nauc_ndcg_at_100_std value: -8.695355259716946 - type: nauc_ndcg_at_10_diff1 value: 44.39837225263 - type: nauc_ndcg_at_10_max value: 29.188289725593393 - type: nauc_ndcg_at_10_std value: -13.67608323673103 - type: nauc_ndcg_at_1_diff1 value: 55.52626805861546 - type: nauc_ndcg_at_1_max value: 40.49306809164979 - type: nauc_ndcg_at_1_std value: -12.886607701317681 - type: nauc_ndcg_at_20_diff1 value: 44.24661739902305 - type: nauc_ndcg_at_20_max value: 31.667868318249965 - type: nauc_ndcg_at_20_std value: -10.65470780066342 - type: nauc_ndcg_at_3_diff1 value: 43.39857166975522 - type: nauc_ndcg_at_3_max value: 31.764668313577495 - type: nauc_ndcg_at_3_std value: -14.494866954678152 - type: nauc_ndcg_at_5_diff1 value: 43.16976647347281 - type: nauc_ndcg_at_5_max value: 29.878329062643143 - type: nauc_ndcg_at_5_std value: -13.987689089179739 - type: nauc_precision_at_1000_diff1 value: -9.807973252625484 - type: nauc_precision_at_1000_max value: 26.6279603849494 - type: nauc_precision_at_1000_std value: 7.113187103520632 - type: nauc_precision_at_100_diff1 value: -4.777149603323976 - type: nauc_precision_at_100_max value: 31.03410463692187 - type: nauc_precision_at_100_std value: 10.463144150275435 - type: nauc_precision_at_10_diff1 value: 8.691528703215962 - type: nauc_precision_at_10_max value: 33.329579434123374 - type: nauc_precision_at_10_std value: -0.8002015226329403 - type: nauc_precision_at_1_diff1 value: 
55.52626805861546 - type: nauc_precision_at_1_max value: 40.49306809164979 - type: nauc_precision_at_1_std value: -12.886607701317681 - type: nauc_precision_at_20_diff1 value: 3.4564653474184284 - type: nauc_precision_at_20_max value: 34.401070158471136 - type: nauc_precision_at_20_std value: 5.813431200164549 - type: nauc_precision_at_3_diff1 value: 22.463219705462187 - type: nauc_precision_at_3_max value: 34.77413976546924 - type: nauc_precision_at_3_std value: -7.083890789741479 - type: nauc_precision_at_5_diff1 value: 14.011006004883154 - type: nauc_precision_at_5_max value: 35.73655466853702 - type: nauc_precision_at_5_std value: -2.8395172077771598 - type: nauc_recall_at_1000_diff1 value: 16.478046357391555 - type: nauc_recall_at_1000_max value: 43.231704288282344 - type: nauc_recall_at_1000_std value: 38.430684937573645 - type: nauc_recall_at_100_diff1 value: 30.764718344602436 - type: nauc_recall_at_100_max value: 31.769050487166655 - type: nauc_recall_at_100_std value: 23.48468311677149 - type: nauc_recall_at_10_diff1 value: 34.47339565324045 - type: nauc_recall_at_10_max value: 19.054212335800454 - type: nauc_recall_at_10_std value: -11.039734015330437 - type: nauc_recall_at_1_diff1 value: 47.705150227211725 - type: nauc_recall_at_1_max value: 15.354189686550129 - type: nauc_recall_at_1_std value: -14.559819859039067 - type: nauc_recall_at_20_diff1 value: 32.1011474016873 - type: nauc_recall_at_20_max value: 25.546372988304423 - type: nauc_recall_at_20_std value: -0.007233471152482897 - type: nauc_recall_at_3_diff1 value: 37.5708138019065 - type: nauc_recall_at_3_max value: 16.66410785756736 - type: nauc_recall_at_3_std value: -15.404817020108966 - type: nauc_recall_at_5_diff1 value: 35.714519648479595 - type: nauc_recall_at_5_max value: 19.02075233009296 - type: nauc_recall_at_5_std value: -13.180963359760725 - type: ndcg_at_1 value: 55.556000000000004 - type: ndcg_at_10 value: 56.056 - type: ndcg_at_100 value: 62.44 - type: ndcg_at_1000 value: 64.263 - type: ndcg_at_20 value: 58.638999999999996 - type: ndcg_at_3 value: 51.722 - type: ndcg_at_5 value: 52.701 - type: precision_at_1 value: 55.556000000000004 - type: precision_at_10 value: 15.679000000000002 - type: precision_at_100 value: 2.252 - type: precision_at_1000 value: 0.257 - type: precision_at_20 value: 9.02 - type: precision_at_3 value: 34.619 - type: precision_at_5 value: 25.093 - type: recall_at_1 value: 28.666000000000004 - type: recall_at_10 value: 63.717999999999996 - type: recall_at_100 value: 86.938 - type: recall_at_1000 value: 97.603 - type: recall_at_20 value: 71.649 - type: recall_at_3 value: 46.663 - type: recall_at_5 value: 53.313 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: main_score value: 71.74199999999999 - type: map_at_1 value: 41.729 - type: map_at_10 value: 63.168 - type: map_at_100 value: 64.132 - type: map_at_1000 value: 64.199 - type: map_at_20 value: 63.736000000000004 - type: map_at_3 value: 59.826 - type: map_at_5 value: 61.882000000000005 - type: mrr_at_1 value: 83.45712356515868 - type: mrr_at_10 value: 87.850342432719 - type: mrr_at_100 value: 88.0016320691113 - type: mrr_at_1000 value: 88.00576596968136 - type: mrr_at_20 value: 87.94463253190389 - type: mrr_at_3 value: 87.13706954760278 - type: mrr_at_5 value: 87.59419311276136 - type: nauc_map_at_1000_diff1 value: 13.635446621095054 - type: nauc_map_at_1000_max value: 18.670632529445633 - type: nauc_map_at_1000_std 
value: 10.444842636150575 - type: nauc_map_at_100_diff1 value: 13.599262398010783 - type: nauc_map_at_100_max value: 18.636389405484806 - type: nauc_map_at_100_std value: 10.460027483576043 - type: nauc_map_at_10_diff1 value: 13.235053919323942 - type: nauc_map_at_10_max value: 18.252140477080047 - type: nauc_map_at_10_std value: 9.9075337042203 - type: nauc_map_at_1_diff1 value: 76.51940497836482 - type: nauc_map_at_1_max value: 51.251419487235474 - type: nauc_map_at_1_std value: 0.16714896857146574 - type: nauc_map_at_20_diff1 value: 13.4178245722222 - type: nauc_map_at_20_max value: 18.40988771210718 - type: nauc_map_at_20_std value: 10.216685163366282 - type: nauc_map_at_3_diff1 value: 13.38370761663418 - type: nauc_map_at_3_max value: 17.760962555456537 - type: nauc_map_at_3_std value: 7.15741965624388 - type: nauc_map_at_5_diff1 value: 13.138133309724855 - type: nauc_map_at_5_max value: 17.871761295251044 - type: nauc_map_at_5_std value: 8.475147426940074 - type: nauc_mrr_at_1000_diff1 value: 75.82650818891959 - type: nauc_mrr_at_1000_max value: 53.6736100668434 - type: nauc_mrr_at_1000_std value: 1.8025016349213916 - type: nauc_mrr_at_100_diff1 value: 75.82530574210111 - type: nauc_mrr_at_100_max value: 53.68067545829002 - type: nauc_mrr_at_100_std value: 1.8147470536495791 - type: nauc_mrr_at_10_diff1 value: 75.8330135686799 - type: nauc_mrr_at_10_max value: 53.78626885349077 - type: nauc_mrr_at_10_std value: 1.7975782717226636 - type: nauc_mrr_at_1_diff1 value: 76.51940497836482 - type: nauc_mrr_at_1_max value: 51.251419487235474 - type: nauc_mrr_at_1_std value: 0.16714896857146574 - type: nauc_mrr_at_20_diff1 value: 75.82783382464166 - type: nauc_mrr_at_20_max value: 53.68364567043885 - type: nauc_mrr_at_20_std value: 1.742037904463963 - type: nauc_mrr_at_3_diff1 value: 75.6944609768663 - type: nauc_mrr_at_3_max value: 53.803941340341666 - type: nauc_mrr_at_3_std value: 1.1849945458077804 - type: nauc_mrr_at_5_diff1 value: 75.73006960604903 - type: nauc_mrr_at_5_max value: 53.62223096420106 - type: nauc_mrr_at_5_std value: 1.6144067563410909 - type: nauc_ndcg_at_1000_diff1 value: 21.58025241642726 - type: nauc_ndcg_at_1000_max value: 24.675747527001153 - type: nauc_ndcg_at_1000_std value: 13.075943547492718 - type: nauc_ndcg_at_100_diff1 value: 20.30260137544846 - type: nauc_ndcg_at_100_max value: 23.757528813872018 - type: nauc_ndcg_at_100_std value: 13.648994687574062 - type: nauc_ndcg_at_10_diff1 value: 18.995052360997818 - type: nauc_ndcg_at_10_max value: 22.254260808196037 - type: nauc_ndcg_at_10_std value: 11.27212390633054 - type: nauc_ndcg_at_1_diff1 value: 76.51940497836482 - type: nauc_ndcg_at_1_max value: 51.251419487235474 - type: nauc_ndcg_at_1_std value: 0.16714896857146574 - type: nauc_ndcg_at_20_diff1 value: 19.333742380695757 - type: nauc_ndcg_at_20_max value: 22.527779834633364 - type: nauc_ndcg_at_20_std value: 12.161009000707917 - type: nauc_ndcg_at_3_diff1 value: 20.013329040965534 - type: nauc_ndcg_at_3_max value: 21.99692460311921 - type: nauc_ndcg_at_3_std value: 6.8076290638386165 - type: nauc_ndcg_at_5_diff1 value: 19.08226315942471 - type: nauc_ndcg_at_5_max value: 21.71185964294168 - type: nauc_ndcg_at_5_std value: 8.671911269518214 - type: nauc_precision_at_1000_diff1 value: 2.4462475489446764 - type: nauc_precision_at_1000_max value: 29.145662064268578 - type: nauc_precision_at_1000_std value: 49.20704909525856 - type: nauc_precision_at_100_diff1 value: 0.11271196725540299 - type: nauc_precision_at_100_max value: 17.37584606388067 - type: 
nauc_precision_at_100_std value: 34.66099346244071 - type: nauc_precision_at_10_diff1 value: 2.9923183951227825 - type: nauc_precision_at_10_max value: 14.261884731124264 - type: nauc_precision_at_10_std value: 18.084188795498378 - type: nauc_precision_at_1_diff1 value: 76.51940497836482 - type: nauc_precision_at_1_max value: 51.251419487235474 - type: nauc_precision_at_1_std value: 0.16714896857146574 - type: nauc_precision_at_20_diff1 value: 1.9180293008303761 - type: nauc_precision_at_20_max value: 13.832269193468512 - type: nauc_precision_at_20_std value: 21.65284406055607 - type: nauc_precision_at_3_diff1 value: 7.226609484731811 - type: nauc_precision_at_3_max value: 15.162908526977272 - type: nauc_precision_at_3_std value: 8.451859972962776 - type: nauc_precision_at_5_diff1 value: 4.705236845538159 - type: nauc_precision_at_5_max value: 14.022910843582666 - type: nauc_precision_at_5_std value: 11.777269322821605 - type: nauc_recall_at_1000_diff1 value: 2.446247548945172 - type: nauc_recall_at_1000_max value: 29.14566206426889 - type: nauc_recall_at_1000_std value: 49.20704909525879 - type: nauc_recall_at_100_diff1 value: 0.1127119672553316 - type: nauc_recall_at_100_max value: 17.37584606388062 - type: nauc_recall_at_100_std value: 34.660993462440686 - type: nauc_recall_at_10_diff1 value: 2.9923183951227927 - type: nauc_recall_at_10_max value: 14.261884731124299 - type: nauc_recall_at_10_std value: 18.08418879549837 - type: nauc_recall_at_1_diff1 value: 76.51940497836482 - type: nauc_recall_at_1_max value: 51.251419487235474 - type: nauc_recall_at_1_std value: 0.16714896857146574 - type: nauc_recall_at_20_diff1 value: 1.918029300830432 - type: nauc_recall_at_20_max value: 13.832269193468566 - type: nauc_recall_at_20_std value: 21.65284406055605 - type: nauc_recall_at_3_diff1 value: 7.226609484731802 - type: nauc_recall_at_3_max value: 15.162908526977182 - type: nauc_recall_at_3_std value: 8.451859972962634 - type: nauc_recall_at_5_diff1 value: 4.705236845538197 - type: nauc_recall_at_5_max value: 14.02291084358265 - type: nauc_recall_at_5_std value: 11.777269322821638 - type: ndcg_at_1 value: 83.45700000000001 - type: ndcg_at_10 value: 71.74199999999999 - type: ndcg_at_100 value: 75.008 - type: ndcg_at_1000 value: 76.242 - type: ndcg_at_20 value: 73.114 - type: ndcg_at_3 value: 67.128 - type: ndcg_at_5 value: 69.645 - type: precision_at_1 value: 83.45700000000001 - type: precision_at_10 value: 14.747 - type: precision_at_100 value: 1.73 - type: precision_at_1000 value: 0.189 - type: precision_at_20 value: 7.8149999999999995 - type: precision_at_3 value: 42.323 - type: precision_at_5 value: 27.381 - type: recall_at_1 value: 41.729 - type: recall_at_10 value: 73.734 - type: recall_at_100 value: 86.502 - type: recall_at_1000 value: 94.60499999999999 - type: recall_at_20 value: 78.14999999999999 - type: recall_at_3 value: 63.483999999999995 - type: recall_at_5 value: 68.45400000000001 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 96.4904 - type: ap value: 94.85481918794709 - type: ap_weighted value: 94.85481918794709 - type: f1 value: 96.4898592305707 - type: f1_weighted value: 96.4898592305707 - type: main_score value: 96.4904 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: main_score value: 43.692 - type: map_at_1 value: 
23.751 - type: map_at_10 value: 36.553999999999995 - type: map_at_100 value: 37.721 - type: map_at_1000 value: 37.763999999999996 - type: map_at_20 value: 37.289 - type: map_at_3 value: 32.643 - type: map_at_5 value: 34.851 - type: mrr_at_1 value: 24.455587392550143 - type: mrr_at_10 value: 37.18388706963206 - type: mrr_at_100 value: 38.28330737932916 - type: mrr_at_1000 value: 38.32054399710817 - type: mrr_at_20 value: 37.8818001216278 - type: mrr_at_3 value: 33.35721107927405 - type: mrr_at_5 value: 35.52483285577843 - type: nauc_map_at_1000_diff1 value: 36.3576177260684 - type: nauc_map_at_1000_max value: 7.854511605962703 - type: nauc_map_at_1000_std value: -17.701121059746878 - type: nauc_map_at_100_diff1 value: 36.356075649230505 - type: nauc_map_at_100_max value: 7.862168042999533 - type: nauc_map_at_100_std value: -17.670102459097233 - type: nauc_map_at_10_diff1 value: 36.22122978875574 - type: nauc_map_at_10_max value: 7.80848606967416 - type: nauc_map_at_10_std value: -18.3265151386167 - type: nauc_map_at_1_diff1 value: 39.28605466408357 - type: nauc_map_at_1_max value: 6.20202977590459 - type: nauc_map_at_1_std value: -15.734334090045026 - type: nauc_map_at_20_diff1 value: 36.33637880909657 - type: nauc_map_at_20_max value: 7.843437969476022 - type: nauc_map_at_20_std value: -17.917533363025996 - type: nauc_map_at_3_diff1 value: 36.24864976076741 - type: nauc_map_at_3_max value: 7.420345251835957 - type: nauc_map_at_3_std value: -18.71678497722944 - type: nauc_map_at_5_diff1 value: 36.0789619291824 - type: nauc_map_at_5_max value: 7.7314285669514495 - type: nauc_map_at_5_std value: -18.748688764538706 - type: nauc_mrr_at_1000_diff1 value: 36.23912675623378 - type: nauc_mrr_at_1000_max value: 7.690553436255147 - type: nauc_mrr_at_1000_std value: -17.609526070212304 - type: nauc_mrr_at_100_diff1 value: 36.23782651189002 - type: nauc_mrr_at_100_max value: 7.70075095171647 - type: nauc_mrr_at_100_std value: -17.575714144960184 - type: nauc_mrr_at_10_diff1 value: 36.125229472534215 - type: nauc_mrr_at_10_max value: 7.635472248755658 - type: nauc_mrr_at_10_std value: -18.208166616511086 - type: nauc_mrr_at_1_diff1 value: 39.20986875554532 - type: nauc_mrr_at_1_max value: 6.062668487561363 - type: nauc_mrr_at_1_std value: -16.04130340817602 - type: nauc_mrr_at_20_diff1 value: 36.21207088739667 - type: nauc_mrr_at_20_max value: 7.699610250145951 - type: nauc_mrr_at_20_std value: -17.778245221724028 - type: nauc_mrr_at_3_diff1 value: 36.03957583885305 - type: nauc_mrr_at_3_max value: 7.225515576504581 - type: nauc_mrr_at_3_std value: -18.74478742943741 - type: nauc_mrr_at_5_diff1 value: 35.969152496648974 - type: nauc_mrr_at_5_max value: 7.584059789018233 - type: nauc_mrr_at_5_std value: -18.569374723129332 - type: nauc_ndcg_at_1000_diff1 value: 35.894655529841806 - type: nauc_ndcg_at_1000_max value: 8.579327424366236 - type: nauc_ndcg_at_1000_std value: -16.359677367747896 - type: nauc_ndcg_at_100_diff1 value: 35.89861902483983 - type: nauc_ndcg_at_100_max value: 8.830873623962242 - type: nauc_ndcg_at_100_std value: -15.173125564722978 - type: nauc_ndcg_at_10_diff1 value: 35.36499811105169 - type: nauc_ndcg_at_10_max value: 8.449267180956992 - type: nauc_ndcg_at_10_std value: -18.41978802362402 - type: nauc_ndcg_at_1_diff1 value: 39.15422481210622 - type: nauc_ndcg_at_1_max value: 6.055515791928331 - type: nauc_ndcg_at_1_std value: -16.042779610876252 - type: nauc_ndcg_at_20_diff1 value: 35.73402868264468 - type: nauc_ndcg_at_20_max value: 8.695705518210847 - type: nauc_ndcg_at_20_std 
value: -16.7735829470466 - type: nauc_ndcg_at_3_diff1 value: 35.31358242856231 - type: nauc_ndcg_at_3_max value: 7.645692789058997 - type: nauc_ndcg_at_3_std value: -19.460003734786874 - type: nauc_ndcg_at_5_diff1 value: 35.05216588927143 - type: nauc_ndcg_at_5_max value: 8.216690520604715 - type: nauc_ndcg_at_5_std value: -19.3982054492159 - type: nauc_precision_at_1000_diff1 value: -4.440002625111349 - type: nauc_precision_at_1000_max value: 7.886988951901723 - type: nauc_precision_at_1000_std value: 9.88111187048247 - type: nauc_precision_at_100_diff1 value: 15.728286119463325 - type: nauc_precision_at_100_max value: 13.218650824470654 - type: nauc_precision_at_100_std value: 16.113245895522553 - type: nauc_precision_at_10_diff1 value: 29.51218489610567 - type: nauc_precision_at_10_max value: 10.197432401942912 - type: nauc_precision_at_10_std value: -16.950603431359493 - type: nauc_precision_at_1_diff1 value: 39.15422481210622 - type: nauc_precision_at_1_max value: 6.055515791928331 - type: nauc_precision_at_1_std value: -16.042779610876252 - type: nauc_precision_at_20_diff1 value: 27.825993070397338 - type: nauc_precision_at_20_max value: 11.437632287846007 - type: nauc_precision_at_20_std value: -7.450353566405601 - type: nauc_precision_at_3_diff1 value: 32.14135556796588 - type: nauc_precision_at_3_max value: 7.989252443574163 - type: nauc_precision_at_3_std value: -21.566254595671055 - type: nauc_precision_at_5_diff1 value: 30.68778685307082 - type: nauc_precision_at_5_max value: 9.332160758499892 - type: nauc_precision_at_5_std value: -20.928554713448914 - type: nauc_recall_at_1000_diff1 value: 25.00810478716878 - type: nauc_recall_at_1000_max value: 46.518165765201644 - type: nauc_recall_at_1000_std value: 61.4734635576085 - type: nauc_recall_at_100_diff1 value: 33.895581318261726 - type: nauc_recall_at_100_max value: 20.10706035872801 - type: nauc_recall_at_100_std value: 24.204226584457047 - type: nauc_recall_at_10_diff1 value: 32.363127359576296 - type: nauc_recall_at_10_max value: 10.729923804989545 - type: nauc_recall_at_10_std value: -18.1335370184202 - type: nauc_recall_at_1_diff1 value: 39.28605466408357 - type: nauc_recall_at_1_max value: 6.20202977590459 - type: nauc_recall_at_1_std value: -15.734334090045026 - type: nauc_recall_at_20_diff1 value: 33.47804003169795 - type: nauc_recall_at_20_max value: 12.781494765263382 - type: nauc_recall_at_20_std value: -9.263970132202658 - type: nauc_recall_at_3_diff1 value: 32.71001429428999 - type: nauc_recall_at_3_max value: 8.353439197382693 - type: nauc_recall_at_3_std value: -21.235097744366954 - type: nauc_recall_at_5_diff1 value: 31.87451464963415 - type: nauc_recall_at_5_max value: 9.635051450907305 - type: nauc_recall_at_5_std value: -21.113235357132794 - type: ndcg_at_1 value: 24.47 - type: ndcg_at_10 value: 43.692 - type: ndcg_at_100 value: 49.211 - type: ndcg_at_1000 value: 50.244 - type: ndcg_at_20 value: 46.278000000000006 - type: ndcg_at_3 value: 35.719 - type: ndcg_at_5 value: 39.652 - type: precision_at_1 value: 24.47 - type: precision_at_10 value: 6.857 - type: precision_at_100 value: 0.9610000000000001 - type: precision_at_1000 value: 0.105 - type: precision_at_20 value: 3.968 - type: precision_at_3 value: 15.181000000000001 - type: precision_at_5 value: 11.117 - type: recall_at_1 value: 23.751 - type: recall_at_10 value: 65.64 - type: recall_at_100 value: 90.967 - type: recall_at_1000 value: 98.738 - type: recall_at_20 value: 75.639 - type: recall_at_3 value: 43.927 - type: recall_at_5 value: 53.366 - task: 
type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 98.82580939352485 - type: f1 value: 98.75201754333801 - type: f1_weighted value: 98.82795205108245 - type: main_score value: 98.82580939352485 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 92.29822161422709 - type: f1 value: 77.75210224871594 - type: f1_weighted value: 93.58661422540348 - type: main_score value: 92.29822161422709 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 85.17484868863484 - type: f1 value: 81.94484244487094 - type: f1_weighted value: 85.21022593423332 - type: main_score value: 85.17484868863484 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 89.61667787491594 - type: f1 value: 89.02701927621264 - type: f1_weighted value: 89.56306982022801 - type: main_score value: 89.61667787491594 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: main_score value: 46.318282423948574 - type: v_measure value: 46.318282423948574 - type: v_measure_std value: 0.9729055662461538 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: main_score value: 44.29033625273981 - type: v_measure value: 44.29033625273981 - type: v_measure_std value: 1.0596383629128594 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 59042f120c80e8afa9cdbb224f67076cec0fc9a7 metrics: - type: main_score value: 33.0526129239962 - type: map value: 33.0526129239962 - type: mrr value: 34.29260046890935 - type: nAUC_map_diff1 value: 12.579738077238032 - type: nAUC_map_max value: -20.936629344962 - type: nAUC_map_std value: -1.6096805784945216 - type: nAUC_mrr_diff1 value: 11.597584463580807 - type: nAUC_mrr_max value: -15.723702838537504 - type: nAUC_mrr_std value: 0.2719172965777737 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: main_score value: 41.486000000000004 - type: map_at_1 value: 6.866 - type: map_at_10 value: 15.895999999999999 - type: map_at_100 value: 21.093 - type: map_at_1000 value: 23.067 - type: map_at_20 value: 18.125 - type: map_at_3 value: 11.421000000000001 - type: map_at_5 value: 13.415 - type: mrr_at_1 value: 52.63157894736842 - type: mrr_at_10 value: 61.486805248415166 - type: mrr_at_100 value: 62.08211009182091 - type: mrr_at_1000 value: 62.10828701365016 - type: mrr_at_20 value: 61.904411187915784 - type: mrr_at_3 value: 59.90712074303407 - type: mrr_at_5 value: 60.91331269349847 - type: nauc_map_at_1000_diff1 value: 25.484625278529403 - type: nauc_map_at_1000_max value: 31.206600396418853 - type: nauc_map_at_1000_std 
value: 15.569448072357156 - type: nauc_map_at_100_diff1 value: 27.636750226316764 - type: nauc_map_at_100_max value: 29.66992681250722 - type: nauc_map_at_100_std value: 10.570600484002671 - type: nauc_map_at_10_diff1 value: 32.76642525548697 - type: nauc_map_at_10_max value: 21.459225397237663 - type: nauc_map_at_10_std value: -3.546494734209264 - type: nauc_map_at_1_diff1 value: 48.8002894871328 - type: nauc_map_at_1_max value: 5.7236722609868815 - type: nauc_map_at_1_std value: -13.283554044471352 - type: nauc_map_at_20_diff1 value: 30.57169701502308 - type: nauc_map_at_20_max value: 25.79666139518404 - type: nauc_map_at_20_std value: 1.781732492989651 - type: nauc_map_at_3_diff1 value: 40.076315947201095 - type: nauc_map_at_3_max value: 12.862524429140054 - type: nauc_map_at_3_std value: -9.188349777126817 - type: nauc_map_at_5_diff1 value: 36.9918718052938 - type: nauc_map_at_5_max value: 16.74234374361876 - type: nauc_map_at_5_std value: -7.818523349307494 - type: nauc_mrr_at_1000_diff1 value: 26.88183002609805 - type: nauc_mrr_at_1000_max value: 47.10209348428658 - type: nauc_mrr_at_1000_std value: 32.067825924992924 - type: nauc_mrr_at_100_diff1 value: 26.871482491566745 - type: nauc_mrr_at_100_max value: 47.11303868498556 - type: nauc_mrr_at_100_std value: 32.08961428818868 - type: nauc_mrr_at_10_diff1 value: 26.6356914977722 - type: nauc_mrr_at_10_max value: 47.091624558810366 - type: nauc_mrr_at_10_std value: 31.942424120660164 - type: nauc_mrr_at_1_diff1 value: 28.19774198483673 - type: nauc_mrr_at_1_max value: 41.44380927834253 - type: nauc_mrr_at_1_std value: 25.18222691885917 - type: nauc_mrr_at_20_diff1 value: 26.86487347109452 - type: nauc_mrr_at_20_max value: 47.1987778214726 - type: nauc_mrr_at_20_std value: 32.143517921610034 - type: nauc_mrr_at_3_diff1 value: 27.34340373236422 - type: nauc_mrr_at_3_max value: 46.358726506276646 - type: nauc_mrr_at_3_std value: 31.74924155572593 - type: nauc_mrr_at_5_diff1 value: 27.209667205060672 - type: nauc_mrr_at_5_max value: 46.79883369072009 - type: nauc_mrr_at_5_std value: 31.655605306670758 - type: nauc_ndcg_at_1000_diff1 value: 18.940195769769687 - type: nauc_ndcg_at_1000_max value: 46.48551313937331 - type: nauc_ndcg_at_1000_std value: 33.64819502089232 - type: nauc_ndcg_at_100_diff1 value: 19.50885253809146 - type: nauc_ndcg_at_100_max value: 40.53174462354878 - type: nauc_ndcg_at_100_std value: 28.516152877751118 - type: nauc_ndcg_at_10_diff1 value: 16.01699218096564 - type: nauc_ndcg_at_10_max value: 41.17322878314514 - type: nauc_ndcg_at_10_std value: 29.002233224832196 - type: nauc_ndcg_at_1_diff1 value: 27.443547710102205 - type: nauc_ndcg_at_1_max value: 40.66529763309582 - type: nauc_ndcg_at_1_std value: 24.15016766225869 - type: nauc_ndcg_at_20_diff1 value: 17.541197675685062 - type: nauc_ndcg_at_20_max value: 40.53231266973844 - type: nauc_ndcg_at_20_std value: 29.54096347876548 - type: nauc_ndcg_at_3_diff1 value: 18.649628357473716 - type: nauc_ndcg_at_3_max value: 41.18603570171764 - type: nauc_ndcg_at_3_std value: 27.125524188420396 - type: nauc_ndcg_at_5_diff1 value: 17.519593751448483 - type: nauc_ndcg_at_5_max value: 42.715997890377345 - type: nauc_ndcg_at_5_std value: 27.902627839899868 - type: nauc_precision_at_1000_diff1 value: -15.528797630565155 - type: nauc_precision_at_1000_max value: 13.741640921778671 - type: nauc_precision_at_1000_std value: 44.50896053788372 - type: nauc_precision_at_100_diff1 value: -14.491464489721887 - type: nauc_precision_at_100_max value: 23.136434418999457 - type: 
nauc_precision_at_100_std value: 49.73145147863128 - type: nauc_precision_at_10_diff1 value: -4.829188942994277 - type: nauc_precision_at_10_max value: 40.327612559528866 - type: nauc_precision_at_10_std value: 39.34919529635044 - type: nauc_precision_at_1_diff1 value: 28.19774198483673 - type: nauc_precision_at_1_max value: 41.44380927834253 - type: nauc_precision_at_1_std value: 25.18222691885917 - type: nauc_precision_at_20_diff1 value: -7.210726293112847 - type: nauc_precision_at_20_max value: 37.195679576636984 - type: nauc_precision_at_20_std value: 45.4597096418357 - type: nauc_precision_at_3_diff1 value: 7.578219537774854 - type: nauc_precision_at_3_max value: 41.59775233475654 - type: nauc_precision_at_3_std value: 30.764584790895118 - type: nauc_precision_at_5_diff1 value: 1.655451789039598 - type: nauc_precision_at_5_max value: 43.435739407610455 - type: nauc_precision_at_5_std value: 33.42552263325999 - type: nauc_recall_at_1000_diff1 value: 5.030705700690516 - type: nauc_recall_at_1000_max value: 19.108072570815583 - type: nauc_recall_at_1000_std value: 14.697734974217308 - type: nauc_recall_at_100_diff1 value: 14.746540318132407 - type: nauc_recall_at_100_max value: 21.798705033854795 - type: nauc_recall_at_100_std value: 11.416195108842587 - type: nauc_recall_at_10_diff1 value: 25.548642427860486 - type: nauc_recall_at_10_max value: 18.711677681987474 - type: nauc_recall_at_10_std value: -5.988904818971677 - type: nauc_recall_at_1_diff1 value: 48.8002894871328 - type: nauc_recall_at_1_max value: 5.7236722609868815 - type: nauc_recall_at_1_std value: -13.283554044471352 - type: nauc_recall_at_20_diff1 value: 23.39140739154809 - type: nauc_recall_at_20_max value: 19.351150636155474 - type: nauc_recall_at_20_std value: -2.757280266915132 - type: nauc_recall_at_3_diff1 value: 38.17453576012812 - type: nauc_recall_at_3_max value: 13.47003839643972 - type: nauc_recall_at_3_std value: -8.75780163862688 - type: nauc_recall_at_5_diff1 value: 33.02812855226899 - type: nauc_recall_at_5_max value: 15.477626408978477 - type: nauc_recall_at_5_std value: -9.072206441070708 - type: ndcg_at_1 value: 50.773999999999994 - type: ndcg_at_10 value: 41.486000000000004 - type: ndcg_at_100 value: 39.051 - type: ndcg_at_1000 value: 48.106 - type: ndcg_at_20 value: 39.432 - type: ndcg_at_3 value: 47.428 - type: ndcg_at_5 value: 45.227000000000004 - type: precision_at_1 value: 52.632 - type: precision_at_10 value: 31.146 - type: precision_at_100 value: 10.328 - type: precision_at_1000 value: 2.432 - type: precision_at_20 value: 23.793 - type: precision_at_3 value: 45.201 - type: precision_at_5 value: 39.876 - type: recall_at_1 value: 6.866 - type: recall_at_10 value: 20.447000000000003 - type: recall_at_100 value: 40.607 - type: recall_at_1000 value: 73.411 - type: recall_at_20 value: 26.082 - type: recall_at_3 value: 12.484 - type: recall_at_5 value: 15.847 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: main_score value: 69.072 - type: map_at_1 value: 45.483000000000004 - type: map_at_10 value: 62.050000000000004 - type: map_at_100 value: 62.693 - type: map_at_1000 value: 62.702999999999996 - type: map_at_20 value: 62.498 - type: map_at_3 value: 58.285 - type: map_at_5 value: 60.711000000000006 - type: mrr_at_1 value: 50.840092699884124 - type: mrr_at_10 value: 64.54635224116673 - type: mrr_at_100 value: 64.9526548702289 - type: mrr_at_1000 value: 64.95908460752281 - type: mrr_at_20 value: 
64.82949565799959 - type: mrr_at_3 value: 61.89165701042856 - type: mrr_at_5 value: 63.632676709154026 - type: nauc_map_at_1000_diff1 value: 43.187285304185224 - type: nauc_map_at_1000_max value: 32.39921659632756 - type: nauc_map_at_1000_std value: -5.780901333066553 - type: nauc_map_at_100_diff1 value: 43.184487221204456 - type: nauc_map_at_100_max value: 32.41176116347982 - type: nauc_map_at_100_std value: -5.76422606662383 - type: nauc_map_at_10_diff1 value: 42.967066814031746 - type: nauc_map_at_10_max value: 32.489617364418514 - type: nauc_map_at_10_std value: -6.029045531102664 - type: nauc_map_at_1_diff1 value: 46.16376563218624 - type: nauc_map_at_1_max value: 26.342624776802232 - type: nauc_map_at_1_std value: -7.142171388751972 - type: nauc_map_at_20_diff1 value: 43.15894358608328 - type: nauc_map_at_20_max value: 32.46492198956245 - type: nauc_map_at_20_std value: -5.788373305449195 - type: nauc_map_at_3_diff1 value: 43.231752344608545 - type: nauc_map_at_3_max value: 31.68003009949564 - type: nauc_map_at_3_std value: -8.015235132765458 - type: nauc_map_at_5_diff1 value: 42.86197608819917 - type: nauc_map_at_5_max value: 32.363857571094485 - type: nauc_map_at_5_std value: -6.780487416387977 - type: nauc_mrr_at_1000_diff1 value: 43.40542912045782 - type: nauc_mrr_at_1000_max value: 32.8461770324533 - type: nauc_mrr_at_1000_std value: -3.6505425530008204 - type: nauc_mrr_at_100_diff1 value: 43.40233508014468 - type: nauc_mrr_at_100_max value: 32.85598538385942 - type: nauc_mrr_at_100_std value: -3.637477352635459 - type: nauc_mrr_at_10_diff1 value: 43.260179162806054 - type: nauc_mrr_at_10_max value: 32.942643527040474 - type: nauc_mrr_at_10_std value: -3.712052825320437 - type: nauc_mrr_at_1_diff1 value: 46.354919460881206 - type: nauc_mrr_at_1_max value: 29.1760258591106 - type: nauc_mrr_at_1_std value: -4.107225031227406 - type: nauc_mrr_at_20_diff1 value: 43.37092385434311 - type: nauc_mrr_at_20_max value: 32.93390254712846 - type: nauc_mrr_at_20_std value: -3.5719056112132006 - type: nauc_mrr_at_3_diff1 value: 43.1744474040527 - type: nauc_mrr_at_3_max value: 32.741290559777994 - type: nauc_mrr_at_3_std value: -4.72677925120697 - type: nauc_mrr_at_5_diff1 value: 43.108396819975674 - type: nauc_mrr_at_5_max value: 32.970519514893084 - type: nauc_mrr_at_5_std value: -4.090906158975974 - type: nauc_ndcg_at_1000_diff1 value: 42.786664193638714 - type: nauc_ndcg_at_1000_max value: 33.65554095609296 - type: nauc_ndcg_at_1000_std value: -4.024030130584482 - type: nauc_ndcg_at_100_diff1 value: 42.691246775210814 - type: nauc_ndcg_at_100_max value: 34.063232335110875 - type: nauc_ndcg_at_100_std value: -3.477813807415248 - type: nauc_ndcg_at_10_diff1 value: 41.90988990571757 - type: nauc_ndcg_at_10_max value: 34.58934812881633 - type: nauc_ndcg_at_10_std value: -4.3295110195497655 - type: nauc_ndcg_at_1_diff1 value: 46.354919460881206 - type: nauc_ndcg_at_1_max value: 29.1760258591106 - type: nauc_ndcg_at_1_std value: -4.107225031227406 - type: nauc_ndcg_at_20_diff1 value: 42.493206675867114 - type: nauc_ndcg_at_20_max value: 34.562441307459544 - type: nauc_ndcg_at_20_std value: -3.4456116866749107 - type: nauc_ndcg_at_3_diff1 value: 42.24180336502808 - type: nauc_ndcg_at_3_max value: 33.064267018100594 - type: nauc_ndcg_at_3_std value: -7.786248093572142 - type: nauc_ndcg_at_5_diff1 value: 41.692714787779565 - type: nauc_ndcg_at_5_max value: 34.20502498949156 - type: nauc_ndcg_at_5_std value: -5.979557859282785 - type: nauc_precision_at_1000_diff1 value: -13.779832506640702 - type: 
nauc_precision_at_1000_max value: 1.243001688631421 - type: nauc_precision_at_1000_std value: 17.351623398622323 - type: nauc_precision_at_100_diff1 value: -11.310526816290297 - type: nauc_precision_at_100_max value: 5.771669506192959 - type: nauc_precision_at_100_std value: 19.917795079540113 - type: nauc_precision_at_10_diff1 value: 2.163699384635286 - type: nauc_precision_at_10_max value: 19.66440698458386 - type: nauc_precision_at_10_std value: 13.689876348315726 - type: nauc_precision_at_1_diff1 value: 46.354919460881206 - type: nauc_precision_at_1_max value: 29.1760258591106 - type: nauc_precision_at_1_std value: -4.107225031227406 - type: nauc_precision_at_20_diff1 value: -3.038735879584471 - type: nauc_precision_at_20_max value: 14.132968299701695 - type: nauc_precision_at_20_std value: 17.78069734664346 - type: nauc_precision_at_3_diff1 value: 21.783760758070095 - type: nauc_precision_at_3_max value: 30.244127986404497 - type: nauc_precision_at_3_std value: -0.12411163467738723 - type: nauc_precision_at_5_diff1 value: 10.980635723302418 - type: nauc_precision_at_5_max value: 25.302293738975575 - type: nauc_precision_at_5_std value: 6.4740817488722024 - type: nauc_recall_at_1000_diff1 value: 34.10343772356593 - type: nauc_recall_at_1000_max value: 80.72497340357538 - type: nauc_recall_at_1000_std value: 69.54564103264093 - type: nauc_recall_at_100_diff1 value: 33.427719956774126 - type: nauc_recall_at_100_max value: 71.54086768335449 - type: nauc_recall_at_100_std value: 49.66157377654885 - type: nauc_recall_at_10_diff1 value: 33.70139560054039 - type: nauc_recall_at_10_max value: 45.47878072860151 - type: nauc_recall_at_10_std value: 1.4188516615716378 - type: nauc_recall_at_1_diff1 value: 46.16376563218624 - type: nauc_recall_at_1_max value: 26.342624776802232 - type: nauc_recall_at_1_std value: -7.142171388751972 - type: nauc_recall_at_20_diff1 value: 35.805379874970086 - type: nauc_recall_at_20_max value: 51.80479822253392 - type: nauc_recall_at_20_std value: 13.531467576460143 - type: nauc_recall_at_3_diff1 value: 37.288500141631616 - type: nauc_recall_at_3_max value: 35.07078243516728 - type: nauc_recall_at_3_std value: -10.452926441410405 - type: nauc_recall_at_5_diff1 value: 34.83186104526897 - type: nauc_recall_at_5_max value: 39.58488976496973 - type: nauc_recall_at_5_std value: -6.3049292065708835 - type: ndcg_at_1 value: 50.839999999999996 - type: ndcg_at_10 value: 69.072 - type: ndcg_at_100 value: 71.538 - type: ndcg_at_1000 value: 71.77799999999999 - type: ndcg_at_20 value: 70.41 - type: ndcg_at_3 value: 62.544999999999995 - type: ndcg_at_5 value: 66.33099999999999 - type: precision_at_1 value: 50.839999999999996 - type: precision_at_10 value: 10.495000000000001 - type: precision_at_100 value: 1.1900000000000002 - type: precision_at_1000 value: 0.121 - type: precision_at_20 value: 5.5809999999999995 - type: precision_at_3 value: 27.636 - type: precision_at_5 value: 18.864 - type: recall_at_1 value: 45.483000000000004 - type: recall_at_10 value: 87.483 - type: recall_at_100 value: 97.844 - type: recall_at_1000 value: 99.66199999999999 - type: recall_at_20 value: 92.294 - type: recall_at_3 value: 71.2 - type: recall_at_5 value: 79.753 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: main_score value: 89.58 - type: map_at_1 value: 71.819 - type: map_at_10 value: 86.04899999999999 - type: map_at_100 value: 86.648 - type: map_at_1000 value: 
86.66199999999999 - type: map_at_20 value: 86.441 - type: map_at_3 value: 83.114 - type: map_at_5 value: 84.981 - type: mrr_at_1 value: 82.62 - type: mrr_at_10 value: 88.62899999999979 - type: mrr_at_100 value: 88.70918591324215 - type: mrr_at_1000 value: 88.70973091492397 - type: mrr_at_20 value: 88.68914765317221 - type: mrr_at_3 value: 87.74999999999979 - type: mrr_at_5 value: 88.36799999999974 - type: nauc_map_at_1000_diff1 value: 77.89207709760448 - type: nauc_map_at_1000_max value: 29.63371361495422 - type: nauc_map_at_1000_std value: -48.628180385874344 - type: nauc_map_at_100_diff1 value: 77.89592179104915 - type: nauc_map_at_100_max value: 29.617171506130756 - type: nauc_map_at_100_std value: -48.66057170774648 - type: nauc_map_at_10_diff1 value: 78.0618161228185 - type: nauc_map_at_10_max value: 29.178490609366737 - type: nauc_map_at_10_std value: -50.74755004592002 - type: nauc_map_at_1_diff1 value: 81.64335579973574 - type: nauc_map_at_1_max value: 21.813832226652174 - type: nauc_map_at_1_std value: -42.57570978190876 - type: nauc_map_at_20_diff1 value: 77.9299081005938 - type: nauc_map_at_20_max value: 29.458718470003888 - type: nauc_map_at_20_std value: -49.63337236763102 - type: nauc_map_at_3_diff1 value: 78.72941448509229 - type: nauc_map_at_3_max value: 26.600997896960056 - type: nauc_map_at_3_std value: -51.889002227479885 - type: nauc_map_at_5_diff1 value: 78.31466610917171 - type: nauc_map_at_5_max value: 28.09863984582896 - type: nauc_map_at_5_std value: -52.14058096096497 - type: nauc_mrr_at_1000_diff1 value: 78.42667263739992 - type: nauc_mrr_at_1000_max value: 31.98996235127974 - type: nauc_mrr_at_1000_std value: -44.380439148429296 - type: nauc_mrr_at_100_diff1 value: 78.42661032698115 - type: nauc_mrr_at_100_max value: 31.991652631740102 - type: nauc_mrr_at_100_std value: -44.37854108460535 - type: nauc_mrr_at_10_diff1 value: 78.39126022544136 - type: nauc_mrr_at_10_max value: 32.02023484451197 - type: nauc_mrr_at_10_std value: -44.561252349176954 - type: nauc_mrr_at_1_diff1 value: 79.21630894647448 - type: nauc_mrr_at_1_max value: 31.526303156060177 - type: nauc_mrr_at_1_std value: -41.887504422443136 - type: nauc_mrr_at_20_diff1 value: 78.42548039170424 - type: nauc_mrr_at_20_max value: 31.99588275070137 - type: nauc_mrr_at_20_std value: -44.44957722627042 - type: nauc_mrr_at_3_diff1 value: 78.26165151833735 - type: nauc_mrr_at_3_max value: 32.18028826126801 - type: nauc_mrr_at_3_std value: -44.6998237213182 - type: nauc_mrr_at_5_diff1 value: 78.34786430903962 - type: nauc_mrr_at_5_max value: 32.168476272879566 - type: nauc_mrr_at_5_std value: -44.7915919956712 - type: nauc_ndcg_at_1000_diff1 value: 77.79198355957816 - type: nauc_ndcg_at_1000_max value: 31.14363511518406 - type: nauc_ndcg_at_1000_std value: -46.69335151274275 - type: nauc_ndcg_at_100_diff1 value: 77.79898090286419 - type: nauc_ndcg_at_100_max value: 31.115103811629215 - type: nauc_ndcg_at_100_std value: -46.73078913421965 - type: nauc_ndcg_at_10_diff1 value: 77.74856635461343 - type: nauc_ndcg_at_10_max value: 30.279584686212747 - type: nauc_ndcg_at_10_std value: -50.23514662356807 - type: nauc_ndcg_at_1_diff1 value: 79.17833000040999 - type: nauc_ndcg_at_1_max value: 31.703788144510746 - type: nauc_ndcg_at_1_std value: -41.854817402870715 - type: nauc_ndcg_at_20_diff1 value: 77.7380353804671 - type: nauc_ndcg_at_20_max value: 30.622294129001553 - type: nauc_ndcg_at_20_std value: -49.035794761065254 - type: nauc_ndcg_at_3_diff1 value: 77.41476880573593 - type: nauc_ndcg_at_3_max value: 
29.015949978243032 - type: nauc_ndcg_at_3_std value: -49.78627087622648 - type: nauc_ndcg_at_5_diff1 value: 77.64439137502896 - type: nauc_ndcg_at_5_max value: 29.444684897492206 - type: nauc_ndcg_at_5_std value: -51.21908400252501 - type: nauc_precision_at_1000_diff1 value: -44.92396459446822 - type: nauc_precision_at_1000_max value: -3.674153720989045 - type: nauc_precision_at_1000_std value: 39.56552468277785 - type: nauc_precision_at_100_diff1 value: -44.75143023259094 - type: nauc_precision_at_100_max value: -3.705280025140011 - type: nauc_precision_at_100_std value: 39.433619999113326 - type: nauc_precision_at_10_diff1 value: -41.0651074726579 - type: nauc_precision_at_10_max value: -0.21097985601783667 - type: nauc_precision_at_10_std value: 26.24652824589493 - type: nauc_precision_at_1_diff1 value: 79.17833000040999 - type: nauc_precision_at_1_max value: 31.703788144510746 - type: nauc_precision_at_1_std value: -41.854817402870715 - type: nauc_precision_at_20_diff1 value: -43.368001340920294 - type: nauc_precision_at_20_max value: -2.036990010399129 - type: nauc_precision_at_20_std value: 32.37747041406297 - type: nauc_precision_at_3_diff1 value: -22.089307548346877 - type: nauc_precision_at_3_max value: 6.2280973175296 - type: nauc_precision_at_3_std value: 5.323992514036145 - type: nauc_precision_at_5_diff1 value: -34.07115055244003 - type: nauc_precision_at_5_max value: 2.5955315789198834 - type: nauc_precision_at_5_std value: 16.26096689407332 - type: nauc_recall_at_1000_diff1 value: 58.27703860947467 - type: nauc_recall_at_1000_max value: 68.59835835315768 - type: nauc_recall_at_1000_std value: 77.96687006056064 - type: nauc_recall_at_100_diff1 value: 73.24371223081737 - type: nauc_recall_at_100_max value: 39.55925344664591 - type: nauc_recall_at_100_std value: -32.25605030215798 - type: nauc_recall_at_10_diff1 value: 73.41261201339202 - type: nauc_recall_at_10_max value: 26.822979434062926 - type: nauc_recall_at_10_std value: -74.2909332592806 - type: nauc_recall_at_1_diff1 value: 81.64335579973574 - type: nauc_recall_at_1_max value: 21.813832226652174 - type: nauc_recall_at_1_std value: -42.57570978190876 - type: nauc_recall_at_20_diff1 value: 72.7621297920656 - type: nauc_recall_at_20_max value: 26.02492304096079 - type: nauc_recall_at_20_std value: -77.8724532438279 - type: nauc_recall_at_3_diff1 value: 75.25149312810714 - type: nauc_recall_at_3_max value: 23.20545662481487 - type: nauc_recall_at_3_std value: -59.69689982140521 - type: nauc_recall_at_5_diff1 value: 73.69807273001406 - type: nauc_recall_at_5_max value: 24.073666798066057 - type: nauc_recall_at_5_std value: -67.91121268130719 - type: ndcg_at_1 value: 82.64 - type: ndcg_at_10 value: 89.58 - type: ndcg_at_100 value: 90.606 - type: ndcg_at_1000 value: 90.676 - type: ndcg_at_20 value: 90.132 - type: ndcg_at_3 value: 86.88 - type: ndcg_at_5 value: 88.40299999999999 - type: precision_at_1 value: 82.64 - type: precision_at_10 value: 13.604 - type: precision_at_100 value: 1.539 - type: precision_at_1000 value: 0.157 - type: precision_at_20 value: 7.188 - type: precision_at_3 value: 38.083 - type: precision_at_5 value: 25.018 - type: recall_at_1 value: 71.819 - type: recall_at_10 value: 96.34700000000001 - type: recall_at_100 value: 99.715 - type: recall_at_1000 value: 99.995 - type: recall_at_20 value: 98.073 - type: recall_at_3 value: 88.57300000000001 - type: recall_at_5 value: 92.908 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 
24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: main_score value: 71.18966762070158 - type: v_measure value: 71.18966762070158 - type: v_measure_std value: 2.7498969054457048 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: main_score value: 74.42014716862516 - type: v_measure value: 74.42014716862516 - type: v_measure_std value: 9.909739891410648 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: main_score value: 25.041999999999998 - type: map_at_1 value: 5.893000000000001 - type: map_at_10 value: 15.260000000000002 - type: map_at_100 value: 18.084 - type: map_at_1000 value: 18.467 - type: map_at_20 value: 16.675 - type: map_at_3 value: 10.526 - type: map_at_5 value: 12.775 - type: mrr_at_1 value: 28.999999999999996 - type: mrr_at_10 value: 41.03575396825395 - type: mrr_at_100 value: 42.136771862785835 - type: mrr_at_1000 value: 42.16698555415099 - type: mrr_at_20 value: 41.707493696104315 - type: mrr_at_3 value: 37.34999999999998 - type: mrr_at_5 value: 39.59999999999995 - type: nauc_map_at_1000_diff1 value: 12.080002654911883 - type: nauc_map_at_1000_max value: 29.813563682286276 - type: nauc_map_at_1000_std value: 20.36659817908673 - type: nauc_map_at_100_diff1 value: 12.108735517749706 - type: nauc_map_at_100_max value: 29.76830671710955 - type: nauc_map_at_100_std value: 20.3433621032846 - type: nauc_map_at_10_diff1 value: 12.91575031185637 - type: nauc_map_at_10_max value: 29.427600958386318 - type: nauc_map_at_10_std value: 16.89867275177153 - type: nauc_map_at_1_diff1 value: 19.353069488987916 - type: nauc_map_at_1_max value: 17.093914951159693 - type: nauc_map_at_1_std value: 8.19886078055046 - type: nauc_map_at_20_diff1 value: 11.977233457943113 - type: nauc_map_at_20_max value: 29.171812822948805 - type: nauc_map_at_20_std value: 18.780517506173965 - type: nauc_map_at_3_diff1 value: 14.453129464176092 - type: nauc_map_at_3_max value: 25.801958649112077 - type: nauc_map_at_3_std value: 11.572823684429643 - type: nauc_map_at_5_diff1 value: 13.167155808104997 - type: nauc_map_at_5_max value: 27.355626948365792 - type: nauc_map_at_5_std value: 14.414151839192183 - type: nauc_mrr_at_1000_diff1 value: 17.262104643988636 - type: nauc_mrr_at_1000_max value: 23.991373837217058 - type: nauc_mrr_at_1000_std value: 12.44755488671623 - type: nauc_mrr_at_100_diff1 value: 17.267280132318703 - type: nauc_mrr_at_100_max value: 24.022189287889294 - type: nauc_mrr_at_100_std value: 12.480695500214788 - type: nauc_mrr_at_10_diff1 value: 17.012383998246268 - type: nauc_mrr_at_10_max value: 24.192637911171722 - type: nauc_mrr_at_10_std value: 12.524608847408917 - type: nauc_mrr_at_1_diff1 value: 19.43518811038007 - type: nauc_mrr_at_1_max value: 17.747482933395602 - type: nauc_mrr_at_1_std value: 8.410779775558684 - type: nauc_mrr_at_20_diff1 value: 17.202663281407446 - type: nauc_mrr_at_20_max value: 24.091991130543118 - type: nauc_mrr_at_20_std value: 12.503814263019908 - type: nauc_mrr_at_3_diff1 value: 17.52733013432995 - type: nauc_mrr_at_3_max value: 23.569459518780214 - type: nauc_mrr_at_3_std value: 11.770846827520726 - type: nauc_mrr_at_5_diff1 value: 17.10817561975543 - type: nauc_mrr_at_5_max value: 23.945141435234678 - type: nauc_mrr_at_5_std value: 12.034468615317719 - type: nauc_ndcg_at_1000_diff1 value: 
12.317811393346936 - type: nauc_ndcg_at_1000_max value: 30.809991350156103 - type: nauc_ndcg_at_1000_std value: 24.517501065205067 - type: nauc_ndcg_at_100_diff1 value: 12.824804203182936 - type: nauc_ndcg_at_100_max value: 30.895499817010748 - type: nauc_ndcg_at_100_std value: 25.424376279745402 - type: nauc_ndcg_at_10_diff1 value: 13.32724552457439 - type: nauc_ndcg_at_10_max value: 30.409088666807456 - type: nauc_ndcg_at_10_std value: 18.216330475714113 - type: nauc_ndcg_at_1_diff1 value: 19.43518811038007 - type: nauc_ndcg_at_1_max value: 17.747482933395602 - type: nauc_ndcg_at_1_std value: 8.410779775558684 - type: nauc_ndcg_at_20_diff1 value: 12.224399111852902 - type: nauc_ndcg_at_20_max value: 29.86352330445272 - type: nauc_ndcg_at_20_std value: 21.196937851331807 - type: nauc_ndcg_at_3_diff1 value: 15.367489533734027 - type: nauc_ndcg_at_3_max value: 26.76486390741532 - type: nauc_ndcg_at_3_std value: 12.606077508789923 - type: nauc_ndcg_at_5_diff1 value: 13.831157482390935 - type: nauc_ndcg_at_5_max value: 28.070226983968904 - type: nauc_ndcg_at_5_std value: 15.236787943125435 - type: nauc_precision_at_1000_diff1 value: 0.016122957101357048 - type: nauc_precision_at_1000_max value: 24.380929903557334 - type: nauc_precision_at_1000_std value: 34.54045112720052 - type: nauc_precision_at_100_diff1 value: 7.255224788507301 - type: nauc_precision_at_100_max value: 27.98453788447542 - type: nauc_precision_at_100_std value: 35.38999555441665 - type: nauc_precision_at_10_diff1 value: 9.69185099834181 - type: nauc_precision_at_10_max value: 32.532315522580454 - type: nauc_precision_at_10_std value: 21.48948348473612 - type: nauc_precision_at_1_diff1 value: 19.43518811038007 - type: nauc_precision_at_1_max value: 17.747482933395602 - type: nauc_precision_at_1_std value: 8.410779775558684 - type: nauc_precision_at_20_diff1 value: 6.964076536695672 - type: nauc_precision_at_20_max value: 29.30087236410044 - type: nauc_precision_at_20_std value: 26.413625895571986 - type: nauc_precision_at_3_diff1 value: 14.145134359925155 - type: nauc_precision_at_3_max value: 29.915650960808303 - type: nauc_precision_at_3_std value: 14.095370019867797 - type: nauc_precision_at_5_diff1 value: 11.043933558522692 - type: nauc_precision_at_5_max value: 30.93016505807111 - type: nauc_precision_at_5_std value: 17.749256196062603 - type: nauc_recall_at_1000_diff1 value: -0.7776817772090345 - type: nauc_recall_at_1000_max value: 23.094717340324518 - type: nauc_recall_at_1000_std value: 37.189908681396425 - type: nauc_recall_at_100_diff1 value: 6.887748742013364 - type: nauc_recall_at_100_max value: 27.00798435230277 - type: nauc_recall_at_100_std value: 35.908147807345344 - type: nauc_recall_at_10_diff1 value: 9.605632017480751 - type: nauc_recall_at_10_max value: 31.845202901168655 - type: nauc_recall_at_10_std value: 21.497414586634683 - type: nauc_recall_at_1_diff1 value: 19.353069488987916 - type: nauc_recall_at_1_max value: 17.093914951159693 - type: nauc_recall_at_1_std value: 8.19886078055046 - type: nauc_recall_at_20_diff1 value: 6.927503731844782 - type: nauc_recall_at_20_max value: 28.611698183338202 - type: nauc_recall_at_20_std value: 26.69018660149911 - type: nauc_recall_at_3_diff1 value: 14.043724087062268 - type: nauc_recall_at_3_max value: 29.269835821380465 - type: nauc_recall_at_3_std value: 14.104419605998094 - type: nauc_recall_at_5_diff1 value: 11.017319452873336 - type: nauc_recall_at_5_max value: 30.295720628306228 - type: nauc_recall_at_5_std value: 17.758048545573825 - type: ndcg_at_1 
value: 28.999999999999996 - type: ndcg_at_10 value: 25.041999999999998 - type: ndcg_at_100 value: 35.045 - type: ndcg_at_1000 value: 40.803 - type: ndcg_at_20 value: 28.584 - type: ndcg_at_3 value: 23.249 - type: ndcg_at_5 value: 20.533 - type: precision_at_1 value: 28.999999999999996 - type: precision_at_10 value: 13.120000000000001 - type: precision_at_100 value: 2.7470000000000003 - type: precision_at_1000 value: 0.41200000000000003 - type: precision_at_20 value: 8.584999999999999 - type: precision_at_3 value: 21.633 - type: precision_at_5 value: 18.099999999999998 - type: recall_at_1 value: 5.893000000000001 - type: recall_at_10 value: 26.567 - type: recall_at_100 value: 55.800000000000004 - type: recall_at_1000 value: 83.608 - type: recall_at_20 value: 34.86 - type: recall_at_3 value: 13.153 - type: recall_at_5 value: 18.323 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: cosine_pearson value: 86.57284584320382 - type: cosine_spearman value: 82.20531642680812 - type: euclidean_pearson value: 83.94261758556554 - type: euclidean_spearman value: 82.20721497738559 - type: main_score value: 82.20531642680812 - type: manhattan_pearson value: 84.15902154703083 - type: manhattan_spearman value: 82.19506027155957 - type: pearson value: 86.57284584320382 - type: spearman value: 82.20531642680812 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cosine_pearson value: 86.28047602146931 - type: cosine_spearman value: 79.51504881448884 - type: euclidean_pearson value: 83.10545189967856 - type: euclidean_spearman value: 79.50586960492797 - type: main_score value: 79.51504881448884 - type: manhattan_pearson value: 83.44244457500889 - type: manhattan_spearman value: 79.730303339846 - type: pearson value: 86.28047602146931 - type: spearman value: 79.51504881448884 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cosine_pearson value: 88.74723553048702 - type: cosine_spearman value: 89.18936052329725 - type: euclidean_pearson value: 88.90400878928668 - type: euclidean_spearman value: 89.19174821431281 - type: main_score value: 89.18936052329725 - type: manhattan_pearson value: 88.81504628424054 - type: manhattan_spearman value: 89.18063294142597 - type: pearson value: 88.74723553048702 - type: spearman value: 89.18936052329725 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cosine_pearson value: 86.45403437836023 - type: cosine_spearman value: 85.14654611519086 - type: euclidean_pearson value: 85.87509624462743 - type: euclidean_spearman value: 85.1391108856681 - type: main_score value: 85.14654611519086 - type: manhattan_pearson value: 85.96635794953866 - type: manhattan_spearman value: 85.3271371527667 - type: pearson value: 86.45403437836023 - type: spearman value: 85.14654611519086 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cosine_pearson value: 87.84742260009705 - type: cosine_spearman value: 89.10215217191254 - type: euclidean_pearson value: 88.97393286325477 - type: euclidean_spearman value: 89.1014105509662 - type: main_score value: 
89.10215217191254 - type: manhattan_pearson value: 89.31698781090151 - type: manhattan_spearman value: 89.53000001764433 - type: pearson value: 87.84742260009705 - type: spearman value: 89.10215217191254 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cosine_pearson value: 85.22397535461835 - type: cosine_spearman value: 87.14066355879785 - type: euclidean_pearson value: 86.31393364087295 - type: euclidean_spearman value: 87.14018892702765 - type: main_score value: 87.14066355879785 - type: manhattan_pearson value: 86.36366855248434 - type: manhattan_spearman value: 87.20858630423012 - type: pearson value: 85.22397535461835 - type: spearman value: 87.14066355879785 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 90.66131612061355 - type: cosine_spearman value: 90.97082650129164 - type: euclidean_pearson value: 90.98181906744969 - type: euclidean_spearman value: 90.99008476850047 - type: main_score value: 90.97082650129164 - type: manhattan_pearson value: 90.75245040709021 - type: manhattan_spearman value: 90.6199877691265 - type: pearson value: 90.66131612061355 - type: spearman value: 90.97082650129164 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 67.270656447085 - type: cosine_spearman value: 67.82870469746828 - type: euclidean_pearson value: 69.03857775285664 - type: euclidean_spearman value: 67.74455108773341 - type: main_score value: 67.82870469746828 - type: manhattan_pearson value: 69.25304172245812 - type: manhattan_spearman value: 68.00987097916055 - type: pearson value: 67.270656447085 - type: spearman value: 67.82870469746828 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cosine_pearson value: 87.17245205384889 - type: cosine_spearman value: 87.7360146030987 - type: euclidean_pearson value: 87.48919412794656 - type: euclidean_spearman value: 87.7312047878383 - type: main_score value: 87.7360146030987 - type: manhattan_pearson value: 87.61476224354806 - type: manhattan_spearman value: 87.95220889254693 - type: pearson value: 87.17245205384889 - type: spearman value: 87.7360146030987 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: main_score value: 88.43547871921146 - type: map value: 88.43547871921146 - type: mrr value: 96.5564473652709 - type: nAUC_map_diff1 value: -13.66029392579231 - type: nAUC_map_max value: 50.325613574053506 - type: nAUC_map_std value: 60.02986231275796 - type: nAUC_mrr_diff1 value: 23.83821476411125 - type: nAUC_mrr_max value: 86.72643311769906 - type: nAUC_mrr_std value: 72.12741063469213 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: main_score value: 78.233 - type: map_at_1 value: 61.49400000000001 - type: map_at_10 value: 73.30600000000001 - type: map_at_100 value: 73.719 - type: map_at_1000 value: 73.724 - type: map_at_20 value: 73.611 - type: map_at_3 value: 70.626 - 
type: map_at_5 value: 72.417 - type: mrr_at_1 value: 64.66666666666666 - type: mrr_at_10 value: 74.30357142857143 - type: mrr_at_100 value: 74.56950898079988 - type: mrr_at_1000 value: 74.57295833098681 - type: mrr_at_20 value: 74.46165223665226 - type: mrr_at_3 value: 72.3888888888889 - type: mrr_at_5 value: 73.60555555555557 - type: nauc_map_at_1000_diff1 value: 76.51524604780636 - type: nauc_map_at_1000_max value: 53.48521938401881 - type: nauc_map_at_1000_std value: -7.347799382158861 - type: nauc_map_at_100_diff1 value: 76.5122888096236 - type: nauc_map_at_100_max value: 53.49221847471618 - type: nauc_map_at_100_std value: -7.329683735681086 - type: nauc_map_at_10_diff1 value: 76.30928630674504 - type: nauc_map_at_10_max value: 53.00102977185941 - type: nauc_map_at_10_std value: -7.7467740085108705 - type: nauc_map_at_1_diff1 value: 79.54189281784247 - type: nauc_map_at_1_max value: 46.630071622109526 - type: nauc_map_at_1_std value: -14.395943134644112 - type: nauc_map_at_20_diff1 value: 76.41604361947962 - type: nauc_map_at_20_max value: 53.578883876146875 - type: nauc_map_at_20_std value: -7.403103451288041 - type: nauc_map_at_3_diff1 value: 76.25911617571941 - type: nauc_map_at_3_max value: 49.140287380513605 - type: nauc_map_at_3_std value: -11.35992449218983 - type: nauc_map_at_5_diff1 value: 76.35122077770336 - type: nauc_map_at_5_max value: 52.1744367901208 - type: nauc_map_at_5_std value: -7.85753955055384 - type: nauc_mrr_at_1000_diff1 value: 76.97223309515867 - type: nauc_mrr_at_1000_max value: 57.263787498613326 - type: nauc_mrr_at_1000_std value: -4.884090708840035 - type: nauc_mrr_at_100_diff1 value: 76.97312970894603 - type: nauc_mrr_at_100_max value: 57.26850730446478 - type: nauc_mrr_at_100_std value: -4.875200894216617 - type: nauc_mrr_at_10_diff1 value: 76.65927674223613 - type: nauc_mrr_at_10_max value: 57.30979763941454 - type: nauc_mrr_at_10_std value: -4.863331094022142 - type: nauc_mrr_at_1_diff1 value: 80.0454932568644 - type: nauc_mrr_at_1_max value: 56.76038421319305 - type: nauc_mrr_at_1_std value: -4.101939392632653 - type: nauc_mrr_at_20_diff1 value: 76.87237970440503 - type: nauc_mrr_at_20_max value: 57.33843605225869 - type: nauc_mrr_at_20_std value: -4.96248984417978 - type: nauc_mrr_at_3_diff1 value: 76.74130186666727 - type: nauc_mrr_at_3_max value: 56.19313244846155 - type: nauc_mrr_at_3_std value: -5.684365934009136 - type: nauc_mrr_at_5_diff1 value: 76.66406918799962 - type: nauc_mrr_at_5_max value: 57.56110093228628 - type: nauc_mrr_at_5_std value: -3.7464413085588073 - type: nauc_ndcg_at_1000_diff1 value: 76.19194173971773 - type: nauc_ndcg_at_1000_max value: 55.57464600170693 - type: nauc_ndcg_at_1000_std value: -6.0761689532372625 - type: nauc_ndcg_at_100_diff1 value: 76.14631273843654 - type: nauc_ndcg_at_100_max value: 55.72246565373382 - type: nauc_ndcg_at_100_std value: -5.595160698860595 - type: nauc_ndcg_at_10_diff1 value: 75.0108223611192 - type: nauc_ndcg_at_10_max value: 55.27894212877493 - type: nauc_ndcg_at_10_std value: -6.968331740214591 - type: nauc_ndcg_at_1_diff1 value: 80.0454932568644 - type: nauc_ndcg_at_1_max value: 56.76038421319305 - type: nauc_ndcg_at_1_std value: -4.101939392632653 - type: nauc_ndcg_at_20_diff1 value: 75.54887755702472 - type: nauc_ndcg_at_20_max value: 56.406879417251496 - type: nauc_ndcg_at_20_std value: -6.495231061329629 - type: nauc_ndcg_at_3_diff1 value: 75.03620356688509 - type: nauc_ndcg_at_3_max value: 52.147381077773424 - type: nauc_ndcg_at_3_std value: -8.448005688956199 - type: 
nauc_ndcg_at_5_diff1 value: 75.1195898074229 - type: nauc_ndcg_at_5_max value: 54.2321033861173 - type: nauc_ndcg_at_5_std value: -5.882690780895338 - type: nauc_precision_at_1000_diff1 value: -28.081979732100532 - type: nauc_precision_at_1000_max value: 35.055348014832916 - type: nauc_precision_at_1000_std value: 59.61280468927384 - type: nauc_precision_at_100_diff1 value: -25.112740730587458 - type: nauc_precision_at_100_max value: 38.26331300116496 - type: nauc_precision_at_100_std value: 62.46316222328831 - type: nauc_precision_at_10_diff1 value: -2.6766206473658833 - type: nauc_precision_at_10_max value: 45.95321867204845 - type: nauc_precision_at_10_std value: 45.07212468670564 - type: nauc_precision_at_1_diff1 value: 80.0454932568644 - type: nauc_precision_at_1_max value: 56.76038421319305 - type: nauc_precision_at_1_std value: -4.101939392632653 - type: nauc_precision_at_20_diff1 value: -10.698911116738385 - type: nauc_precision_at_20_max value: 43.467275950182994 - type: nauc_precision_at_20_std value: 48.00467321991766 - type: nauc_precision_at_3_diff1 value: 33.6344708541193 - type: nauc_precision_at_3_max value: 49.309242331670504 - type: nauc_precision_at_3_std value: 21.02940391379915 - type: nauc_precision_at_5_diff1 value: 13.560415600596318 - type: nauc_precision_at_5_max value: 48.918726500100085 - type: nauc_precision_at_5_std value: 39.940930429172184 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: 70.82166199813196 - type: nauc_recall_at_100_max value: 76.6106442577042 - type: nauc_recall_at_100_std value: 66.47992530345513 - type: nauc_recall_at_10_diff1 value: 62.68908885556092 - type: nauc_recall_at_10_max value: 58.14262437741839 - type: nauc_recall_at_10_std value: -12.946717875063369 - type: nauc_recall_at_1_diff1 value: 79.54189281784247 - type: nauc_recall_at_1_max value: 46.630071622109526 - type: nauc_recall_at_1_std value: -14.395943134644112 - type: nauc_recall_at_20_diff1 value: 65.79470497876567 - type: nauc_recall_at_20_max value: 71.68308183488456 - type: nauc_recall_at_20_std value: -12.556850697268453 - type: nauc_recall_at_3_diff1 value: 68.3240211318129 - type: nauc_recall_at_3_max value: 45.05998217275036 - type: nauc_recall_at_3_std value: -14.23179772593869 - type: nauc_recall_at_5_diff1 value: 67.53366869904056 - type: nauc_recall_at_5_max value: 53.57935627081027 - type: nauc_recall_at_5_std value: -3.3271112904853393 - type: ndcg_at_1 value: 64.667 - type: ndcg_at_10 value: 78.233 - type: ndcg_at_100 value: 79.806 - type: ndcg_at_1000 value: 79.92099999999999 - type: ndcg_at_20 value: 79.006 - type: ndcg_at_3 value: 74.018 - type: ndcg_at_5 value: 76.334 - type: precision_at_1 value: 64.667 - type: precision_at_10 value: 10.4 - type: precision_at_100 value: 1.1199999999999999 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_20 value: 5.383 - type: precision_at_3 value: 29.444 - type: precision_at_5 value: 19.467000000000002 - type: recall_at_1 value: 61.49400000000001 - type: recall_at_10 value: 92.156 - type: recall_at_100 value: 99.167 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 94.833 - type: recall_at_3 value: 80.833 - type: recall_at_5 value: 86.6 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - 
type: cosine_accuracy value: 99.8039603960396 - type: cosine_accuracy_threshold value: 84.54211950302124 - type: cosine_ap value: 95.59056372734358 - type: cosine_f1 value: 90.1394422310757 - type: cosine_f1_threshold value: 84.54211950302124 - type: cosine_precision value: 89.78174603174604 - type: cosine_recall value: 90.5 - type: dot_accuracy value: 99.80594059405941 - type: dot_accuracy_threshold value: 85.57180166244507 - type: dot_ap value: 95.53453431914399 - type: dot_f1 value: 90.10442565887618 - type: dot_f1_threshold value: 84.59715843200684 - type: dot_precision value: 89.61424332344214 - type: dot_recall value: 90.60000000000001 - type: euclidean_accuracy value: 99.8039603960396 - type: euclidean_accuracy_threshold value: 53.253382444381714 - type: euclidean_ap value: 95.5850992402159 - type: euclidean_f1 value: 90.09457441513192 - type: euclidean_f1_threshold value: 55.725520849227905 - type: euclidean_precision value: 89.69276511397423 - type: euclidean_recall value: 90.5 - type: main_score value: 95.7485189884476 - type: manhattan_accuracy value: 99.81485148514851 - type: manhattan_accuracy_threshold value: 3491.29638671875 - type: manhattan_ap value: 95.7485189884476 - type: manhattan_f1 value: 90.464048954615 - type: manhattan_f1_threshold value: 3491.29638671875 - type: manhattan_precision value: 92.2996878251821 - type: manhattan_recall value: 88.7 - type: max_ap value: 95.7485189884476 - type: max_f1 value: 90.464048954615 - type: max_precision value: 92.2996878251821 - type: max_recall value: 90.60000000000001 - type: similarity_accuracy value: 99.8039603960396 - type: similarity_accuracy_threshold value: 84.54211950302124 - type: similarity_ap value: 95.59056372734358 - type: similarity_f1 value: 90.1394422310757 - type: similarity_f1_threshold value: 84.54211950302124 - type: similarity_precision value: 89.78174603174604 - type: similarity_recall value: 90.5 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: main_score value: 78.49205191950675 - type: v_measure value: 78.49205191950675 - type: v_measure_std value: 2.84869550699959 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: main_score value: 48.90421736513028 - type: v_measure value: 48.90421736513028 - type: v_measure_std value: 1.6875865714471023 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: main_score value: 52.9874730481696 - type: map value: 52.9874730481696 - type: mrr value: 53.85867604617604 - type: nAUC_map_diff1 value: 39.633429293407616 - type: nAUC_map_max value: 10.236807988858546 - type: nAUC_map_std value: 10.276522217929674 - type: nAUC_mrr_diff1 value: 40.0543079218377 - type: nAUC_mrr_max value: 10.96209807382042 - type: nAUC_mrr_std value: 10.524400196109918 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cosine_pearson value: 30.727801109114232 - type: cosine_spearman value: 31.66058223980157 - type: dot_pearson value: 30.78818248622866 - type: dot_spearman value: 31.525158776890265 - type: main_score 
value: 31.66058223980157 - type: pearson value: 30.727801109114232 - type: spearman value: 31.66058223980157 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: main_score value: 85.206 - type: map_at_1 value: 0.246 - type: map_at_10 value: 2.1950000000000003 - type: map_at_100 value: 14.179 - type: map_at_1000 value: 35.037 - type: map_at_20 value: 4.143 - type: map_at_3 value: 0.7100000000000001 - type: map_at_5 value: 1.135 - type: mrr_at_1 value: 94.0 - type: mrr_at_10 value: 96.66666666666666 - type: mrr_at_100 value: 96.66666666666666 - type: mrr_at_1000 value: 96.66666666666666 - type: mrr_at_20 value: 96.66666666666666 - type: mrr_at_3 value: 96.66666666666666 - type: mrr_at_5 value: 96.66666666666666 - type: nauc_map_at_1000_diff1 value: -4.6264497624527525 - type: nauc_map_at_1000_max value: 44.594457564749355 - type: nauc_map_at_1000_std value: 73.17642341400133 - type: nauc_map_at_100_diff1 value: 23.451335157405726 - type: nauc_map_at_100_max value: 25.426398857299525 - type: nauc_map_at_100_std value: 64.07416694472633 - type: nauc_map_at_10_diff1 value: 46.57568738568346 - type: nauc_map_at_10_max value: 9.693233249079238 - type: nauc_map_at_10_std value: 28.549530265164357 - type: nauc_map_at_1_diff1 value: 53.48238396620123 - type: nauc_map_at_1_max value: 0.33476619393733076 - type: nauc_map_at_1_std value: 8.906362219128463 - type: nauc_map_at_20_diff1 value: 39.40719602207749 - type: nauc_map_at_20_max value: 9.635915072074045 - type: nauc_map_at_20_std value: 35.15634791346394 - type: nauc_map_at_3_diff1 value: 53.11784737840137 - type: nauc_map_at_3_max value: 3.059682761072153 - type: nauc_map_at_3_std value: 21.310633086556617 - type: nauc_map_at_5_diff1 value: 49.91570701185436 - type: nauc_map_at_5_max value: 8.045082896244576 - type: nauc_map_at_5_std value: 20.597686235051647 - type: nauc_mrr_at_1000_diff1 value: 41.98412698412726 - type: nauc_mrr_at_1000_max value: 78.24463118580779 - type: nauc_mrr_at_1000_std value: 0.30812324930028195 - type: nauc_mrr_at_100_diff1 value: 41.98412698412726 - type: nauc_mrr_at_100_max value: 78.24463118580779 - type: nauc_mrr_at_100_std value: 0.30812324930028195 - type: nauc_mrr_at_10_diff1 value: 41.98412698412726 - type: nauc_mrr_at_10_max value: 78.24463118580779 - type: nauc_mrr_at_10_std value: 0.30812324930028195 - type: nauc_mrr_at_1_diff1 value: 38.62433862433873 - type: nauc_mrr_at_1_max value: 80.78120136943666 - type: nauc_mrr_at_1_std value: -10.768751945222197 - type: nauc_mrr_at_20_diff1 value: 41.98412698412726 - type: nauc_mrr_at_20_max value: 78.24463118580779 - type: nauc_mrr_at_20_std value: 0.30812324930028195 - type: nauc_mrr_at_3_diff1 value: 41.98412698412726 - type: nauc_mrr_at_3_max value: 78.24463118580779 - type: nauc_mrr_at_3_std value: 0.30812324930028195 - type: nauc_mrr_at_5_diff1 value: 41.98412698412726 - type: nauc_mrr_at_5_max value: 78.24463118580779 - type: nauc_mrr_at_5_std value: 0.30812324930028195 - type: nauc_ndcg_at_1000_diff1 value: 0.5174948602880207 - type: nauc_ndcg_at_1000_max value: 48.60686602077053 - type: nauc_ndcg_at_1000_std value: 75.72456343175277 - type: nauc_ndcg_at_100_diff1 value: -20.747252137999254 - type: nauc_ndcg_at_100_max value: 49.985132618254994 - type: nauc_ndcg_at_100_std value: 61.096383293836574 - type: nauc_ndcg_at_10_diff1 value: 6.791377920463332 - type: nauc_ndcg_at_10_max value: 57.50019332833286 - type: nauc_ndcg_at_10_std value: 
49.201028841219426 - type: nauc_ndcg_at_1_diff1 value: 54.92683440362145 - type: nauc_ndcg_at_1_max value: 83.8667228129276 - type: nauc_ndcg_at_1_std value: 1.6738604063586122 - type: nauc_ndcg_at_20_diff1 value: -5.1948699196314925 - type: nauc_ndcg_at_20_max value: 54.483087684806556 - type: nauc_ndcg_at_20_std value: 50.54823818118781 - type: nauc_ndcg_at_3_diff1 value: 26.267246500164372 - type: nauc_ndcg_at_3_max value: 63.0173212926611 - type: nauc_ndcg_at_3_std value: 41.025597406368256 - type: nauc_ndcg_at_5_diff1 value: 16.910185454343036 - type: nauc_ndcg_at_5_max value: 60.9328683868778 - type: nauc_ndcg_at_5_std value: 36.70169905857712 - type: nauc_precision_at_1000_diff1 value: -46.374447765983525 - type: nauc_precision_at_1000_max value: 35.36052337813863 - type: nauc_precision_at_1000_std value: 14.219220668161018 - type: nauc_precision_at_100_diff1 value: -29.7838083657744 - type: nauc_precision_at_100_max value: 43.93589400385112 - type: nauc_precision_at_100_std value: 55.425045718579945 - type: nauc_precision_at_10_diff1 value: -12.016613405227687 - type: nauc_precision_at_10_max value: 57.79924427743131 - type: nauc_precision_at_10_std value: 49.022036703550675 - type: nauc_precision_at_1_diff1 value: 38.62433862433873 - type: nauc_precision_at_1_max value: 80.78120136943666 - type: nauc_precision_at_1_std value: -10.768751945222197 - type: nauc_precision_at_20_diff1 value: -23.95633847880195 - type: nauc_precision_at_20_max value: 48.34715917258276 - type: nauc_precision_at_20_std value: 48.82198285255887 - type: nauc_precision_at_3_diff1 value: 6.871296905858807 - type: nauc_precision_at_3_max value: 70.54805793285054 - type: nauc_precision_at_3_std value: 44.65108624094803 - type: nauc_precision_at_5_diff1 value: -9.074932448759695 - type: nauc_precision_at_5_max value: 67.41284242437573 - type: nauc_precision_at_5_std value: 23.876891983919577 - type: nauc_recall_at_1000_diff1 value: 8.142288830293255 - type: nauc_recall_at_1000_max value: 38.85182826835104 - type: nauc_recall_at_1000_std value: 68.60783819217335 - type: nauc_recall_at_100_diff1 value: 34.262914076287466 - type: nauc_recall_at_100_max value: 12.87009658528838 - type: nauc_recall_at_100_std value: 56.21330603762995 - type: nauc_recall_at_10_diff1 value: 49.33830945338758 - type: nauc_recall_at_10_max value: 0.3539875530671406 - type: nauc_recall_at_10_std value: 26.85864465557644 - type: nauc_recall_at_1_diff1 value: 53.48238396620123 - type: nauc_recall_at_1_max value: 0.33476619393733076 - type: nauc_recall_at_1_std value: 8.906362219128463 - type: nauc_recall_at_20_diff1 value: 44.21928181266254 - type: nauc_recall_at_20_max value: -0.9198356057088594 - type: nauc_recall_at_20_std value: 31.484376992896784 - type: nauc_recall_at_3_diff1 value: 53.038093080990876 - type: nauc_recall_at_3_max value: -1.4170895916973003 - type: nauc_recall_at_3_std value: 21.890202855574497 - type: nauc_recall_at_5_diff1 value: 49.39742214825278 - type: nauc_recall_at_5_max value: 2.8412267611894517 - type: nauc_recall_at_5_std value: 18.01598921859512 - type: ndcg_at_1 value: 91.0 - type: ndcg_at_10 value: 85.206 - type: ndcg_at_100 value: 67.29 - type: ndcg_at_1000 value: 60.584 - type: ndcg_at_20 value: 82.321 - type: ndcg_at_3 value: 88.642 - type: ndcg_at_5 value: 87.063 - type: precision_at_1 value: 94.0 - type: precision_at_10 value: 89.8 - type: precision_at_100 value: 69.78 - type: precision_at_1000 value: 26.738 - type: precision_at_20 value: 87.2 - type: precision_at_3 value: 92.0 - type: precision_at_5 
value: 90.8 - type: recall_at_1 value: 0.246 - type: recall_at_10 value: 2.344 - type: recall_at_100 value: 16.962 - type: recall_at_1000 value: 57.325 - type: recall_at_20 value: 4.517 - type: recall_at_3 value: 0.731 - type: recall_at_5 value: 1.1780000000000002 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: main_score value: 31.455 - type: map_at_1 value: 2.9739999999999998 - type: map_at_10 value: 12.183 - type: map_at_100 value: 18.772 - type: map_at_1000 value: 20.415 - type: map_at_20 value: 14.451 - type: map_at_3 value: 6.507000000000001 - type: map_at_5 value: 8.66 - type: mrr_at_1 value: 40.816326530612244 - type: mrr_at_10 value: 57.70975056689341 - type: mrr_at_100 value: 58.18379126542391 - type: mrr_at_1000 value: 58.18379126542391 - type: mrr_at_20 value: 57.85552316164561 - type: mrr_at_3 value: 54.08163265306123 - type: mrr_at_5 value: 56.42857142857143 - type: nauc_map_at_1000_diff1 value: 3.1567471051481437 - type: nauc_map_at_1000_max value: -1.5882060729791523 - type: nauc_map_at_1000_std value: 18.69622198722074 - type: nauc_map_at_100_diff1 value: 3.3449677678147536 - type: nauc_map_at_100_max value: -2.8928606866168405 - type: nauc_map_at_100_std value: 15.789984947653412 - type: nauc_map_at_10_diff1 value: 2.9696743570444264 - type: nauc_map_at_10_max value: -9.096749212011876 - type: nauc_map_at_10_std value: -5.38545817258353 - type: nauc_map_at_1_diff1 value: 20.680780404542546 - type: nauc_map_at_1_max value: -7.04722927447817 - type: nauc_map_at_1_std value: -7.062494733973898 - type: nauc_map_at_20_diff1 value: 4.070437790119271 - type: nauc_map_at_20_max value: -4.84491434686032 - type: nauc_map_at_20_std value: 0.5846341109021014 - type: nauc_map_at_3_diff1 value: 11.9634978045925 - type: nauc_map_at_3_max value: -8.27834591046608 - type: nauc_map_at_3_std value: -8.687615453381065 - type: nauc_map_at_5_diff1 value: 0.9195191526009436 - type: nauc_map_at_5_max value: -1.673813362719489 - type: nauc_map_at_5_std value: -6.67549753473631 - type: nauc_mrr_at_1000_diff1 value: 19.877993208719573 - type: nauc_mrr_at_1000_max value: -10.37776706406218 - type: nauc_mrr_at_1000_std value: 7.132169578056367 - type: nauc_mrr_at_100_diff1 value: 19.877993208719573 - type: nauc_mrr_at_100_max value: -10.37776706406218 - type: nauc_mrr_at_100_std value: 7.132169578056367 - type: nauc_mrr_at_10_diff1 value: 20.414285568401457 - type: nauc_mrr_at_10_max value: -9.677800295687861 - type: nauc_mrr_at_10_std value: 8.001103690180859 - type: nauc_mrr_at_1_diff1 value: 22.393284073955723 - type: nauc_mrr_at_1_max value: -5.889370191243167 - type: nauc_mrr_at_1_std value: -1.5183536173658247 - type: nauc_mrr_at_20_diff1 value: 20.455564720604055 - type: nauc_mrr_at_20_max value: -10.230642830103074 - type: nauc_mrr_at_20_std value: 7.863582453266621 - type: nauc_mrr_at_3_diff1 value: 17.554895390732618 - type: nauc_mrr_at_3_max value: -15.618463505555052 - type: nauc_mrr_at_3_std value: 5.913231577966864 - type: nauc_mrr_at_5_diff1 value: 18.393678507779914 - type: nauc_mrr_at_5_max value: -11.903593353147762 - type: nauc_mrr_at_5_std value: 7.580745996262831 - type: nauc_ndcg_at_1000_diff1 value: 13.746937095530473 - type: nauc_ndcg_at_1000_max value: -0.9319249687895838 - type: nauc_ndcg_at_1000_std value: 38.56328031451904 - type: nauc_ndcg_at_100_diff1 value: 13.854865944415895 - type: nauc_ndcg_at_100_max value: -7.142142012591404 - type: 
nauc_ndcg_at_100_std value: 35.61341954818848 - type: nauc_ndcg_at_10_diff1 value: 9.010144273248759 - type: nauc_ndcg_at_10_max value: -15.320014897424574 - type: nauc_ndcg_at_10_std value: 2.84883880489144 - type: nauc_ndcg_at_1_diff1 value: 20.939533945592967 - type: nauc_ndcg_at_1_max value: -6.387319972188946 - type: nauc_ndcg_at_1_std value: -0.5258673122126726 - type: nauc_ndcg_at_20_diff1 value: 14.660827309009496 - type: nauc_ndcg_at_20_max value: -13.476196120145994 - type: nauc_ndcg_at_20_std value: 8.22391881710838 - type: nauc_ndcg_at_3_diff1 value: 13.429985227235935 - type: nauc_ndcg_at_3_max value: -14.904544592570247 - type: nauc_ndcg_at_3_std value: 1.599779998183342 - type: nauc_ndcg_at_5_diff1 value: 8.085466231900622 - type: nauc_ndcg_at_5_max value: -9.09591969526831 - type: nauc_ndcg_at_5_std value: 3.5794092637248505 - type: nauc_precision_at_1000_diff1 value: -9.31941215946743 - type: nauc_precision_at_1000_max value: 31.52913520470716 - type: nauc_precision_at_1000_std value: 22.720784312185856 - type: nauc_precision_at_100_diff1 value: 8.958548406995279 - type: nauc_precision_at_100_max value: 15.100597910674104 - type: nauc_precision_at_100_std value: 71.04548238175113 - type: nauc_precision_at_10_diff1 value: 12.4698194690008 - type: nauc_precision_at_10_max value: -15.84870544871496 - type: nauc_precision_at_10_std value: 7.575297622501928 - type: nauc_precision_at_1_diff1 value: 22.393284073955723 - type: nauc_precision_at_1_max value: -5.889370191243167 - type: nauc_precision_at_1_std value: -1.5183536173658247 - type: nauc_precision_at_20_diff1 value: 15.393505718138758 - type: nauc_precision_at_20_max value: -3.70684298539384 - type: nauc_precision_at_20_std value: 29.426137824970304 - type: nauc_precision_at_3_diff1 value: 9.997768085465394 - type: nauc_precision_at_3_max value: -17.12224314347674 - type: nauc_precision_at_3_std value: -1.343018166772313 - type: nauc_precision_at_5_diff1 value: 3.8936997437913554 - type: nauc_precision_at_5_max value: -5.689104289687632 - type: nauc_precision_at_5_std value: 3.181098051304285 - type: nauc_recall_at_1000_diff1 value: 9.908303508158387 - type: nauc_recall_at_1000_max value: 6.174506592699848 - type: nauc_recall_at_1000_std value: 77.41931114780012 - type: nauc_recall_at_100_diff1 value: 10.286839241876192 - type: nauc_recall_at_100_max value: -6.6138697026666815 - type: nauc_recall_at_100_std value: 49.608313692633224 - type: nauc_recall_at_10_diff1 value: 2.215545846659851 - type: nauc_recall_at_10_max value: -17.83025802478445 - type: nauc_recall_at_10_std value: -3.3784768673705465 - type: nauc_recall_at_1_diff1 value: 20.680780404542546 - type: nauc_recall_at_1_max value: -7.04722927447817 - type: nauc_recall_at_1_std value: -7.062494733973898 - type: nauc_recall_at_20_diff1 value: 6.974410239251615 - type: nauc_recall_at_20_max value: -14.161147924731646 - type: nauc_recall_at_20_std value: 9.328412057721454 - type: nauc_recall_at_3_diff1 value: 7.904589805754212 - type: nauc_recall_at_3_max value: -12.1912388648593 - type: nauc_recall_at_3_std value: -9.221542013385555 - type: nauc_recall_at_5_diff1 value: -3.2604132752706914 - type: nauc_recall_at_5_max value: -6.886351441658915 - type: nauc_recall_at_5_std value: -7.014252851712789 - type: ndcg_at_1 value: 39.796 - type: ndcg_at_10 value: 31.455 - type: ndcg_at_100 value: 42.388999999999996 - type: ndcg_at_1000 value: 53.556000000000004 - type: ndcg_at_20 value: 30.808000000000003 - type: ndcg_at_3 value: 35.831 - type: ndcg_at_5 value: 32.845 - 
type: precision_at_1 value: 40.816 - type: precision_at_10 value: 27.143 - type: precision_at_100 value: 8.449 - type: precision_at_1000 value: 1.6179999999999999 - type: precision_at_20 value: 19.387999999999998 - type: precision_at_3 value: 35.374 - type: precision_at_5 value: 31.019999999999996 - type: recall_at_1 value: 2.9739999999999998 - type: recall_at_10 value: 19.39 - type: recall_at_100 value: 51.636 - type: recall_at_1000 value: 86.99900000000001 - type: recall_at_20 value: 26.478 - type: recall_at_3 value: 7.703 - type: recall_at_5 value: 11.42 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 86.9384765625 - type: ap value: 31.737513704141552 - type: ap_weighted value: 31.737513704141552 - type: f1 value: 71.5490757306975 - type: f1_weighted value: 89.14632533489856 - type: main_score value: 86.9384765625 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 73.57668364459535 - type: f1 value: 73.90467103648074 - type: f1_weighted value: 73.42158415034704 - type: main_score value: 73.57668364459535 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: main_score value: 58.574148097494685 - type: v_measure value: 58.574148097494685 - type: v_measure_std value: 0.9443161637490822 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cosine_accuracy value: 88.1385229778864 - type: cosine_accuracy_threshold value: 83.86307954788208 - type: cosine_ap value: 80.17965893449055 - type: cosine_f1 value: 73.0614300100705 - type: cosine_f1_threshold value: 80.7942807674408 - type: cosine_precision value: 69.8603755416466 - type: cosine_recall value: 76.56992084432717 - type: dot_accuracy value: 88.2100494724921 - type: dot_accuracy_threshold value: 83.84793996810913 - type: dot_ap value: 80.18603932881858 - type: dot_f1 value: 73.07643714466204 - type: dot_f1_threshold value: 80.87586164474487 - type: dot_precision value: 70.10909090909091 - type: dot_recall value: 76.3060686015831 - type: euclidean_accuracy value: 88.1385229778864 - type: euclidean_accuracy_threshold value: 56.77661895751953 - type: euclidean_ap value: 80.1784070881624 - type: euclidean_f1 value: 73.04830369529574 - type: euclidean_f1_threshold value: 61.91838979721069 - type: euclidean_precision value: 69.96859144720948 - type: euclidean_recall value: 76.41160949868075 - type: main_score value: 80.18603932881858 - type: manhattan_accuracy value: 88.0431543184121 - type: manhattan_accuracy_threshold value: 3755.6137084960938 - type: manhattan_ap value: 79.98270453664578 - type: manhattan_f1 value: 72.68242015061023 - type: manhattan_f1_threshold value: 3892.494583129883 - type: manhattan_precision value: 71.54907975460122 - type: manhattan_recall value: 73.85224274406332 - type: max_ap value: 80.18603932881858 - type: max_f1 value: 73.07643714466204 - type: max_precision value: 71.54907975460122 - type: max_recall value: 76.56992084432717 - type: 
similarity_accuracy value: 88.1385229778864 - type: similarity_accuracy_threshold value: 83.86307954788208 - type: similarity_ap value: 80.17965893449055 - type: similarity_f1 value: 73.0614300100705 - type: similarity_f1_threshold value: 80.7942807674408 - type: similarity_precision value: 69.8603755416466 - type: similarity_recall value: 76.56992084432717 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cosine_accuracy value: 89.7892653393876 - type: cosine_accuracy_threshold value: 79.69566583633423 - type: cosine_ap value: 87.4579867302024 - type: cosine_f1 value: 79.91620843152658 - type: cosine_f1_threshold value: 78.53609323501587 - type: cosine_precision value: 77.7155329210622 - type: cosine_recall value: 82.24514936864799 - type: dot_accuracy value: 89.78732487289945 - type: dot_accuracy_threshold value: 80.05315661430359 - type: dot_ap value: 87.44916182456272 - type: dot_f1 value: 79.90419878751591 - type: dot_f1_threshold value: 78.57890725135803 - type: dot_precision value: 77.73409057812728 - type: dot_recall value: 82.19895287958116 - type: euclidean_accuracy value: 89.78538440641131 - type: euclidean_accuracy_threshold value: 62.29925751686096 - type: euclidean_ap value: 87.45904868911386 - type: euclidean_f1 value: 79.93127404474657 - type: euclidean_f1_threshold value: 65.61101078987122 - type: euclidean_precision value: 77.62060210373595 - type: euclidean_recall value: 82.38373883584848 - type: main_score value: 87.46554314325058 - type: manhattan_accuracy value: 89.76597974152986 - type: manhattan_accuracy_threshold value: 3988.5299682617188 - type: manhattan_ap value: 87.46554314325058 - type: manhattan_f1 value: 79.97181740645973 - type: manhattan_f1_threshold value: 4235.905838012695 - type: manhattan_precision value: 77.13713427283783 - type: manhattan_recall value: 83.02279026793964 - type: max_ap value: 87.46554314325058 - type: max_f1 value: 79.97181740645973 - type: max_precision value: 77.73409057812728 - type: max_recall value: 83.02279026793964 - type: similarity_accuracy value: 89.7892653393876 - type: similarity_accuracy_threshold value: 79.69566583633423 - type: similarity_ap value: 87.4579867302024 - type: similarity_f1 value: 79.91620843152658 - type: similarity_f1_threshold value: 78.53609323501587 - type: similarity_precision value: 77.7155329210622 - type: similarity_recall value: 82.24514936864799 --- # *Forked from dunzhang/stella_en_400M_v5* # Updates Hi, everyone, thanks for using stella models. After six months of work, I trained the jasper model on top of the stella model, which is a multimodal model, and it can be ranked 2 in mteb (submitted the results on 2024-12-11, which may need official review https://github.com/embeddings-benchmark/results/pull/68). Model link: https://huggingface.co/infgrad/jasper_en_vision_language_v1 I'll focus on the technical report, training data and related code, hopefully the tricks I've used will be of some help to you guys! The core training code will be integrated into the rag-retrieval library(https://github.com/NLPJCL/RAG-Retrieval) in the near future. (Welcome to star) This work was accomplished during my free time, it's a personal hobby. One person's time and energy is limited, and you are welcome to make any contributions! You can also find these models on my [homepage](https://huggingface.co/infgrad). 
# Introduction

The models are trained on top of `Alibaba-NLP/gte-large-en-v1.5` and `Alibaba-NLP/gte-Qwen2-1.5B-instruct`. Thanks for their contributions!

**We simplify the usage of prompts, providing two prompts for most general tasks: one for s2p and one for s2s.**

Prompt for the s2p task (e.g. retrieval tasks):

```text
Instruct: Given a web search query, retrieve relevant passages that answer the query.\nQuery: {query}
```

Prompt for the s2s task (e.g. semantic textual similarity tasks):

```text
Instruct: Retrieve semantically similar text.\nQuery: {query}
```

In the final training stage the models are trained with [MRL](https://arxiv.org/abs/2205.13147), so they support multiple output dimensions: 512, 768, 1024, 2048, 4096, 6144 and 8192. The higher the dimension, the better the performance. **Generally speaking, 1024d is good enough.** The MTEB score of 1024d is only 0.001 lower than that of 8192d.

# Model directory structure

The model directory structure is very simple: it is a standard SentenceTransformer directory **with a series of `2_Dense_{dims}` folders**, where `dims` is the final vector dimension. For example, the `2_Dense_256` folder stores the Linear weights that project vectors to 256 dimensions. Please refer to the following chapters for specific instructions on how to use them; a short sketch for switching dimensions is also included at the end of this card.

# Usage

You can use the `SentenceTransformers` or `transformers` library to encode text.

## Sentence Transformers

```python
from sentence_transformers import SentenceTransformer

# This model supports two prompts: "s2p_query" and "s2s_query" for sentence-to-passage and sentence-to-sentence tasks, respectively.
# They are defined in `config_sentence_transformers.json`
query_prompt_name = "s2p_query"
queries = [
    "What are some ways to reduce stress?",
    "What are the benefits of drinking green tea?",
]
# docs do not need any prompts
docs = [
    "There are many effective ways to reduce stress. Some common techniques include deep breathing, meditation, and physical activity. Engaging in hobbies, spending time in nature, and connecting with loved ones can also help alleviate stress. Additionally, setting boundaries, practicing self-care, and learning to say no can prevent stress from building up.",
    "Green tea has been consumed for centuries and is known for its potential health benefits. It contains antioxidants that may help protect the body against damage caused by free radicals. Regular consumption of green tea has been associated with improved heart health, enhanced cognitive function, and a reduced risk of certain types of cancer. The polyphenols in green tea may also have anti-inflammatory and weight loss properties.",
]

# !The default dimension is 1024. If you need other dimensions, please clone the model and modify `modules.json` to replace `2_Dense_1024` with another dimension, e.g. `2_Dense_256` or `2_Dense_8192`!
# on GPU
model = SentenceTransformer("dunzhang/stella_en_400M_v5", trust_remote_code=True).cuda()
# You can also use this model without the `use_memory_efficient_attention` and `unpad_inputs` features. It can also run on CPU.
# model = SentenceTransformer(
#     "dunzhang/stella_en_400M_v5",
#     trust_remote_code=True,
#     device="cpu",
#     config_kwargs={"use_memory_efficient_attention": False, "unpad_inputs": False}
# )

query_embeddings = model.encode(queries, prompt_name=query_prompt_name)
doc_embeddings = model.encode(docs)
print(query_embeddings.shape, doc_embeddings.shape)
# (2, 1024) (2, 1024)

similarities = model.similarity(query_embeddings, doc_embeddings)
print(similarities)
# tensor([[0.8398, 0.2990],
#         [0.3282, 0.8095]])
```

## Transformers

```python
import os
import torch
from transformers import AutoModel, AutoTokenizer
from sklearn.preprocessing import normalize

query_prompt = "Instruct: Given a web search query, retrieve relevant passages that answer the query.\nQuery: "
queries = [
    "What are some ways to reduce stress?",
    "What are the benefits of drinking green tea?",
]
queries = [query_prompt + query for query in queries]
# docs do not need any prompts
docs = [
    "There are many effective ways to reduce stress. Some common techniques include deep breathing, meditation, and physical activity. Engaging in hobbies, spending time in nature, and connecting with loved ones can also help alleviate stress. Additionally, setting boundaries, practicing self-care, and learning to say no can prevent stress from building up.",
    "Green tea has been consumed for centuries and is known for its potential health benefits. It contains antioxidants that may help protect the body against damage caused by free radicals. Regular consumption of green tea has been associated with improved heart health, enhanced cognitive function, and a reduced risk of certain types of cancer. The polyphenols in green tea may also have anti-inflammatory and weight loss properties.",
]

# The path of your model after cloning it
model_dir = "{Your MODEL_PATH}"
vector_dim = 1024
vector_linear_directory = f"2_Dense_{vector_dim}"
model = AutoModel.from_pretrained(model_dir, trust_remote_code=True).cuda().eval()
# You can also use this model without the `use_memory_efficient_attention` and `unpad_inputs` features. It can also run on CPU.
# model = AutoModel.from_pretrained(model_dir, trust_remote_code=True, use_memory_efficient_attention=False, unpad_inputs=False).cuda().eval()
tokenizer = AutoTokenizer.from_pretrained(model_dir, trust_remote_code=True)

# Load the Linear projection that maps hidden states to the chosen vector dimension
vector_linear = torch.nn.Linear(in_features=model.config.hidden_size, out_features=vector_dim)
vector_linear_dict = {
    k.replace("linear.", ""): v
    for k, v in torch.load(os.path.join(model_dir, f"{vector_linear_directory}/pytorch_model.bin")).items()
}
vector_linear.load_state_dict(vector_linear_dict)
vector_linear.cuda()

# Embed the queries
with torch.no_grad():
    input_data = tokenizer(queries, padding="longest", truncation=True, max_length=512, return_tensors="pt")
    input_data = {k: v.cuda() for k, v in input_data.items()}
    attention_mask = input_data["attention_mask"]
    last_hidden_state = model(**input_data)[0]
    last_hidden = last_hidden_state.masked_fill(~attention_mask[..., None].bool(), 0.0)
    query_vectors = last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None]
    query_vectors = normalize(vector_linear(query_vectors).cpu().numpy())

# Embed the documents
with torch.no_grad():
    input_data = tokenizer(docs, padding="longest", truncation=True, max_length=512, return_tensors="pt")
    input_data = {k: v.cuda() for k, v in input_data.items()}
    attention_mask = input_data["attention_mask"]
    last_hidden_state = model(**input_data)[0]
    last_hidden = last_hidden_state.masked_fill(~attention_mask[..., None].bool(), 0.0)
    docs_vectors = last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None]
    docs_vectors = normalize(vector_linear(docs_vectors).cpu().numpy())

print(query_vectors.shape, docs_vectors.shape)
# (2, 1024) (2, 1024)

similarities = query_vectors @ docs_vectors.T
print(similarities)
# [[0.8397531  0.29900077]
#  [0.32818374 0.80954516]]
```

### infinity_emb

Usage via [infinity, MIT Licensed](https://github.com/michaelfeil/infinity).

```bash
docker run \
  --gpus all -p "7997":"7997" \
  michaelf34/infinity:0.0.69 \
  v2 --model-id dunzhang/stella_en_400M_v5 --revision "refs/pr/24" --dtype bfloat16 --batch-size 16 --device cuda --engine torch --port 7997 --no-bettertransformer
```

# FAQ

Q: What are the details of training?

A: The training method and datasets will be released in the future (specific time unknown; they may be provided in a paper).

Q: How do I choose a suitable prompt for my own task?

A: In most cases, please use the s2p and s2s prompts. These two prompts account for the vast majority of the training data.

Q: How do I reproduce the MTEB results?

A: Please use the evaluation scripts in `Alibaba-NLP/gte-Qwen2-1.5B-instruct` or `intfloat/e5-mistral-7b-instruct`. A lighter-weight sketch using the `mteb` package is included at the end of this card.

Q: Why does each dimension have its own linear weight?

A: MRL has multiple training methods; we chose this one because it gives the best performance.

Q: What is the sequence length of the models?

A: 512 is recommended. In our experiments, almost all models perform poorly on specialized long-text retrieval datasets. Besides, the model is trained on datasets of length 512. This may be an area for further optimization.

If you have any questions, please start a discussion in the community tab.
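# Sketch: switching the output dimension

The note in the Introduction about editing `modules.json` can be scripted. Below is a minimal sketch, assuming the standard sentence-transformers `modules.json` layout (a JSON list of module entries with a `"path"` field) and a hypothetical local clone at `./stella_en_400M_v5`; adjust both to your setup.

```python
import json
import os

# Hypothetical paths/values for illustration; adjust to your clone and desired dimension.
model_dir = "./stella_en_400M_v5"
target_dense = "2_Dense_256"  # e.g. switch from the default 2_Dense_1024 to 256d

modules_path = os.path.join(model_dir, "modules.json")
with open(modules_path) as f:
    modules = json.load(f)

# Point the Dense module at the folder for the desired dimension.
for module in modules:
    if module.get("path", "").startswith("2_Dense"):
        module["path"] = target_dense

with open(modules_path, "w") as f:
    json.dump(modules, f, indent=2)
```

After this, reloading the clone with `SentenceTransformer(model_dir, trust_remote_code=True)` should produce 256-dimensional vectors.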
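# Sketch: a quick MTEB smoke test

For exact reproduction, use the evaluation scripts mentioned in the FAQ. As a lighter-weight alternative, here is a minimal sketch using the open-source `mteb` package (`pip install mteb`); it assumes the package's standard `MTEB(tasks=...)` interface and uses a single small task as an example. Its numbers may not match the official scripts exactly, since prompt handling can differ.

```python
from mteb import MTEB
from sentence_transformers import SentenceTransformer

# Load the model as in the Sentence Transformers example above.
model = SentenceTransformer("dunzhang/stella_en_400M_v5", trust_remote_code=True)

# Run a single small task as a smoke test; pass more task names to evaluate further.
evaluation = MTEB(tasks=["STS12"])
evaluation.run(model, output_folder="results/stella_en_400M_v5")
```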
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
Impulse2000/multilingual-e5-large-instruct-GGUF
Impulse2000
feature-extraction
[ "sentence-transformers", "gguf", "mteb", "transformers", "llama-cpp", "feature-extraction", "multilingual", "af", "am", "ar", "as", "az", "be", "bg", "bn", "br", "bs", "ca", "cs", "cy", "da", "de", "el", "en", "eo", "es", "et", "eu", "fa", "fi", "fr", "fy", "ga", "gd", "gl", "gu", "ha", "he", "hi", "hr", "hu", "hy", "id", "is", "it", "ja", "jv", "ka", "kk", "km", "kn", "ko", "ku", "ky", "la", "lo", "lt", "lv", "mg", "mk", "ml", "mn", "mr", "ms", "my", "ne", "nl", "no", "om", "or", "pa", "pl", "ps", "pt", "ro", "ru", "sa", "sd", "si", "sk", "sl", "so", "sq", "sr", "su", "sv", "sw", "ta", "te", "th", "tl", "tr", "ug", "uk", "ur", "uz", "vi", "xh", "yi", "zh", "base_model:intfloat/multilingual-e5-large-instruct", "base_model:quantized:intfloat/multilingual-e5-large-instruct", "license:mit", "model-index", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2025-02-08T19:30:41
2025-02-08T20:00:26
113
1
--- base_model: intfloat/multilingual-e5-large-instruct language: - multilingual - af - am - ar - as - az - be - bg - bn - br - bs - ca - cs - cy - da - de - el - en - eo - es - et - eu - fa - fi - fr - fy - ga - gd - gl - gu - ha - he - hi - hr - hu - hy - id - is - it - ja - jv - ka - kk - km - kn - ko - ku - ky - la - lo - lt - lv - mg - mk - ml - mn - mr - ms - my - ne - nl - 'no' - om - or - pa - pl - ps - pt - ro - ru - sa - sd - si - sk - sl - so - sq - sr - su - sv - sw - ta - te - th - tl - tr - ug - uk - ur - uz - vi - xh - yi - zh license: mit pipeline_tag: feature-extraction tags: - mteb - sentence-transformers - transformers - llama-cpp model-index: - name: multilingual-e5-large-instruct results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 76.23880597014924 - type: ap value: 39.07351965022687 - type: f1 value: 70.04836733862683 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (de) type: mteb/amazon_counterfactual config: de split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 66.71306209850107 - type: ap value: 79.01499914759529 - type: f1 value: 64.81951817560703 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en-ext) type: mteb/amazon_counterfactual config: en-ext split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 73.85307346326837 - type: ap value: 22.447519885878737 - type: f1 value: 61.0162730745633 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (ja) type: mteb/amazon_counterfactual config: ja split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 76.04925053533191 - type: ap value: 23.44983217128922 - type: f1 value: 62.5723230907759 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 96.28742500000001 - type: ap value: 94.8449918887462 - type: f1 value: 96.28680923610432 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 56.716 - type: f1 value: 55.76510398266401 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (de) type: mteb/amazon_reviews_multi config: de split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 52.99999999999999 - type: f1 value: 52.00829994765178 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (es) type: mteb/amazon_reviews_multi config: es split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.806000000000004 - type: f1 value: 48.082345914983634 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.507999999999996 - type: f1 value: 47.68752844642045 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (ja) type: mteb/amazon_reviews_multi config: ja split: test revision: 
1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 47.709999999999994 - type: f1 value: 47.05870376637181 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 44.662000000000006 - type: f1 value: 43.42371965372771 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 31.721 - type: map_at_10 value: 49.221 - type: map_at_100 value: 49.884 - type: map_at_1000 value: 49.888 - type: map_at_3 value: 44.31 - type: map_at_5 value: 47.276 - type: mrr_at_1 value: 32.432 - type: mrr_at_10 value: 49.5 - type: mrr_at_100 value: 50.163000000000004 - type: mrr_at_1000 value: 50.166 - type: mrr_at_3 value: 44.618 - type: mrr_at_5 value: 47.541 - type: ndcg_at_1 value: 31.721 - type: ndcg_at_10 value: 58.384 - type: ndcg_at_100 value: 61.111000000000004 - type: ndcg_at_1000 value: 61.187999999999995 - type: ndcg_at_3 value: 48.386 - type: ndcg_at_5 value: 53.708999999999996 - type: precision_at_1 value: 31.721 - type: precision_at_10 value: 8.741 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 20.057 - type: precision_at_5 value: 14.609 - type: recall_at_1 value: 31.721 - type: recall_at_10 value: 87.411 - type: recall_at_100 value: 99.075 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 60.171 - type: recall_at_5 value: 73.044 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 46.40419580759799 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 40.48593255007969 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 63.889179122289995 - type: mrr value: 77.61146286769556 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 88.15075203727929 - type: cos_sim_spearman value: 86.9622224570873 - type: euclidean_pearson value: 86.70473853624121 - type: euclidean_spearman value: 86.9622224570873 - type: manhattan_pearson value: 86.21089380980065 - type: manhattan_spearman value: 86.75318154937008 - task: type: BitextMining dataset: name: MTEB BUCC (de-en) type: mteb/bucc-bitext-mining config: de-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 99.65553235908142 - type: f1 value: 99.60681976339595 - type: precision value: 99.58246346555325 - type: recall value: 99.65553235908142 - task: type: BitextMining dataset: name: MTEB BUCC (fr-en) type: mteb/bucc-bitext-mining config: fr-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 99.26260180497468 - type: f1 value: 99.14520507740848 - type: precision value: 99.08650671362535 - type: recall value: 99.26260180497468 - task: type: BitextMining dataset: name: MTEB BUCC (ru-en) type: mteb/bucc-bitext-mining config: ru-en split: 
test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 98.07412538967787 - type: f1 value: 97.86629719431936 - type: precision value: 97.76238309664012 - type: recall value: 98.07412538967787 - task: type: BitextMining dataset: name: MTEB BUCC (zh-en) type: mteb/bucc-bitext-mining config: zh-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 99.42074776197998 - type: f1 value: 99.38564156573635 - type: precision value: 99.36808846761454 - type: recall value: 99.42074776197998 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 85.73376623376623 - type: f1 value: 85.68480707214599 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 40.935218072113855 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 36.276389017675264 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 27.764166666666668 - type: map_at_10 value: 37.298166666666674 - type: map_at_100 value: 38.530166666666666 - type: map_at_1000 value: 38.64416666666667 - type: map_at_3 value: 34.484833333333334 - type: map_at_5 value: 36.0385 - type: mrr_at_1 value: 32.93558333333333 - type: mrr_at_10 value: 41.589749999999995 - type: mrr_at_100 value: 42.425333333333334 - type: mrr_at_1000 value: 42.476333333333336 - type: mrr_at_3 value: 39.26825 - type: mrr_at_5 value: 40.567083333333336 - type: ndcg_at_1 value: 32.93558333333333 - type: ndcg_at_10 value: 42.706583333333334 - type: ndcg_at_100 value: 47.82483333333333 - type: ndcg_at_1000 value: 49.95733333333334 - type: ndcg_at_3 value: 38.064750000000004 - type: ndcg_at_5 value: 40.18158333333333 - type: precision_at_1 value: 32.93558333333333 - type: precision_at_10 value: 7.459833333333334 - type: precision_at_100 value: 1.1830833333333335 - type: precision_at_1000 value: 0.15608333333333332 - type: precision_at_3 value: 17.5235 - type: precision_at_5 value: 12.349833333333333 - type: recall_at_1 value: 27.764166666666668 - type: recall_at_10 value: 54.31775 - type: recall_at_100 value: 76.74350000000001 - type: recall_at_1000 value: 91.45208333333332 - type: recall_at_3 value: 41.23425 - type: recall_at_5 value: 46.73983333333334 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 12.969 - type: map_at_10 value: 21.584999999999997 - type: map_at_100 value: 23.3 - type: map_at_1000 value: 23.5 - type: map_at_3 value: 18.218999999999998 - type: map_at_5 value: 19.983 - type: mrr_at_1 value: 29.316 - type: mrr_at_10 value: 40.033 - type: mrr_at_100 value: 40.96 - type: mrr_at_1000 value: 41.001 - type: mrr_at_3 value: 37.123 - type: mrr_at_5 value: 38.757999999999996 - type: ndcg_at_1 value: 29.316 - type: ndcg_at_10 value: 29.858 - type: ndcg_at_100 value: 36.756 - type: ndcg_at_1000 value: 40.245999999999995 - type: ndcg_at_3 value: 24.822 - type: ndcg_at_5 value: 26.565 - type: precision_at_1 value: 29.316 - type: 
precision_at_10 value: 9.186 - type: precision_at_100 value: 1.6549999999999998 - type: precision_at_1000 value: 0.22999999999999998 - type: precision_at_3 value: 18.436 - type: precision_at_5 value: 13.876 - type: recall_at_1 value: 12.969 - type: recall_at_10 value: 35.142 - type: recall_at_100 value: 59.143 - type: recall_at_1000 value: 78.594 - type: recall_at_3 value: 22.604 - type: recall_at_5 value: 27.883000000000003 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 8.527999999999999 - type: map_at_10 value: 17.974999999999998 - type: map_at_100 value: 25.665 - type: map_at_1000 value: 27.406000000000002 - type: map_at_3 value: 13.017999999999999 - type: map_at_5 value: 15.137 - type: mrr_at_1 value: 62.5 - type: mrr_at_10 value: 71.891 - type: mrr_at_100 value: 72.294 - type: mrr_at_1000 value: 72.296 - type: mrr_at_3 value: 69.958 - type: mrr_at_5 value: 71.121 - type: ndcg_at_1 value: 50.875 - type: ndcg_at_10 value: 38.36 - type: ndcg_at_100 value: 44.235 - type: ndcg_at_1000 value: 52.154 - type: ndcg_at_3 value: 43.008 - type: ndcg_at_5 value: 40.083999999999996 - type: precision_at_1 value: 62.5 - type: precision_at_10 value: 30 - type: precision_at_100 value: 10.038 - type: precision_at_1000 value: 2.0869999999999997 - type: precision_at_3 value: 46.833000000000006 - type: precision_at_5 value: 38.800000000000004 - type: recall_at_1 value: 8.527999999999999 - type: recall_at_10 value: 23.828 - type: recall_at_100 value: 52.322 - type: recall_at_1000 value: 77.143 - type: recall_at_3 value: 14.136000000000001 - type: recall_at_5 value: 17.761 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 51.51 - type: f1 value: 47.632159862049896 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 60.734 - type: map_at_10 value: 72.442 - type: map_at_100 value: 72.735 - type: map_at_1000 value: 72.75 - type: map_at_3 value: 70.41199999999999 - type: map_at_5 value: 71.80499999999999 - type: mrr_at_1 value: 65.212 - type: mrr_at_10 value: 76.613 - type: mrr_at_100 value: 76.79899999999999 - type: mrr_at_1000 value: 76.801 - type: mrr_at_3 value: 74.8 - type: mrr_at_5 value: 76.12400000000001 - type: ndcg_at_1 value: 65.212 - type: ndcg_at_10 value: 77.988 - type: ndcg_at_100 value: 79.167 - type: ndcg_at_1000 value: 79.452 - type: ndcg_at_3 value: 74.362 - type: ndcg_at_5 value: 76.666 - type: precision_at_1 value: 65.212 - type: precision_at_10 value: 10.003 - type: precision_at_100 value: 1.077 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 29.518 - type: precision_at_5 value: 19.016 - type: recall_at_1 value: 60.734 - type: recall_at_10 value: 90.824 - type: recall_at_100 value: 95.71600000000001 - type: recall_at_1000 value: 97.577 - type: recall_at_3 value: 81.243 - type: recall_at_5 value: 86.90299999999999 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 23.845 - type: map_at_10 value: 39.281 - type: map_at_100 value: 41.422 - type: map_at_1000 value: 41.593 - type: map_at_3 value: 34.467 - type: map_at_5 value: 37.017 - type: mrr_at_1 value: 47.531 - type: mrr_at_10 value: 56.204 - type: mrr_at_100 value: 56.928999999999995 - 
type: mrr_at_1000 value: 56.962999999999994 - type: mrr_at_3 value: 54.115 - type: mrr_at_5 value: 55.373000000000005 - type: ndcg_at_1 value: 47.531 - type: ndcg_at_10 value: 47.711999999999996 - type: ndcg_at_100 value: 54.510999999999996 - type: ndcg_at_1000 value: 57.103 - type: ndcg_at_3 value: 44.145 - type: ndcg_at_5 value: 45.032 - type: precision_at_1 value: 47.531 - type: precision_at_10 value: 13.194 - type: precision_at_100 value: 2.045 - type: precision_at_1000 value: 0.249 - type: precision_at_3 value: 29.424 - type: precision_at_5 value: 21.451 - type: recall_at_1 value: 23.845 - type: recall_at_10 value: 54.967 - type: recall_at_100 value: 79.11399999999999 - type: recall_at_1000 value: 94.56700000000001 - type: recall_at_3 value: 40.256 - type: recall_at_5 value: 46.215 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 37.819 - type: map_at_10 value: 60.889 - type: map_at_100 value: 61.717999999999996 - type: map_at_1000 value: 61.778 - type: map_at_3 value: 57.254000000000005 - type: map_at_5 value: 59.541 - type: mrr_at_1 value: 75.638 - type: mrr_at_10 value: 82.173 - type: mrr_at_100 value: 82.362 - type: mrr_at_1000 value: 82.37 - type: mrr_at_3 value: 81.089 - type: mrr_at_5 value: 81.827 - type: ndcg_at_1 value: 75.638 - type: ndcg_at_10 value: 69.317 - type: ndcg_at_100 value: 72.221 - type: ndcg_at_1000 value: 73.382 - type: ndcg_at_3 value: 64.14 - type: ndcg_at_5 value: 67.07600000000001 - type: precision_at_1 value: 75.638 - type: precision_at_10 value: 14.704999999999998 - type: precision_at_100 value: 1.698 - type: precision_at_1000 value: 0.185 - type: precision_at_3 value: 41.394999999999996 - type: precision_at_5 value: 27.162999999999997 - type: recall_at_1 value: 37.819 - type: recall_at_10 value: 73.52499999999999 - type: recall_at_100 value: 84.875 - type: recall_at_1000 value: 92.559 - type: recall_at_3 value: 62.092999999999996 - type: recall_at_5 value: 67.907 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 94.60079999999999 - type: ap value: 92.67396345347356 - type: f1 value: 94.5988098167121 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 21.285 - type: map_at_10 value: 33.436 - type: map_at_100 value: 34.63 - type: map_at_1000 value: 34.681 - type: map_at_3 value: 29.412 - type: map_at_5 value: 31.715 - type: mrr_at_1 value: 21.848 - type: mrr_at_10 value: 33.979 - type: mrr_at_100 value: 35.118 - type: mrr_at_1000 value: 35.162 - type: mrr_at_3 value: 30.036 - type: mrr_at_5 value: 32.298 - type: ndcg_at_1 value: 21.862000000000002 - type: ndcg_at_10 value: 40.43 - type: ndcg_at_100 value: 46.17 - type: ndcg_at_1000 value: 47.412 - type: ndcg_at_3 value: 32.221 - type: ndcg_at_5 value: 36.332 - type: precision_at_1 value: 21.862000000000002 - type: precision_at_10 value: 6.491 - type: precision_at_100 value: 0.935 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 13.744 - type: precision_at_5 value: 10.331999999999999 - type: recall_at_1 value: 21.285 - type: recall_at_10 value: 62.083 - type: recall_at_100 value: 88.576 - type: recall_at_1000 value: 98.006 - type: recall_at_3 value: 39.729 - type: recall_at_5 value: 49.608000000000004 - task: type: Classification dataset: name: MTEB 
MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.92612859097127 - type: f1 value: 93.82370333372853 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (de) type: mteb/mtop_domain config: de split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 92.67681036911807 - type: f1 value: 92.14191382411472 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (es) type: mteb/mtop_domain config: es split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 92.26817878585723 - type: f1 value: 91.92824250337878 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 89.96554963983714 - type: f1 value: 90.02859329630792 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (hi) type: mteb/mtop_domain config: hi split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 90.02509860164935 - type: f1 value: 89.30665159182062 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (th) type: mteb/mtop_domain config: th split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 87.55515370705244 - type: f1 value: 87.94449232331907 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 82.4623803009576 - type: f1 value: 66.06738378772725 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (de) type: mteb/mtop_intent config: de split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 79.3716539870386 - type: f1 value: 60.37614033396853 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (es) type: mteb/mtop_intent config: es split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 80.34022681787857 - type: f1 value: 58.302008026952 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 76.72095208268087 - type: f1 value: 59.64524724009049 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (hi) type: mteb/mtop_intent config: hi split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 77.87020437432773 - type: f1 value: 57.80202694670567 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (th) type: mteb/mtop_intent config: th split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 77.73598553345387 - type: f1 value: 58.19628250675031 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (af) type: mteb/amazon_massive_intent config: af split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.6630800268998 - type: f1 value: 65.00996668051691 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (am) type: mteb/amazon_massive_intent config: am split: test revision: 
31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.7128446536651 - type: f1 value: 57.95860594874963 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ar) type: mteb/amazon_massive_intent config: ar split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.61129791526563 - type: f1 value: 59.75328290206483 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (az) type: mteb/amazon_massive_intent config: az split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.00134498991257 - type: f1 value: 67.0230483991802 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (bn) type: mteb/amazon_massive_intent config: bn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.54068594485541 - type: f1 value: 65.54604628946976 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (cy) type: mteb/amazon_massive_intent config: cy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.032952252858095 - type: f1 value: 58.715741857057104 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (da) type: mteb/amazon_massive_intent config: da split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.80901143241427 - type: f1 value: 68.33963989243877 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (de) type: mteb/amazon_massive_intent config: de split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.47141896435777 - type: f1 value: 69.56765020308262 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (el) type: mteb/amazon_massive_intent config: el split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.2373907195696 - type: f1 value: 69.04529836036467 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 77.05783456624076 - type: f1 value: 74.69430584708174 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (es) type: mteb/amazon_massive_intent config: es split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.82111634162744 - type: f1 value: 70.77228952803762 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fa) type: mteb/amazon_massive_intent config: fa split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.25353059852051 - type: f1 value: 71.05310103416411 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fi) type: mteb/amazon_massive_intent config: fi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.28648285137861 - type: f1 value: 69.08020473732226 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.31540013449899 - type: f1 value: 70.9426355465791 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (he) type: mteb/amazon_massive_intent 
config: he split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 70.2151983860121 - type: f1 value: 67.52541755908858 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hi) type: mteb/amazon_massive_intent config: hi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.58372562205784 - type: f1 value: 69.49769064229827 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hu) type: mteb/amazon_massive_intent config: hu split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.9233355749832 - type: f1 value: 69.36311548259593 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hy) type: mteb/amazon_massive_intent config: hy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.07330195023538 - type: f1 value: 64.99882022345572 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (id) type: mteb/amazon_massive_intent config: id split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.62273032952253 - type: f1 value: 70.6394885471001 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (is) type: mteb/amazon_massive_intent config: is split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.77000672494957 - type: f1 value: 62.9368944815065 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (it) type: mteb/amazon_massive_intent config: it split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.453261600538 - type: f1 value: 70.85069934666681 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ja) type: mteb/amazon_massive_intent config: ja split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.6906523201076 - type: f1 value: 72.03249740074217 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (jv) type: mteb/amazon_massive_intent config: jv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.03631472763953 - type: f1 value: 59.3165215571852 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ka) type: mteb/amazon_massive_intent config: ka split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.913920645595155 - type: f1 value: 57.367337711611285 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (km) type: mteb/amazon_massive_intent config: km split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 54.42837928715535 - type: f1 value: 52.60527294970906 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (kn) type: mteb/amazon_massive_intent config: kn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.33490248823135 - type: f1 value: 63.213340969404065 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ko) type: mteb/amazon_massive_intent config: ko split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 70.58507061197041 - type: f1 value: 68.40256628040486 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (lv) 
type: mteb/amazon_massive_intent config: lv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.11230665770006 - type: f1 value: 66.44863577842305 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ml) type: mteb/amazon_massive_intent config: ml split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.70073974445192 - type: f1 value: 67.21291337273702 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (mn) type: mteb/amazon_massive_intent config: mn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.43913920645595 - type: f1 value: 64.09838087422806 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ms) type: mteb/amazon_massive_intent config: ms split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 70.80026899798251 - type: f1 value: 68.76986742962444 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (my) type: mteb/amazon_massive_intent config: my split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.78816408876934 - type: f1 value: 62.18781873428972 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nb) type: mteb/amazon_massive_intent config: nb split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.6577000672495 - type: f1 value: 68.75171511133003 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nl) type: mteb/amazon_massive_intent config: nl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.42501681237391 - type: f1 value: 71.18434963451544 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.64828513786146 - type: f1 value: 70.67741914007422 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pt) type: mteb/amazon_massive_intent config: pt split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.62811028917284 - type: f1 value: 71.36402039740959 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ro) type: mteb/amazon_massive_intent config: ro split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.88634835238736 - type: f1 value: 69.23701923480677 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ru) type: mteb/amazon_massive_intent config: ru split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.15938130464022 - type: f1 value: 71.87792218993388 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sl) type: mteb/amazon_massive_intent config: sl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.96301277740416 - type: f1 value: 67.29584200202983 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sq) type: mteb/amazon_massive_intent config: sq split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.49562878278412 - type: f1 value: 66.91716685679431 - task: type: Classification dataset: name: MTEB 
MassiveIntentClassification (sv) type: mteb/amazon_massive_intent config: sv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.6805648957633 - type: f1 value: 72.02723592594374 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sw) type: mteb/amazon_massive_intent config: sw split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.00605245460659 - type: f1 value: 60.16716669482932 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ta) type: mteb/amazon_massive_intent config: ta split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.90988567585742 - type: f1 value: 63.99405488777784 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (te) type: mteb/amazon_massive_intent config: te split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.62273032952253 - type: f1 value: 65.17213906909481 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (th) type: mteb/amazon_massive_intent config: th split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.50907868190988 - type: f1 value: 69.15165697194853 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tl) type: mteb/amazon_massive_intent config: tl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.30733019502352 - type: f1 value: 66.69024007380474 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tr) type: mteb/amazon_massive_intent config: tr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.24277067921989 - type: f1 value: 68.80515408492947 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ur) type: mteb/amazon_massive_intent config: ur split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.49831876260929 - type: f1 value: 64.83778567111116 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (vi) type: mteb/amazon_massive_intent config: vi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.28782784129119 - type: f1 value: 69.3294186700733 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.315400134499 - type: f1 value: 71.22674385243207 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-TW) type: mteb/amazon_massive_intent config: zh-TW split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.37794216543377 - type: f1 value: 68.96962492838232 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (af) type: mteb/amazon_massive_scenario config: af split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.33557498318764 - type: f1 value: 72.28949738478356 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (am) type: mteb/amazon_massive_scenario config: am split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 65.84398117014123 - type: f1 value: 64.71026362091463 
- task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ar) type: mteb/amazon_massive_scenario config: ar split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.76462676529925 - type: f1 value: 69.8229667407667 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (az) type: mteb/amazon_massive_scenario config: az split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.02420981842636 - type: f1 value: 71.76576384895898 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (bn) type: mteb/amazon_massive_scenario config: bn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.7572293207801 - type: f1 value: 72.76840765295256 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (cy) type: mteb/amazon_massive_scenario config: cy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.02286482851379 - type: f1 value: 66.17237947327872 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (da) type: mteb/amazon_massive_scenario config: da split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.60928043039678 - type: f1 value: 77.27094731234773 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (de) type: mteb/amazon_massive_scenario config: de split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.68325487558843 - type: f1 value: 77.97530399082261 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (el) type: mteb/amazon_massive_scenario config: el split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.13315400134498 - type: f1 value: 75.97558584796424 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 80.47410894418292 - type: f1 value: 80.52244841473792 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (es) type: mteb/amazon_massive_scenario config: es split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.9670477471419 - type: f1 value: 77.37318805793146 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fa) type: mteb/amazon_massive_scenario config: fa split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 78.09683927370544 - type: f1 value: 77.69773737430847 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fi) type: mteb/amazon_massive_scenario config: fi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.20847343644922 - type: f1 value: 75.17071738727348 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.07464694014796 - type: f1 value: 77.16136207698571 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (he) type: mteb/amazon_massive_scenario config: he split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 
metrics: - type: accuracy value: 73.53396099529255 - type: f1 value: 73.58296404484122 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hi) type: mteb/amazon_massive_scenario config: hi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.75319435104237 - type: f1 value: 75.24674707850833 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hu) type: mteb/amazon_massive_scenario config: hu split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.0948217888366 - type: f1 value: 76.47559490205028 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hy) type: mteb/amazon_massive_scenario config: hy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.07599193006052 - type: f1 value: 70.76028043093511 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (id) type: mteb/amazon_massive_scenario config: id split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.10490921318089 - type: f1 value: 77.01215275283272 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (is) type: mteb/amazon_massive_scenario config: is split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.25756556825824 - type: f1 value: 70.20605314648762 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (it) type: mteb/amazon_massive_scenario config: it split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.08137188971082 - type: f1 value: 77.3899269057439 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ja) type: mteb/amazon_massive_scenario config: ja split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 79.35440484196369 - type: f1 value: 79.58964690002772 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (jv) type: mteb/amazon_massive_scenario config: jv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.42299932750504 - type: f1 value: 68.07844356925413 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ka) type: mteb/amazon_massive_scenario config: ka split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.15669132481507 - type: f1 value: 65.89383352608513 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (km) type: mteb/amazon_massive_scenario config: km split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 60.11432414256894 - type: f1 value: 57.69910594559806 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (kn) type: mteb/amazon_massive_scenario config: kn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.24747814391392 - type: f1 value: 70.42455553830918 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ko) type: mteb/amazon_massive_scenario config: ko split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.46267652992603 - type: f1 value: 76.8854559308316 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (lv) type: 
mteb/amazon_massive_scenario config: lv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.24815063887021 - type: f1 value: 72.77805034658074 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ml) type: mteb/amazon_massive_scenario config: ml split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.11566913248151 - type: f1 value: 73.86147988001356 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (mn) type: mteb/amazon_massive_scenario config: mn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.0168123739072 - type: f1 value: 69.38515920054571 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ms) type: mteb/amazon_massive_scenario config: ms split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.41156691324814 - type: f1 value: 73.43474953408237 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (my) type: mteb/amazon_massive_scenario config: my split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.39609952925353 - type: f1 value: 67.29731681109291 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nb) type: mteb/amazon_massive_scenario config: nb split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.20914593140552 - type: f1 value: 77.07066497935367 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nl) type: mteb/amazon_massive_scenario config: nl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 78.52387357094821 - type: f1 value: 78.5259569473291 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.6913248150639 - type: f1 value: 76.91201656350455 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pt) type: mteb/amazon_massive_scenario config: pt split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.1217215870881 - type: f1 value: 77.41179937912504 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ro) type: mteb/amazon_massive_scenario config: ro split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.25891055817083 - type: f1 value: 75.8089244542887 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ru) type: mteb/amazon_massive_scenario config: ru split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.70679219905851 - type: f1 value: 78.21459594517711 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sl) type: mteb/amazon_massive_scenario config: sl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.83523873570948 - type: f1 value: 74.86847028401978 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sq) type: mteb/amazon_massive_scenario config: sq split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.71755211835911 - type: f1 value: 74.0214326485662 - task: 
type: Classification dataset: name: MTEB MassiveScenarioClassification (sv) type: mteb/amazon_massive_scenario config: sv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 79.06523201075991 - type: f1 value: 79.10545620325138 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sw) type: mteb/amazon_massive_scenario config: sw split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.91862811028918 - type: f1 value: 66.50386121217983 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ta) type: mteb/amazon_massive_scenario config: ta split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.93140551445865 - type: f1 value: 70.755435928495 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (te) type: mteb/amazon_massive_scenario config: te split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.40753194351042 - type: f1 value: 71.61816115782923 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (th) type: mteb/amazon_massive_scenario config: th split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.1815736381977 - type: f1 value: 75.08016717887205 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tl) type: mteb/amazon_massive_scenario config: tl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.86482851378614 - type: f1 value: 72.39521180006291 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tr) type: mteb/amazon_massive_scenario config: tr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.46940147948891 - type: f1 value: 76.70044085362349 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ur) type: mteb/amazon_massive_scenario config: ur split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.89307330195024 - type: f1 value: 71.5721825332298 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (vi) type: mteb/amazon_massive_scenario config: vi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.7511768661735 - type: f1 value: 75.17918654541515 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 78.69535978480162 - type: f1 value: 78.90019070153316 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-TW) type: mteb/amazon_massive_scenario config: zh-TW split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.45729657027572 - type: f1 value: 76.19578371794672 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 36.92715354123554 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 35.53536244162518 - task: type: 
Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 33.08507884504006 - type: mrr value: 34.32436977159129 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.935 - type: map_at_10 value: 13.297 - type: map_at_100 value: 16.907 - type: map_at_1000 value: 18.391 - type: map_at_3 value: 9.626999999999999 - type: map_at_5 value: 11.190999999999999 - type: mrr_at_1 value: 46.129999999999995 - type: mrr_at_10 value: 54.346000000000004 - type: mrr_at_100 value: 55.067 - type: mrr_at_1000 value: 55.1 - type: mrr_at_3 value: 51.961 - type: mrr_at_5 value: 53.246 - type: ndcg_at_1 value: 44.118 - type: ndcg_at_10 value: 35.534 - type: ndcg_at_100 value: 32.946999999999996 - type: ndcg_at_1000 value: 41.599000000000004 - type: ndcg_at_3 value: 40.25 - type: ndcg_at_5 value: 37.978 - type: precision_at_1 value: 46.129999999999995 - type: precision_at_10 value: 26.842 - type: precision_at_100 value: 8.427 - type: precision_at_1000 value: 2.128 - type: precision_at_3 value: 37.977 - type: precision_at_5 value: 32.879000000000005 - type: recall_at_1 value: 5.935 - type: recall_at_10 value: 17.211000000000002 - type: recall_at_100 value: 34.33 - type: recall_at_1000 value: 65.551 - type: recall_at_3 value: 10.483 - type: recall_at_5 value: 13.078999999999999 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 35.231 - type: map_at_10 value: 50.202000000000005 - type: map_at_100 value: 51.154999999999994 - type: map_at_1000 value: 51.181 - type: map_at_3 value: 45.774 - type: map_at_5 value: 48.522 - type: mrr_at_1 value: 39.687 - type: mrr_at_10 value: 52.88 - type: mrr_at_100 value: 53.569 - type: mrr_at_1000 value: 53.58500000000001 - type: mrr_at_3 value: 49.228 - type: mrr_at_5 value: 51.525 - type: ndcg_at_1 value: 39.687 - type: ndcg_at_10 value: 57.754000000000005 - type: ndcg_at_100 value: 61.597 - type: ndcg_at_1000 value: 62.18900000000001 - type: ndcg_at_3 value: 49.55 - type: ndcg_at_5 value: 54.11899999999999 - type: precision_at_1 value: 39.687 - type: precision_at_10 value: 9.313 - type: precision_at_100 value: 1.146 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 22.229 - type: precision_at_5 value: 15.939 - type: recall_at_1 value: 35.231 - type: recall_at_10 value: 78.083 - type: recall_at_100 value: 94.42099999999999 - type: recall_at_1000 value: 98.81 - type: recall_at_3 value: 57.047000000000004 - type: recall_at_5 value: 67.637 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 71.241 - type: map_at_10 value: 85.462 - type: map_at_100 value: 86.083 - type: map_at_1000 value: 86.09700000000001 - type: map_at_3 value: 82.49499999999999 - type: map_at_5 value: 84.392 - type: mrr_at_1 value: 82.09 - type: mrr_at_10 value: 88.301 - type: mrr_at_100 value: 88.383 - type: mrr_at_1000 value: 88.384 - type: mrr_at_3 value: 87.37 - type: mrr_at_5 value: 88.035 - type: ndcg_at_1 value: 82.12 - type: ndcg_at_10 value: 89.149 - type: ndcg_at_100 value: 90.235 - type: ndcg_at_1000 value: 90.307 - type: ndcg_at_3 value: 86.37599999999999 - type: ndcg_at_5 value: 87.964 - type: precision_at_1 value: 82.12 - type: precision_at_10 value: 13.56 - type: precision_at_100 value: 1.539 - type: 
precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.88 - type: precision_at_5 value: 24.92 - type: recall_at_1 value: 71.241 - type: recall_at_10 value: 96.128 - type: recall_at_100 value: 99.696 - type: recall_at_1000 value: 99.994 - type: recall_at_3 value: 88.181 - type: recall_at_5 value: 92.694 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 56.59757799655151 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 64.27391998854624 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.243 - type: map_at_10 value: 10.965 - type: map_at_100 value: 12.934999999999999 - type: map_at_1000 value: 13.256 - type: map_at_3 value: 7.907 - type: map_at_5 value: 9.435 - type: mrr_at_1 value: 20.9 - type: mrr_at_10 value: 31.849 - type: mrr_at_100 value: 32.964 - type: mrr_at_1000 value: 33.024 - type: mrr_at_3 value: 28.517 - type: mrr_at_5 value: 30.381999999999998 - type: ndcg_at_1 value: 20.9 - type: ndcg_at_10 value: 18.723 - type: ndcg_at_100 value: 26.384999999999998 - type: ndcg_at_1000 value: 32.114 - type: ndcg_at_3 value: 17.753 - type: ndcg_at_5 value: 15.558 - type: precision_at_1 value: 20.9 - type: precision_at_10 value: 9.8 - type: precision_at_100 value: 2.078 - type: precision_at_1000 value: 0.345 - type: precision_at_3 value: 16.900000000000002 - type: precision_at_5 value: 13.88 - type: recall_at_1 value: 4.243 - type: recall_at_10 value: 19.885 - type: recall_at_100 value: 42.17 - type: recall_at_1000 value: 70.12 - type: recall_at_3 value: 10.288 - type: recall_at_5 value: 14.072000000000001 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 85.84209174935282 - type: cos_sim_spearman value: 81.73248048438833 - type: euclidean_pearson value: 83.02810070308149 - type: euclidean_spearman value: 81.73248295679514 - type: manhattan_pearson value: 82.95368060376002 - type: manhattan_spearman value: 81.60277910998718 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 88.52628804556943 - type: cos_sim_spearman value: 82.5713913555672 - type: euclidean_pearson value: 85.8796774746988 - type: euclidean_spearman value: 82.57137506803424 - type: manhattan_pearson value: 85.79671002960058 - type: manhattan_spearman value: 82.49445981618027 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 86.23682503505542 - type: cos_sim_spearman value: 87.15008956711806 - type: euclidean_pearson value: 86.79805401524959 - type: euclidean_spearman value: 87.15008956711806 - type: manhattan_pearson value: 86.65298502699244 - type: manhattan_spearman value: 86.97677821948562 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 85.63370304677802 - type: cos_sim_spearman value: 
84.97105553540318 - type: euclidean_pearson value: 85.28896108687721 - type: euclidean_spearman value: 84.97105553540318 - type: manhattan_pearson value: 85.09663190337331 - type: manhattan_spearman value: 84.79126831644619 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 90.2614838800733 - type: cos_sim_spearman value: 91.0509162991835 - type: euclidean_pearson value: 90.33098317533373 - type: euclidean_spearman value: 91.05091625871644 - type: manhattan_pearson value: 90.26250435151107 - type: manhattan_spearman value: 90.97999594417519 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 85.80480973335091 - type: cos_sim_spearman value: 87.313695492969 - type: euclidean_pearson value: 86.49267251576939 - type: euclidean_spearman value: 87.313695492969 - type: manhattan_pearson value: 86.44019901831935 - type: manhattan_spearman value: 87.24205395460392 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 90.05662789380672 - type: cos_sim_spearman value: 90.02759424426651 - type: euclidean_pearson value: 90.4042483422981 - type: euclidean_spearman value: 90.02759424426651 - type: manhattan_pearson value: 90.51446975000226 - type: manhattan_spearman value: 90.08832889933616 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 67.5975528273532 - type: cos_sim_spearman value: 67.62969861411354 - type: euclidean_pearson value: 69.224275734323 - type: euclidean_spearman value: 67.62969861411354 - type: manhattan_pearson value: 69.3761447059927 - type: manhattan_spearman value: 67.90921005611467 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 87.11244327231684 - type: cos_sim_spearman value: 88.37902438979035 - type: euclidean_pearson value: 87.86054279847336 - type: euclidean_spearman value: 88.37902438979035 - type: manhattan_pearson value: 87.77257757320378 - type: manhattan_spearman value: 88.25208966098123 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 85.87174608143563 - type: mrr value: 96.12836872640794 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 57.760999999999996 - type: map_at_10 value: 67.258 - type: map_at_100 value: 67.757 - type: map_at_1000 value: 67.78800000000001 - type: map_at_3 value: 64.602 - type: map_at_5 value: 65.64 - type: mrr_at_1 value: 60.667 - type: mrr_at_10 value: 68.441 - type: mrr_at_100 value: 68.825 - type: mrr_at_1000 value: 68.853 - type: mrr_at_3 value: 66.444 - type: mrr_at_5 value: 67.26100000000001 - type: ndcg_at_1 value: 60.667 - type: ndcg_at_10 value: 71.852 - type: ndcg_at_100 value: 73.9 - type: ndcg_at_1000 value: 74.628 - type: ndcg_at_3 value: 67.093 - type: ndcg_at_5 value: 68.58 - type: precision_at_1 value: 
60.667 - type: precision_at_10 value: 9.6 - type: precision_at_100 value: 1.0670000000000002 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 26.111 - type: precision_at_5 value: 16.733 - type: recall_at_1 value: 57.760999999999996 - type: recall_at_10 value: 84.967 - type: recall_at_100 value: 93.833 - type: recall_at_1000 value: 99.333 - type: recall_at_3 value: 71.589 - type: recall_at_5 value: 75.483 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.66633663366336 - type: cos_sim_ap value: 91.17685358899108 - type: cos_sim_f1 value: 82.16818642350559 - type: cos_sim_precision value: 83.26488706365504 - type: cos_sim_recall value: 81.10000000000001 - type: dot_accuracy value: 99.66633663366336 - type: dot_ap value: 91.17663411119032 - type: dot_f1 value: 82.16818642350559 - type: dot_precision value: 83.26488706365504 - type: dot_recall value: 81.10000000000001 - type: euclidean_accuracy value: 99.66633663366336 - type: euclidean_ap value: 91.17685189882275 - type: euclidean_f1 value: 82.16818642350559 - type: euclidean_precision value: 83.26488706365504 - type: euclidean_recall value: 81.10000000000001 - type: manhattan_accuracy value: 99.66633663366336 - type: manhattan_ap value: 91.2241619496737 - type: manhattan_f1 value: 82.20472440944883 - type: manhattan_precision value: 86.51933701657458 - type: manhattan_recall value: 78.3 - type: max_accuracy value: 99.66633663366336 - type: max_ap value: 91.2241619496737 - type: max_f1 value: 82.20472440944883 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 66.85101268897951 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 42.461184054706905 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 51.44542568873886 - type: mrr value: 52.33656151854681 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.75982974997539 - type: cos_sim_spearman value: 30.385405026539914 - type: dot_pearson value: 30.75982433546523 - type: dot_spearman value: 30.385405026539914 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.22799999999999998 - type: map_at_10 value: 2.064 - type: map_at_100 value: 13.056000000000001 - type: map_at_1000 value: 31.747999999999998 - type: map_at_3 value: 0.67 - type: map_at_5 value: 1.097 - type: mrr_at_1 value: 90 - type: mrr_at_10 value: 94.667 - type: mrr_at_100 value: 94.667 - type: mrr_at_1000 value: 94.667 - type: mrr_at_3 value: 94.667 - type: mrr_at_5 value: 94.667 - type: ndcg_at_1 value: 86 - type: ndcg_at_10 value: 82 - type: ndcg_at_100 value: 64.307 - type: ndcg_at_1000 value: 57.023999999999994 - type: ndcg_at_3 value: 85.816 - type: 
ndcg_at_5 value: 84.904 - type: precision_at_1 value: 90 - type: precision_at_10 value: 85.8 - type: precision_at_100 value: 66.46 - type: precision_at_1000 value: 25.202 - type: precision_at_3 value: 90 - type: precision_at_5 value: 89.2 - type: recall_at_1 value: 0.22799999999999998 - type: recall_at_10 value: 2.235 - type: recall_at_100 value: 16.185 - type: recall_at_1000 value: 53.620999999999995 - type: recall_at_3 value: 0.7040000000000001 - type: recall_at_5 value: 1.172 - task: type: BitextMining dataset: name: MTEB Tatoeba (sqi-eng) type: mteb/tatoeba-bitext-mining config: sqi-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.39999999999999 - type: f1 value: 96.75 - type: precision value: 96.45 - type: recall value: 97.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (fry-eng) type: mteb/tatoeba-bitext-mining config: fry-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.54913294797689 - type: f1 value: 82.46628131021194 - type: precision value: 81.1175337186898 - type: recall value: 85.54913294797689 - task: type: BitextMining dataset: name: MTEB Tatoeba (kur-eng) type: mteb/tatoeba-bitext-mining config: kur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 81.21951219512195 - type: f1 value: 77.33333333333334 - type: precision value: 75.54878048780488 - type: recall value: 81.21951219512195 - task: type: BitextMining dataset: name: MTEB Tatoeba (tur-eng) type: mteb/tatoeba-bitext-mining config: tur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 98.6 - type: f1 value: 98.26666666666665 - type: precision value: 98.1 - type: recall value: 98.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (deu-eng) type: mteb/tatoeba-bitext-mining config: deu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 99.5 - type: f1 value: 99.33333333333333 - type: precision value: 99.25 - type: recall value: 99.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (nld-eng) type: mteb/tatoeba-bitext-mining config: nld-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.8 - type: f1 value: 97.2 - type: precision value: 96.89999999999999 - type: recall value: 97.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (ron-eng) type: mteb/tatoeba-bitext-mining config: ron-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.8 - type: f1 value: 97.18333333333334 - type: precision value: 96.88333333333333 - type: recall value: 97.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (ang-eng) type: mteb/tatoeba-bitext-mining config: ang-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.61194029850746 - type: f1 value: 72.81094527363183 - type: precision value: 70.83333333333333 - type: recall value: 77.61194029850746 - task: type: BitextMining dataset: name: MTEB Tatoeba (ido-eng) type: mteb/tatoeba-bitext-mining config: ido-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.7 - type: f1 value: 91.91666666666667 - type: precision value: 91.08333333333334 - type: recall value: 93.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (jav-eng) type: mteb/tatoeba-bitext-mining config: jav-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.29268292682927 - type: f1 value: 85.27642276422765 - type: precision value: 84.01277584204414 - type: recall value: 88.29268292682927 - task: type: BitextMining dataset: name: MTEB Tatoeba (isl-eng) type: mteb/tatoeba-bitext-mining config: isl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.1 - type: f1 value: 95 - type: precision value: 94.46666666666668 - type: recall value: 96.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (slv-eng) type: mteb/tatoeba-bitext-mining config: slv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.681652490887 - type: f1 value: 91.90765492102065 - type: precision value: 91.05913325232888 - type: recall value: 93.681652490887 - task: type: BitextMining dataset: name: MTEB Tatoeba (cym-eng) type: mteb/tatoeba-bitext-mining config: cym-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.17391304347827 - type: f1 value: 89.97101449275361 - type: precision value: 88.96811594202899 - type: recall value: 92.17391304347827 - task: type: BitextMining dataset: name: MTEB Tatoeba (kaz-eng) type: mteb/tatoeba-bitext-mining config: kaz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.43478260869566 - type: f1 value: 87.72173913043478 - type: precision value: 86.42028985507245 - type: recall value: 90.43478260869566 - task: type: BitextMining dataset: name: MTEB Tatoeba (est-eng) type: mteb/tatoeba-bitext-mining config: est-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.4 - type: f1 value: 88.03 - type: precision value: 86.95 - type: recall value: 90.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (heb-eng) type: mteb/tatoeba-bitext-mining config: heb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.4 - type: f1 value: 91.45666666666666 - type: precision value: 90.525 - type: recall value: 93.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (gla-eng) type: mteb/tatoeba-bitext-mining config: gla-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 81.9059107358263 - type: f1 value: 78.32557872364869 - type: precision value: 76.78260286824823 - type: recall value: 81.9059107358263 - task: type: BitextMining dataset: name: MTEB Tatoeba (mar-eng) type: mteb/tatoeba-bitext-mining config: mar-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.3 - type: f1 value: 92.58333333333333 - type: precision value: 91.73333333333332 - type: recall value: 94.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (lat-eng) type: mteb/tatoeba-bitext-mining config: lat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 79.10000000000001 - type: f1 value: 74.50500000000001 - type: precision value: 72.58928571428571 - type: recall value: 79.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (bel-eng) type: mteb/tatoeba-bitext-mining config: bel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.6 - type: f1 value: 95.55 - type: precision value: 95.05 - type: recall value: 96.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (pms-eng) type: 
mteb/tatoeba-bitext-mining config: pms-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 82.0952380952381 - type: f1 value: 77.98458049886621 - type: precision value: 76.1968253968254 - type: recall value: 82.0952380952381 - task: type: BitextMining dataset: name: MTEB Tatoeba (gle-eng) type: mteb/tatoeba-bitext-mining config: gle-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.9 - type: f1 value: 84.99190476190476 - type: precision value: 83.65 - type: recall value: 87.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (pes-eng) type: mteb/tatoeba-bitext-mining config: pes-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.7 - type: f1 value: 94.56666666666666 - type: precision value: 94.01666666666667 - type: recall value: 95.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (nob-eng) type: mteb/tatoeba-bitext-mining config: nob-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 98.6 - type: f1 value: 98.2 - type: precision value: 98 - type: recall value: 98.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (bul-eng) type: mteb/tatoeba-bitext-mining config: bul-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.6 - type: f1 value: 94.38333333333334 - type: precision value: 93.78333333333335 - type: recall value: 95.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (cbk-eng) type: mteb/tatoeba-bitext-mining config: cbk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.4 - type: f1 value: 84.10380952380952 - type: precision value: 82.67 - type: recall value: 87.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (hun-eng) type: mteb/tatoeba-bitext-mining config: hun-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.5 - type: f1 value: 94.33333333333334 - type: precision value: 93.78333333333333 - type: recall value: 95.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (uig-eng) type: mteb/tatoeba-bitext-mining config: uig-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.4 - type: f1 value: 86.82000000000001 - type: precision value: 85.64500000000001 - type: recall value: 89.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (rus-eng) type: mteb/tatoeba-bitext-mining config: rus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.1 - type: f1 value: 93.56666666666668 - type: precision value: 92.81666666666666 - type: recall value: 95.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (spa-eng) type: mteb/tatoeba-bitext-mining config: spa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 98.9 - type: f1 value: 98.6 - type: precision value: 98.45 - type: recall value: 98.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (hye-eng) type: mteb/tatoeba-bitext-mining config: hye-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.01347708894879 - type: f1 value: 93.51752021563343 - type: precision value: 92.82794249775381 - type: recall value: 95.01347708894879 - task: type: BitextMining dataset: name: MTEB Tatoeba (tel-eng) type: mteb/tatoeba-bitext-mining config: tel-eng split: 
test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.00854700854701 - type: f1 value: 96.08262108262107 - type: precision value: 95.65527065527067 - type: recall value: 97.00854700854701 - task: type: BitextMining dataset: name: MTEB Tatoeba (afr-eng) type: mteb/tatoeba-bitext-mining config: afr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.5 - type: f1 value: 95.39999999999999 - type: precision value: 94.88333333333333 - type: recall value: 96.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (mon-eng) type: mteb/tatoeba-bitext-mining config: mon-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.5909090909091 - type: f1 value: 95.49242424242425 - type: precision value: 94.9621212121212 - type: recall value: 96.5909090909091 - task: type: BitextMining dataset: name: MTEB Tatoeba (arz-eng) type: mteb/tatoeba-bitext-mining config: arz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 84.90566037735849 - type: f1 value: 81.85883997204752 - type: precision value: 80.54507337526205 - type: recall value: 84.90566037735849 - task: type: BitextMining dataset: name: MTEB Tatoeba (hrv-eng) type: mteb/tatoeba-bitext-mining config: hrv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.5 - type: f1 value: 96.75 - type: precision value: 96.38333333333333 - type: recall value: 97.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (nov-eng) type: mteb/tatoeba-bitext-mining config: nov-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 86.7704280155642 - type: f1 value: 82.99610894941635 - type: precision value: 81.32295719844358 - type: recall value: 86.7704280155642 - task: type: BitextMining dataset: name: MTEB Tatoeba (gsw-eng) type: mteb/tatoeba-bitext-mining config: gsw-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 67.52136752136752 - type: f1 value: 61.89662189662191 - type: precision value: 59.68660968660969 - type: recall value: 67.52136752136752 - task: type: BitextMining dataset: name: MTEB Tatoeba (nds-eng) type: mteb/tatoeba-bitext-mining config: nds-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.2 - type: f1 value: 86.32 - type: precision value: 85.015 - type: recall value: 89.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (ukr-eng) type: mteb/tatoeba-bitext-mining config: ukr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96 - type: f1 value: 94.78333333333333 - type: precision value: 94.18333333333334 - type: recall value: 96 - task: type: BitextMining dataset: name: MTEB Tatoeba (uzb-eng) type: mteb/tatoeba-bitext-mining config: uzb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 83.8785046728972 - type: f1 value: 80.54517133956385 - type: precision value: 79.154984423676 - type: recall value: 83.8785046728972 - task: type: BitextMining dataset: name: MTEB Tatoeba (lit-eng) type: mteb/tatoeba-bitext-mining config: lit-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.60000000000001 - type: f1 value: 92.01333333333334 - type: precision value: 91.28333333333333 - type: recall value: 93.60000000000001 - task: 
type: BitextMining dataset: name: MTEB Tatoeba (ina-eng) type: mteb/tatoeba-bitext-mining config: ina-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.1 - type: f1 value: 96.26666666666667 - type: precision value: 95.85000000000001 - type: recall value: 97.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (lfn-eng) type: mteb/tatoeba-bitext-mining config: lfn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 84.3 - type: f1 value: 80.67833333333333 - type: precision value: 79.03928571428571 - type: recall value: 84.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (zsm-eng) type: mteb/tatoeba-bitext-mining config: zsm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.3 - type: f1 value: 96.48333333333332 - type: precision value: 96.08333333333331 - type: recall value: 97.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (ita-eng) type: mteb/tatoeba-bitext-mining config: ita-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.7 - type: f1 value: 94.66666666666667 - type: precision value: 94.16666666666667 - type: recall value: 95.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (cmn-eng) type: mteb/tatoeba-bitext-mining config: cmn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.2 - type: f1 value: 96.36666666666667 - type: precision value: 95.96666666666668 - type: recall value: 97.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (lvs-eng) type: mteb/tatoeba-bitext-mining config: lvs-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.3 - type: f1 value: 92.80666666666667 - type: precision value: 92.12833333333333 - type: recall value: 94.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (glg-eng) type: mteb/tatoeba-bitext-mining config: glg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97 - type: f1 value: 96.22333333333334 - type: precision value: 95.875 - type: recall value: 97 - task: type: BitextMining dataset: name: MTEB Tatoeba (ceb-eng) type: mteb/tatoeba-bitext-mining config: ceb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 74.33333333333333 - type: f1 value: 70.78174603174602 - type: precision value: 69.28333333333332 - type: recall value: 74.33333333333333 - task: type: BitextMining dataset: name: MTEB Tatoeba (bre-eng) type: mteb/tatoeba-bitext-mining config: bre-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 37.6 - type: f1 value: 32.938348952090365 - type: precision value: 31.2811038961039 - type: recall value: 37.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (ben-eng) type: mteb/tatoeba-bitext-mining config: ben-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.5 - type: f1 value: 89.13333333333333 - type: precision value: 88.03333333333333 - type: recall value: 91.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (swg-eng) type: mteb/tatoeba-bitext-mining config: swg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 82.14285714285714 - type: f1 value: 77.67857142857143 - type: precision value: 75.59523809523809 - type: recall value: 
82.14285714285714 - task: type: BitextMining dataset: name: MTEB Tatoeba (arq-eng) type: mteb/tatoeba-bitext-mining config: arq-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 69.0450054884742 - type: f1 value: 63.070409283362075 - type: precision value: 60.58992781824835 - type: recall value: 69.0450054884742 - task: type: BitextMining dataset: name: MTEB Tatoeba (kab-eng) type: mteb/tatoeba-bitext-mining config: kab-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 63.1 - type: f1 value: 57.848333333333336 - type: precision value: 55.69500000000001 - type: recall value: 63.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (fra-eng) type: mteb/tatoeba-bitext-mining config: fra-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.1 - type: f1 value: 95.01666666666667 - type: precision value: 94.5 - type: recall value: 96.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (por-eng) type: mteb/tatoeba-bitext-mining config: por-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.89999999999999 - type: f1 value: 94.90666666666667 - type: precision value: 94.425 - type: recall value: 95.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (tat-eng) type: mteb/tatoeba-bitext-mining config: tat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.6 - type: f1 value: 84.61333333333333 - type: precision value: 83.27 - type: recall value: 87.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (oci-eng) type: mteb/tatoeba-bitext-mining config: oci-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 76.4 - type: f1 value: 71.90746031746032 - type: precision value: 70.07027777777778 - type: recall value: 76.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (pol-eng) type: mteb/tatoeba-bitext-mining config: pol-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.89999999999999 - type: f1 value: 97.26666666666667 - type: precision value: 96.95 - type: recall value: 97.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (war-eng) type: mteb/tatoeba-bitext-mining config: war-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 78.8 - type: f1 value: 74.39555555555555 - type: precision value: 72.59416666666667 - type: recall value: 78.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (aze-eng) type: mteb/tatoeba-bitext-mining config: aze-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.19999999999999 - type: f1 value: 93.78999999999999 - type: precision value: 93.125 - type: recall value: 95.19999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (vie-eng) type: mteb/tatoeba-bitext-mining config: vie-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.8 - type: f1 value: 97.1 - type: precision value: 96.75 - type: recall value: 97.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (nno-eng) type: mteb/tatoeba-bitext-mining config: nno-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.6 - type: f1 value: 94.25666666666666 - type: precision value: 93.64166666666668 - type: 
recall value: 95.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (cha-eng) type: mteb/tatoeba-bitext-mining config: cha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 56.934306569343065 - type: f1 value: 51.461591936044485 - type: precision value: 49.37434827945776 - type: recall value: 56.934306569343065 - task: type: BitextMining dataset: name: MTEB Tatoeba (mhr-eng) type: mteb/tatoeba-bitext-mining config: mhr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 20.200000000000003 - type: f1 value: 16.91799284049284 - type: precision value: 15.791855158730158 - type: recall value: 20.200000000000003 - task: type: BitextMining dataset: name: MTEB Tatoeba (dan-eng) type: mteb/tatoeba-bitext-mining config: dan-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.2 - type: f1 value: 95.3 - type: precision value: 94.85 - type: recall value: 96.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (ell-eng) type: mteb/tatoeba-bitext-mining config: ell-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.3 - type: f1 value: 95.11666666666667 - type: precision value: 94.53333333333333 - type: recall value: 96.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (amh-eng) type: mteb/tatoeba-bitext-mining config: amh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.88095238095238 - type: f1 value: 87.14285714285714 - type: precision value: 85.96230158730161 - type: recall value: 89.88095238095238 - task: type: BitextMining dataset: name: MTEB Tatoeba (pam-eng) type: mteb/tatoeba-bitext-mining config: pam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 24.099999999999998 - type: f1 value: 19.630969083349783 - type: precision value: 18.275094905094907 - type: recall value: 24.099999999999998 - task: type: BitextMining dataset: name: MTEB Tatoeba (hsb-eng) type: mteb/tatoeba-bitext-mining config: hsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 83.4368530020704 - type: f1 value: 79.45183870649709 - type: precision value: 77.7432712215321 - type: recall value: 83.4368530020704 - task: type: BitextMining dataset: name: MTEB Tatoeba (srp-eng) type: mteb/tatoeba-bitext-mining config: srp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.8 - type: f1 value: 94.53333333333333 - type: precision value: 93.91666666666666 - type: recall value: 95.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (epo-eng) type: mteb/tatoeba-bitext-mining config: epo-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 98.8 - type: f1 value: 98.48333333333332 - type: precision value: 98.33333333333334 - type: recall value: 98.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (kzj-eng) type: mteb/tatoeba-bitext-mining config: kzj-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 17.5 - type: f1 value: 14.979285714285714 - type: precision value: 14.23235060690943 - type: recall value: 17.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (awa-eng) type: mteb/tatoeba-bitext-mining config: awa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 
93.93939393939394 - type: f1 value: 91.991341991342 - type: precision value: 91.05339105339105 - type: recall value: 93.93939393939394 - task: type: BitextMining dataset: name: MTEB Tatoeba (fao-eng) type: mteb/tatoeba-bitext-mining config: fao-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.31297709923665 - type: f1 value: 86.76844783715012 - type: precision value: 85.63613231552164 - type: recall value: 89.31297709923665 - task: type: BitextMining dataset: name: MTEB Tatoeba (mal-eng) type: mteb/tatoeba-bitext-mining config: mal-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 99.12663755458514 - type: f1 value: 98.93255701115964 - type: precision value: 98.83551673944687 - type: recall value: 99.12663755458514 - task: type: BitextMining dataset: name: MTEB Tatoeba (ile-eng) type: mteb/tatoeba-bitext-mining config: ile-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92 - type: f1 value: 89.77999999999999 - type: precision value: 88.78333333333333 - type: recall value: 92 - task: type: BitextMining dataset: name: MTEB Tatoeba (bos-eng) type: mteb/tatoeba-bitext-mining config: bos-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.89265536723164 - type: f1 value: 95.85687382297553 - type: precision value: 95.33898305084746 - type: recall value: 96.89265536723164 - task: type: BitextMining dataset: name: MTEB Tatoeba (cor-eng) type: mteb/tatoeba-bitext-mining config: cor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 14.6 - type: f1 value: 11.820611790170615 - type: precision value: 11.022616224355355 - type: recall value: 14.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (cat-eng) type: mteb/tatoeba-bitext-mining config: cat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.89999999999999 - type: f1 value: 94.93333333333334 - type: precision value: 94.48666666666666 - type: recall value: 95.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (eus-eng) type: mteb/tatoeba-bitext-mining config: eus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.6 - type: f1 value: 84.72333333333334 - type: precision value: 83.44166666666666 - type: recall value: 87.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (yue-eng) type: mteb/tatoeba-bitext-mining config: yue-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.8 - type: f1 value: 93.47333333333333 - type: precision value: 92.875 - type: recall value: 94.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (swe-eng) type: mteb/tatoeba-bitext-mining config: swe-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.6 - type: f1 value: 95.71666666666665 - type: precision value: 95.28333333333335 - type: recall value: 96.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (dtp-eng) type: mteb/tatoeba-bitext-mining config: dtp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 17.8 - type: f1 value: 14.511074040901628 - type: precision value: 13.503791000666002 - type: recall value: 17.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (kat-eng) type: mteb/tatoeba-bitext-mining config: kat-eng 
split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.10187667560321 - type: f1 value: 92.46648793565683 - type: precision value: 91.71134941912423 - type: recall value: 94.10187667560321 - task: type: BitextMining dataset: name: MTEB Tatoeba (jpn-eng) type: mteb/tatoeba-bitext-mining config: jpn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97 - type: f1 value: 96.11666666666666 - type: precision value: 95.68333333333334 - type: recall value: 97 - task: type: BitextMining dataset: name: MTEB Tatoeba (csb-eng) type: mteb/tatoeba-bitext-mining config: csb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 72.72727272727273 - type: f1 value: 66.58949745906267 - type: precision value: 63.86693017127799 - type: recall value: 72.72727272727273 - task: type: BitextMining dataset: name: MTEB Tatoeba (xho-eng) type: mteb/tatoeba-bitext-mining config: xho-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.14084507042254 - type: f1 value: 88.26291079812206 - type: precision value: 87.32394366197182 - type: recall value: 90.14084507042254 - task: type: BitextMining dataset: name: MTEB Tatoeba (orv-eng) type: mteb/tatoeba-bitext-mining config: orv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 64.67065868263472 - type: f1 value: 58.2876627696987 - type: precision value: 55.79255774165953 - type: recall value: 64.67065868263472 - task: type: BitextMining dataset: name: MTEB Tatoeba (ind-eng) type: mteb/tatoeba-bitext-mining config: ind-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.6 - type: f1 value: 94.41666666666667 - type: precision value: 93.85 - type: recall value: 95.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (tuk-eng) type: mteb/tatoeba-bitext-mining config: tuk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 55.172413793103445 - type: f1 value: 49.63992493549144 - type: precision value: 47.71405113769646 - type: recall value: 55.172413793103445 - task: type: BitextMining dataset: name: MTEB Tatoeba (max-eng) type: mteb/tatoeba-bitext-mining config: max-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.46478873239437 - type: f1 value: 73.4417616811983 - type: precision value: 71.91607981220658 - type: recall value: 77.46478873239437 - task: type: BitextMining dataset: name: MTEB Tatoeba (swh-eng) type: mteb/tatoeba-bitext-mining config: swh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 84.61538461538461 - type: f1 value: 80.91452991452994 - type: precision value: 79.33760683760683 - type: recall value: 84.61538461538461 - task: type: BitextMining dataset: name: MTEB Tatoeba (hin-eng) type: mteb/tatoeba-bitext-mining config: hin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 98.2 - type: f1 value: 97.6 - type: precision value: 97.3 - type: recall value: 98.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (dsb-eng) type: mteb/tatoeba-bitext-mining config: dsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 75.5741127348643 - type: f1 value: 72.00417536534445 - type: precision value: 70.53467872883321 - type: 
recall value: 75.5741127348643 - task: type: BitextMining dataset: name: MTEB Tatoeba (ber-eng) type: mteb/tatoeba-bitext-mining config: ber-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 62.2 - type: f1 value: 55.577460317460314 - type: precision value: 52.98583333333333 - type: recall value: 62.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (tam-eng) type: mteb/tatoeba-bitext-mining config: tam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.18241042345277 - type: f1 value: 90.6468124709167 - type: precision value: 89.95656894679696 - type: recall value: 92.18241042345277 - task: type: BitextMining dataset: name: MTEB Tatoeba (slk-eng) type: mteb/tatoeba-bitext-mining config: slk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.1 - type: f1 value: 95.13333333333333 - type: precision value: 94.66666666666667 - type: recall value: 96.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (tgl-eng) type: mteb/tatoeba-bitext-mining config: tgl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.8 - type: f1 value: 95.85000000000001 - type: precision value: 95.39999999999999 - type: recall value: 96.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (ast-eng) type: mteb/tatoeba-bitext-mining config: ast-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.1259842519685 - type: f1 value: 89.76377952755905 - type: precision value: 88.71391076115485 - type: recall value: 92.1259842519685 - task: type: BitextMining dataset: name: MTEB Tatoeba (mkd-eng) type: mteb/tatoeba-bitext-mining config: mkd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.1 - type: f1 value: 92.49 - type: precision value: 91.725 - type: recall value: 94.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (khm-eng) type: mteb/tatoeba-bitext-mining config: khm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.5623268698061 - type: f1 value: 73.27364463791058 - type: precision value: 71.51947852086357 - type: recall value: 77.5623268698061 - task: type: BitextMining dataset: name: MTEB Tatoeba (ces-eng) type: mteb/tatoeba-bitext-mining config: ces-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.39999999999999 - type: f1 value: 96.56666666666666 - type: precision value: 96.16666666666667 - type: recall value: 97.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (tzl-eng) type: mteb/tatoeba-bitext-mining config: tzl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 66.34615384615384 - type: f1 value: 61.092032967032964 - type: precision value: 59.27197802197802 - type: recall value: 66.34615384615384 - task: type: BitextMining dataset: name: MTEB Tatoeba (urd-eng) type: mteb/tatoeba-bitext-mining config: urd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.89999999999999 - type: f1 value: 93.41190476190476 - type: precision value: 92.7 - type: recall value: 94.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (ara-eng) type: mteb/tatoeba-bitext-mining config: ara-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: 
accuracy value: 93.10000000000001 - type: f1 value: 91.10000000000001 - type: precision value: 90.13333333333333 - type: recall value: 93.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (kor-eng) type: mteb/tatoeba-bitext-mining config: kor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.7 - type: f1 value: 91.97333333333334 - type: precision value: 91.14166666666667 - type: recall value: 93.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (yid-eng) type: mteb/tatoeba-bitext-mining config: yid-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.21698113207547 - type: f1 value: 90.3796046720575 - type: precision value: 89.56367924528303 - type: recall value: 92.21698113207547 - task: type: BitextMining dataset: name: MTEB Tatoeba (fin-eng) type: mteb/tatoeba-bitext-mining config: fin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.6 - type: f1 value: 96.91666666666667 - type: precision value: 96.6 - type: recall value: 97.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (tha-eng) type: mteb/tatoeba-bitext-mining config: tha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.44525547445255 - type: f1 value: 96.71532846715328 - type: precision value: 96.35036496350365 - type: recall value: 97.44525547445255 - task: type: BitextMining dataset: name: MTEB Tatoeba (wuu-eng) type: mteb/tatoeba-bitext-mining config: wuu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.1 - type: f1 value: 92.34000000000002 - type: precision value: 91.49166666666667 - type: recall value: 94.1 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 3.2910000000000004 - type: map_at_10 value: 10.373000000000001 - type: map_at_100 value: 15.612 - type: map_at_1000 value: 17.06 - type: map_at_3 value: 6.119 - type: map_at_5 value: 7.917000000000001 - type: mrr_at_1 value: 44.897999999999996 - type: mrr_at_10 value: 56.054 - type: mrr_at_100 value: 56.82000000000001 - type: mrr_at_1000 value: 56.82000000000001 - type: mrr_at_3 value: 52.381 - type: mrr_at_5 value: 53.81 - type: ndcg_at_1 value: 42.857 - type: ndcg_at_10 value: 27.249000000000002 - type: ndcg_at_100 value: 36.529 - type: ndcg_at_1000 value: 48.136 - type: ndcg_at_3 value: 33.938 - type: ndcg_at_5 value: 29.951 - type: precision_at_1 value: 44.897999999999996 - type: precision_at_10 value: 22.653000000000002 - type: precision_at_100 value: 7.000000000000001 - type: precision_at_1000 value: 1.48 - type: precision_at_3 value: 32.653 - type: precision_at_5 value: 27.755000000000003 - type: recall_at_1 value: 3.2910000000000004 - type: recall_at_10 value: 16.16 - type: recall_at_100 value: 43.908 - type: recall_at_1000 value: 79.823 - type: recall_at_3 value: 7.156 - type: recall_at_5 value: 10.204 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.05879999999999 - type: ap value: 14.609748142799111 - type: f1 value: 54.878956295843096 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test 
revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 64.61799660441426 - type: f1 value: 64.8698191961434 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 51.32860036611885 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 88.34714192048638 - type: cos_sim_ap value: 80.26732975975634 - type: cos_sim_f1 value: 73.53415148134374 - type: cos_sim_precision value: 69.34767360299276 - type: cos_sim_recall value: 78.25857519788919 - type: dot_accuracy value: 88.34714192048638 - type: dot_ap value: 80.26733698491206 - type: dot_f1 value: 73.53415148134374 - type: dot_precision value: 69.34767360299276 - type: dot_recall value: 78.25857519788919 - type: euclidean_accuracy value: 88.34714192048638 - type: euclidean_ap value: 80.26734337771738 - type: euclidean_f1 value: 73.53415148134374 - type: euclidean_precision value: 69.34767360299276 - type: euclidean_recall value: 78.25857519788919 - type: manhattan_accuracy value: 88.30541813196639 - type: manhattan_ap value: 80.19415808104145 - type: manhattan_f1 value: 73.55143870713441 - type: manhattan_precision value: 73.25307511122743 - type: manhattan_recall value: 73.85224274406332 - type: max_accuracy value: 88.34714192048638 - type: max_ap value: 80.26734337771738 - type: max_f1 value: 73.55143870713441 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.81061047075717 - type: cos_sim_ap value: 87.11747055081017 - type: cos_sim_f1 value: 80.04355498817256 - type: cos_sim_precision value: 78.1165262000733 - type: cos_sim_recall value: 82.06806282722513 - type: dot_accuracy value: 89.81061047075717 - type: dot_ap value: 87.11746902745236 - type: dot_f1 value: 80.04355498817256 - type: dot_precision value: 78.1165262000733 - type: dot_recall value: 82.06806282722513 - type: euclidean_accuracy value: 89.81061047075717 - type: euclidean_ap value: 87.11746919324248 - type: euclidean_f1 value: 80.04355498817256 - type: euclidean_precision value: 78.1165262000733 - type: euclidean_recall value: 82.06806282722513 - type: manhattan_accuracy value: 89.79508673885202 - type: manhattan_ap value: 87.11074390832218 - type: manhattan_f1 value: 80.13002540726349 - type: manhattan_precision value: 77.83826945412311 - type: manhattan_recall value: 82.56082537727133 - type: max_accuracy value: 89.81061047075717 - type: max_ap value: 87.11747055081017 - type: max_f1 value: 80.13002540726349 --- # Impulse2000/multilingual-e5-large-instruct-GGUF This model was converted to GGUF format from [`intfloat/multilingual-e5-large-instruct`](https://huggingface.co/intfloat/multilingual-e5-large-instruct) using llama.cpp via its 'convert_hf_to_gguf.py' script. Refer to the [original model card](https://huggingface.co/intfloat/multilingual-e5-large-instruct) for more details on the model.
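The snippet below is a minimal sketch of loading a GGUF embedding model through the `llama-cpp-python` bindings. The file name, the instruction prefix, and the exact response layout of `create_embedding` are illustrative assumptions; point the path at whichever `.gguf` file this repository actually ships and check the original model card for the expected prompt template.

```python
# Minimal sketch: loading a GGUF embedding model with llama-cpp-python.
# The model_path below is a placeholder; use the .gguf file shipped in this repo.
from llama_cpp import Llama

llm = Llama(
    model_path="multilingual-e5-large-instruct.gguf",  # placeholder file name
    embedding=True,  # run in embedding mode instead of text generation
)

# E5-instruct models expect an instruction-prefixed query (see the original
# model card for the exact template); this string is only an example.
query = (
    "Instruct: Given a web search query, retrieve relevant passages\n"
    "Query: how much protein should a female eat"
)

result = llm.create_embedding(query)
vector = result["data"][0]["embedding"]  # OpenAI-style response layout
print(len(vector))
```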
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
jspringer/echo-mistral-7b-instruct-lasttoken
jspringer
feature-extraction
[ "transformers", "safetensors", "mistral", "feature-extraction", "mteb", "arxiv:2402.15449", "model-index", "text-generation-inference", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
2024-02-19T04:50:08
2024-02-26T05:59:22
112
6
--- tags: - mteb model-index: - name: mlm results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 82.97014925373135 - type: ap value: 49.6288385893607 - type: f1 value: 77.58957447993662 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 90.975425 - type: ap value: 87.57349835900825 - type: f1 value: 90.96732416386632 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.708 - type: f1 value: 47.736228936979586 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 32.006 - type: map_at_10 value: 49.268 - type: map_at_100 value: 49.903999999999996 - type: map_at_1000 value: 49.909 - type: map_at_3 value: 44.334 - type: map_at_5 value: 47.374 - type: mrr_at_1 value: 32.788000000000004 - type: mrr_at_10 value: 49.707 - type: mrr_at_100 value: 50.346999999999994 - type: mrr_at_1000 value: 50.352 - type: mrr_at_3 value: 44.95 - type: mrr_at_5 value: 47.766999999999996 - type: ndcg_at_1 value: 32.006 - type: ndcg_at_10 value: 58.523 - type: ndcg_at_100 value: 61.095 - type: ndcg_at_1000 value: 61.190999999999995 - type: ndcg_at_3 value: 48.431000000000004 - type: ndcg_at_5 value: 53.94 - type: precision_at_1 value: 32.006 - type: precision_at_10 value: 8.791 - type: precision_at_100 value: 0.989 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 20.104 - type: precision_at_5 value: 14.751 - type: recall_at_1 value: 32.006 - type: recall_at_10 value: 87.909 - type: recall_at_100 value: 98.86200000000001 - type: recall_at_1000 value: 99.57300000000001 - type: recall_at_3 value: 60.313 - type: recall_at_5 value: 73.75500000000001 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 47.01500173547629 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 43.52209238193538 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 64.1348784470504 - type: mrr value: 76.93762916062083 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 87.8322696692348 - type: cos_sim_spearman value: 86.53751398463592 - type: euclidean_pearson value: 86.1435544054336 - type: euclidean_spearman value: 86.70799979698164 - type: manhattan_pearson value: 86.1206703865016 - type: manhattan_spearman value: 86.47004256773585 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy 
value: 88.1461038961039 - type: f1 value: 88.09877611214092 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 35.53021718892608 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 35.34236915611622 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 36.435 - type: map_at_10 value: 49.437999999999995 - type: map_at_100 value: 51.105999999999995 - type: map_at_1000 value: 51.217999999999996 - type: map_at_3 value: 44.856 - type: map_at_5 value: 47.195 - type: mrr_at_1 value: 45.78 - type: mrr_at_10 value: 56.302 - type: mrr_at_100 value: 56.974000000000004 - type: mrr_at_1000 value: 57.001999999999995 - type: mrr_at_3 value: 53.6 - type: mrr_at_5 value: 55.059999999999995 - type: ndcg_at_1 value: 44.921 - type: ndcg_at_10 value: 56.842000000000006 - type: ndcg_at_100 value: 61.586 - type: ndcg_at_1000 value: 63.039 - type: ndcg_at_3 value: 50.612 - type: ndcg_at_5 value: 53.181 - type: precision_at_1 value: 44.921 - type: precision_at_10 value: 11.245 - type: precision_at_100 value: 1.7069999999999999 - type: precision_at_1000 value: 0.216 - type: precision_at_3 value: 24.224999999999998 - type: precision_at_5 value: 17.511 - type: recall_at_1 value: 36.435 - type: recall_at_10 value: 70.998 - type: recall_at_100 value: 89.64 - type: recall_at_1000 value: 98.654 - type: recall_at_3 value: 53.034000000000006 - type: recall_at_5 value: 60.41 - type: map_at_1 value: 33.371 - type: map_at_10 value: 45.301 - type: map_at_100 value: 46.663 - type: map_at_1000 value: 46.791 - type: map_at_3 value: 41.79 - type: map_at_5 value: 43.836999999999996 - type: mrr_at_1 value: 42.611 - type: mrr_at_10 value: 51.70400000000001 - type: mrr_at_100 value: 52.342 - type: mrr_at_1000 value: 52.38 - type: mrr_at_3 value: 49.374 - type: mrr_at_5 value: 50.82 - type: ndcg_at_1 value: 42.166 - type: ndcg_at_10 value: 51.49 - type: ndcg_at_100 value: 56.005 - type: ndcg_at_1000 value: 57.748 - type: ndcg_at_3 value: 46.769 - type: ndcg_at_5 value: 49.155 - type: precision_at_1 value: 42.166 - type: precision_at_10 value: 9.841 - type: precision_at_100 value: 1.569 - type: precision_at_1000 value: 0.202 - type: precision_at_3 value: 22.803 - type: precision_at_5 value: 16.229 - type: recall_at_1 value: 33.371 - type: recall_at_10 value: 62.52799999999999 - type: recall_at_100 value: 81.269 - type: recall_at_1000 value: 91.824 - type: recall_at_3 value: 48.759 - type: recall_at_5 value: 55.519 - type: map_at_1 value: 41.421 - type: map_at_10 value: 55.985 - type: map_at_100 value: 56.989999999999995 - type: map_at_1000 value: 57.028 - type: map_at_3 value: 52.271 - type: map_at_5 value: 54.517 - type: mrr_at_1 value: 47.272999999999996 - type: mrr_at_10 value: 59.266 - type: mrr_at_100 value: 59.821999999999996 - type: mrr_at_1000 value: 59.839 - type: mrr_at_3 value: 56.677 - type: mrr_at_5 value: 58.309999999999995 - type: ndcg_at_1 value: 47.147 - type: ndcg_at_10 value: 62.596 - type: ndcg_at_100 value: 66.219 - type: ndcg_at_1000 value: 66.886 - type: ndcg_at_3 value: 56.558 - type: ndcg_at_5 value: 59.805 - type: precision_at_1 value: 47.147 - type: precision_at_10 value: 10.245 
- type: precision_at_100 value: 1.302 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 25.663999999999998 - type: precision_at_5 value: 17.793 - type: recall_at_1 value: 41.421 - type: recall_at_10 value: 78.77499999999999 - type: recall_at_100 value: 93.996 - type: recall_at_1000 value: 98.60600000000001 - type: recall_at_3 value: 62.891 - type: recall_at_5 value: 70.819 - type: map_at_1 value: 27.517999999999997 - type: map_at_10 value: 37.468 - type: map_at_100 value: 38.667 - type: map_at_1000 value: 38.743 - type: map_at_3 value: 34.524 - type: map_at_5 value: 36.175000000000004 - type: mrr_at_1 value: 29.378999999999998 - type: mrr_at_10 value: 39.54 - type: mrr_at_100 value: 40.469 - type: mrr_at_1000 value: 40.522000000000006 - type: mrr_at_3 value: 36.685 - type: mrr_at_5 value: 38.324000000000005 - type: ndcg_at_1 value: 29.718 - type: ndcg_at_10 value: 43.091 - type: ndcg_at_100 value: 48.44 - type: ndcg_at_1000 value: 50.181 - type: ndcg_at_3 value: 37.34 - type: ndcg_at_5 value: 40.177 - type: precision_at_1 value: 29.718 - type: precision_at_10 value: 6.723 - type: precision_at_100 value: 0.992 - type: precision_at_1000 value: 0.117 - type: precision_at_3 value: 16.083 - type: precision_at_5 value: 11.322000000000001 - type: recall_at_1 value: 27.517999999999997 - type: recall_at_10 value: 58.196999999999996 - type: recall_at_100 value: 82.07799999999999 - type: recall_at_1000 value: 94.935 - type: recall_at_3 value: 42.842 - type: recall_at_5 value: 49.58 - type: map_at_1 value: 19.621 - type: map_at_10 value: 30.175 - type: map_at_100 value: 31.496000000000002 - type: map_at_1000 value: 31.602000000000004 - type: map_at_3 value: 26.753 - type: map_at_5 value: 28.857 - type: mrr_at_1 value: 25.497999999999998 - type: mrr_at_10 value: 35.44 - type: mrr_at_100 value: 36.353 - type: mrr_at_1000 value: 36.412 - type: mrr_at_3 value: 32.275999999999996 - type: mrr_at_5 value: 34.434 - type: ndcg_at_1 value: 24.502 - type: ndcg_at_10 value: 36.423 - type: ndcg_at_100 value: 42.289 - type: ndcg_at_1000 value: 44.59 - type: ndcg_at_3 value: 30.477999999999998 - type: ndcg_at_5 value: 33.787 - type: precision_at_1 value: 24.502 - type: precision_at_10 value: 6.978 - type: precision_at_100 value: 1.139 - type: precision_at_1000 value: 0.145 - type: precision_at_3 value: 15.008 - type: precision_at_5 value: 11.468 - type: recall_at_1 value: 19.621 - type: recall_at_10 value: 50.516000000000005 - type: recall_at_100 value: 75.721 - type: recall_at_1000 value: 91.77199999999999 - type: recall_at_3 value: 34.695 - type: recall_at_5 value: 42.849 - type: map_at_1 value: 33.525 - type: map_at_10 value: 46.153 - type: map_at_100 value: 47.61 - type: map_at_1000 value: 47.715 - type: map_at_3 value: 42.397 - type: map_at_5 value: 44.487 - type: mrr_at_1 value: 42.445 - type: mrr_at_10 value: 52.174 - type: mrr_at_100 value: 52.986999999999995 - type: mrr_at_1000 value: 53.016 - type: mrr_at_3 value: 49.647000000000006 - type: mrr_at_5 value: 51.215999999999994 - type: ndcg_at_1 value: 42.156 - type: ndcg_at_10 value: 52.698 - type: ndcg_at_100 value: 58.167 - type: ndcg_at_1000 value: 59.71300000000001 - type: ndcg_at_3 value: 47.191 - type: ndcg_at_5 value: 49.745 - type: precision_at_1 value: 42.156 - type: precision_at_10 value: 9.682 - type: precision_at_100 value: 1.469 - type: precision_at_1000 value: 0.17700000000000002 - type: precision_at_3 value: 22.682 - type: precision_at_5 value: 16.035 - type: recall_at_1 value: 33.525 - type: recall_at_10 
value: 66.142 - type: recall_at_100 value: 88.248 - type: recall_at_1000 value: 97.806 - type: recall_at_3 value: 50.541000000000004 - type: recall_at_5 value: 57.275 - type: map_at_1 value: 28.249000000000002 - type: map_at_10 value: 41.659 - type: map_at_100 value: 43.001 - type: map_at_1000 value: 43.094 - type: map_at_3 value: 37.607 - type: map_at_5 value: 39.662 - type: mrr_at_1 value: 36.301 - type: mrr_at_10 value: 47.482 - type: mrr_at_100 value: 48.251 - type: mrr_at_1000 value: 48.288 - type: mrr_at_3 value: 44.444 - type: mrr_at_5 value: 46.013999999999996 - type: ndcg_at_1 value: 35.616 - type: ndcg_at_10 value: 49.021 - type: ndcg_at_100 value: 54.362 - type: ndcg_at_1000 value: 55.864999999999995 - type: ndcg_at_3 value: 42.515 - type: ndcg_at_5 value: 45.053 - type: precision_at_1 value: 35.616 - type: precision_at_10 value: 9.372 - type: precision_at_100 value: 1.4120000000000001 - type: precision_at_1000 value: 0.172 - type: precision_at_3 value: 21.043 - type: precision_at_5 value: 14.84 - type: recall_at_1 value: 28.249000000000002 - type: recall_at_10 value: 65.514 - type: recall_at_100 value: 87.613 - type: recall_at_1000 value: 97.03 - type: recall_at_3 value: 47.21 - type: recall_at_5 value: 54.077 - type: map_at_1 value: 29.164583333333333 - type: map_at_10 value: 40.632000000000005 - type: map_at_100 value: 41.96875 - type: map_at_1000 value: 42.07508333333333 - type: map_at_3 value: 37.18458333333333 - type: map_at_5 value: 39.13700000000001 - type: mrr_at_1 value: 35.2035 - type: mrr_at_10 value: 45.28816666666666 - type: mrr_at_100 value: 46.11466666666667 - type: mrr_at_1000 value: 46.15741666666667 - type: mrr_at_3 value: 42.62925 - type: mrr_at_5 value: 44.18141666666667 - type: ndcg_at_1 value: 34.88958333333333 - type: ndcg_at_10 value: 46.90650000000001 - type: ndcg_at_100 value: 52.135333333333335 - type: ndcg_at_1000 value: 53.89766666666668 - type: ndcg_at_3 value: 41.32075 - type: ndcg_at_5 value: 44.02083333333333 - type: precision_at_1 value: 34.88958333333333 - type: precision_at_10 value: 8.392833333333332 - type: precision_at_100 value: 1.3085833333333334 - type: precision_at_1000 value: 0.16458333333333333 - type: precision_at_3 value: 19.361166666666666 - type: precision_at_5 value: 13.808416666666668 - type: recall_at_1 value: 29.164583333333333 - type: recall_at_10 value: 60.874666666666656 - type: recall_at_100 value: 83.21008333333334 - type: recall_at_1000 value: 95.09275000000001 - type: recall_at_3 value: 45.37591666666667 - type: recall_at_5 value: 52.367666666666665 - type: map_at_1 value: 28.682000000000002 - type: map_at_10 value: 37.913000000000004 - type: map_at_100 value: 39.037 - type: map_at_1000 value: 39.123999999999995 - type: map_at_3 value: 35.398 - type: map_at_5 value: 36.906 - type: mrr_at_1 value: 32.362 - type: mrr_at_10 value: 40.92 - type: mrr_at_100 value: 41.748000000000005 - type: mrr_at_1000 value: 41.81 - type: mrr_at_3 value: 38.701 - type: mrr_at_5 value: 39.936 - type: ndcg_at_1 value: 32.208999999999996 - type: ndcg_at_10 value: 42.84 - type: ndcg_at_100 value: 47.927 - type: ndcg_at_1000 value: 50.048 - type: ndcg_at_3 value: 38.376 - type: ndcg_at_5 value: 40.661 - type: precision_at_1 value: 32.208999999999996 - type: precision_at_10 value: 6.718 - type: precision_at_100 value: 1.012 - type: precision_at_1000 value: 0.127 - type: precision_at_3 value: 16.667 - type: precision_at_5 value: 11.503 - type: recall_at_1 value: 28.682000000000002 - type: recall_at_10 value: 54.872 - type: recall_at_100 value: 
77.42999999999999 - type: recall_at_1000 value: 93.054 - type: recall_at_3 value: 42.577999999999996 - type: recall_at_5 value: 48.363 - type: map_at_1 value: 19.698 - type: map_at_10 value: 28.777 - type: map_at_100 value: 30.091 - type: map_at_1000 value: 30.209999999999997 - type: map_at_3 value: 25.874000000000002 - type: map_at_5 value: 27.438000000000002 - type: mrr_at_1 value: 24.295 - type: mrr_at_10 value: 33.077 - type: mrr_at_100 value: 34.036 - type: mrr_at_1000 value: 34.1 - type: mrr_at_3 value: 30.523 - type: mrr_at_5 value: 31.891000000000002 - type: ndcg_at_1 value: 24.535 - type: ndcg_at_10 value: 34.393 - type: ndcg_at_100 value: 40.213 - type: ndcg_at_1000 value: 42.748000000000005 - type: ndcg_at_3 value: 29.316 - type: ndcg_at_5 value: 31.588 - type: precision_at_1 value: 24.535 - type: precision_at_10 value: 6.483 - type: precision_at_100 value: 1.102 - type: precision_at_1000 value: 0.151 - type: precision_at_3 value: 14.201 - type: precision_at_5 value: 10.344000000000001 - type: recall_at_1 value: 19.698 - type: recall_at_10 value: 46.903 - type: recall_at_100 value: 72.624 - type: recall_at_1000 value: 90.339 - type: recall_at_3 value: 32.482 - type: recall_at_5 value: 38.452 - type: map_at_1 value: 30.56 - type: map_at_10 value: 41.993 - type: map_at_100 value: 43.317 - type: map_at_1000 value: 43.399 - type: map_at_3 value: 38.415 - type: map_at_5 value: 40.472 - type: mrr_at_1 value: 36.474000000000004 - type: mrr_at_10 value: 46.562 - type: mrr_at_100 value: 47.497 - type: mrr_at_1000 value: 47.532999999999994 - type: mrr_at_3 value: 43.905 - type: mrr_at_5 value: 45.379000000000005 - type: ndcg_at_1 value: 36.287000000000006 - type: ndcg_at_10 value: 48.262 - type: ndcg_at_100 value: 53.789 - type: ndcg_at_1000 value: 55.44 - type: ndcg_at_3 value: 42.358000000000004 - type: ndcg_at_5 value: 45.221000000000004 - type: precision_at_1 value: 36.287000000000006 - type: precision_at_10 value: 8.265 - type: precision_at_100 value: 1.24 - type: precision_at_1000 value: 0.148 - type: precision_at_3 value: 19.558 - type: precision_at_5 value: 13.880999999999998 - type: recall_at_1 value: 30.56 - type: recall_at_10 value: 62.891 - type: recall_at_100 value: 85.964 - type: recall_at_1000 value: 97.087 - type: recall_at_3 value: 46.755 - type: recall_at_5 value: 53.986000000000004 - type: map_at_1 value: 29.432000000000002 - type: map_at_10 value: 40.898 - type: map_at_100 value: 42.794 - type: map_at_1000 value: 43.029 - type: map_at_3 value: 37.658 - type: map_at_5 value: 39.519 - type: mrr_at_1 value: 36.364000000000004 - type: mrr_at_10 value: 46.9 - type: mrr_at_100 value: 47.819 - type: mrr_at_1000 value: 47.848 - type: mrr_at_3 value: 44.202999999999996 - type: mrr_at_5 value: 45.715 - type: ndcg_at_1 value: 35.573 - type: ndcg_at_10 value: 47.628 - type: ndcg_at_100 value: 53.88699999999999 - type: ndcg_at_1000 value: 55.584 - type: ndcg_at_3 value: 42.669000000000004 - type: ndcg_at_5 value: 45.036 - type: precision_at_1 value: 35.573 - type: precision_at_10 value: 8.933 - type: precision_at_100 value: 1.8159999999999998 - type: precision_at_1000 value: 0.256 - type: precision_at_3 value: 20.29 - type: precision_at_5 value: 14.387 - type: recall_at_1 value: 29.432000000000002 - type: recall_at_10 value: 60.388 - type: recall_at_100 value: 87.144 - type: recall_at_1000 value: 97.154 - type: recall_at_3 value: 45.675 - type: recall_at_5 value: 52.35300000000001 - type: map_at_1 value: 21.462999999999997 - type: map_at_10 value: 31.824 - type: map_at_100 value: 
32.853 - type: map_at_1000 value: 32.948 - type: map_at_3 value: 28.671999999999997 - type: map_at_5 value: 30.579 - type: mrr_at_1 value: 23.66 - type: mrr_at_10 value: 34.091 - type: mrr_at_100 value: 35.077999999999996 - type: mrr_at_1000 value: 35.138999999999996 - type: mrr_at_3 value: 31.516 - type: mrr_at_5 value: 33.078 - type: ndcg_at_1 value: 23.845 - type: ndcg_at_10 value: 37.594 - type: ndcg_at_100 value: 42.74 - type: ndcg_at_1000 value: 44.93 - type: ndcg_at_3 value: 31.667 - type: ndcg_at_5 value: 34.841 - type: precision_at_1 value: 23.845 - type: precision_at_10 value: 6.229 - type: precision_at_100 value: 0.943 - type: precision_at_1000 value: 0.125 - type: precision_at_3 value: 14.11 - type: precision_at_5 value: 10.388 - type: recall_at_1 value: 21.462999999999997 - type: recall_at_10 value: 52.772 - type: recall_at_100 value: 76.794 - type: recall_at_1000 value: 92.852 - type: recall_at_3 value: 37.049 - type: recall_at_5 value: 44.729 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 15.466 - type: map_at_10 value: 25.275 - type: map_at_100 value: 27.176000000000002 - type: map_at_1000 value: 27.374 - type: map_at_3 value: 21.438 - type: map_at_5 value: 23.366 - type: mrr_at_1 value: 35.699999999999996 - type: mrr_at_10 value: 47.238 - type: mrr_at_100 value: 47.99 - type: mrr_at_1000 value: 48.021 - type: mrr_at_3 value: 44.463 - type: mrr_at_5 value: 46.039 - type: ndcg_at_1 value: 35.244 - type: ndcg_at_10 value: 34.559 - type: ndcg_at_100 value: 41.74 - type: ndcg_at_1000 value: 45.105000000000004 - type: ndcg_at_3 value: 29.284 - type: ndcg_at_5 value: 30.903999999999996 - type: precision_at_1 value: 35.244 - type: precision_at_10 value: 10.463000000000001 - type: precision_at_100 value: 1.8259999999999998 - type: precision_at_1000 value: 0.246 - type: precision_at_3 value: 21.65 - type: precision_at_5 value: 16.078 - type: recall_at_1 value: 15.466 - type: recall_at_10 value: 39.782000000000004 - type: recall_at_100 value: 64.622 - type: recall_at_1000 value: 83.233 - type: recall_at_3 value: 26.398 - type: recall_at_5 value: 31.676 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 9.414 - type: map_at_10 value: 22.435 - type: map_at_100 value: 32.393 - type: map_at_1000 value: 34.454 - type: map_at_3 value: 15.346000000000002 - type: map_at_5 value: 18.282999999999998 - type: mrr_at_1 value: 71.5 - type: mrr_at_10 value: 78.795 - type: mrr_at_100 value: 79.046 - type: mrr_at_1000 value: 79.054 - type: mrr_at_3 value: 77.333 - type: mrr_at_5 value: 78.146 - type: ndcg_at_1 value: 60.75000000000001 - type: ndcg_at_10 value: 46.829 - type: ndcg_at_100 value: 52.370000000000005 - type: ndcg_at_1000 value: 59.943999999999996 - type: ndcg_at_3 value: 51.33 - type: ndcg_at_5 value: 48.814 - type: precision_at_1 value: 71.75 - type: precision_at_10 value: 37.525 - type: precision_at_100 value: 12.075 - type: precision_at_1000 value: 2.464 - type: precision_at_3 value: 54.75 - type: precision_at_5 value: 47.55 - type: recall_at_1 value: 9.414 - type: recall_at_10 value: 28.67 - type: recall_at_100 value: 59.924 - type: recall_at_1000 value: 83.921 - type: recall_at_3 value: 16.985 - type: recall_at_5 value: 21.372 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 
4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 52.18000000000001 - type: f1 value: 47.04613218997081 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 82.57900000000001 - type: map_at_10 value: 88.465 - type: map_at_100 value: 88.649 - type: map_at_1000 value: 88.661 - type: map_at_3 value: 87.709 - type: map_at_5 value: 88.191 - type: mrr_at_1 value: 88.899 - type: mrr_at_10 value: 93.35900000000001 - type: mrr_at_100 value: 93.38499999999999 - type: mrr_at_1000 value: 93.38499999999999 - type: mrr_at_3 value: 93.012 - type: mrr_at_5 value: 93.282 - type: ndcg_at_1 value: 88.98899999999999 - type: ndcg_at_10 value: 91.22 - type: ndcg_at_100 value: 91.806 - type: ndcg_at_1000 value: 92.013 - type: ndcg_at_3 value: 90.236 - type: ndcg_at_5 value: 90.798 - type: precision_at_1 value: 88.98899999999999 - type: precision_at_10 value: 10.537 - type: precision_at_100 value: 1.106 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_3 value: 33.598 - type: precision_at_5 value: 20.618 - type: recall_at_1 value: 82.57900000000001 - type: recall_at_10 value: 94.95400000000001 - type: recall_at_100 value: 97.14 - type: recall_at_1000 value: 98.407 - type: recall_at_3 value: 92.203 - type: recall_at_5 value: 93.747 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 27.871000000000002 - type: map_at_10 value: 46.131 - type: map_at_100 value: 48.245 - type: map_at_1000 value: 48.361 - type: map_at_3 value: 40.03 - type: map_at_5 value: 43.634 - type: mrr_at_1 value: 52.932 - type: mrr_at_10 value: 61.61299999999999 - type: mrr_at_100 value: 62.205 - type: mrr_at_1000 value: 62.224999999999994 - type: mrr_at_3 value: 59.388 - type: mrr_at_5 value: 60.760999999999996 - type: ndcg_at_1 value: 53.395 - type: ndcg_at_10 value: 54.506 - type: ndcg_at_100 value: 61.151999999999994 - type: ndcg_at_1000 value: 62.882000000000005 - type: ndcg_at_3 value: 49.903999999999996 - type: ndcg_at_5 value: 51.599 - type: precision_at_1 value: 53.395 - type: precision_at_10 value: 15.247 - type: precision_at_100 value: 2.221 - type: precision_at_1000 value: 0.255 - type: precision_at_3 value: 33.539 - type: precision_at_5 value: 24.722 - type: recall_at_1 value: 27.871000000000002 - type: recall_at_10 value: 62.074 - type: recall_at_100 value: 86.531 - type: recall_at_1000 value: 96.574 - type: recall_at_3 value: 45.003 - type: recall_at_5 value: 53.00899999999999 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 40.513 - type: map_at_10 value: 69.066 - type: map_at_100 value: 69.903 - type: map_at_1000 value: 69.949 - type: map_at_3 value: 65.44200000000001 - type: map_at_5 value: 67.784 - type: mrr_at_1 value: 80.891 - type: mrr_at_10 value: 86.42699999999999 - type: mrr_at_100 value: 86.577 - type: mrr_at_1000 value: 86.58200000000001 - type: mrr_at_3 value: 85.6 - type: mrr_at_5 value: 86.114 - type: ndcg_at_1 value: 81.026 - type: ndcg_at_10 value: 76.412 - type: ndcg_at_100 value: 79.16 - type: ndcg_at_1000 value: 79.989 - type: ndcg_at_3 value: 71.45 - type: ndcg_at_5 value: 74.286 - type: precision_at_1 value: 81.026 - type: precision_at_10 value: 16.198999999999998 - type: precision_at_100 value: 1.831 - type: precision_at_1000 value: 0.194 - type: precision_at_3 value: 46.721000000000004 - type: 
precision_at_5 value: 30.266 - type: recall_at_1 value: 40.513 - type: recall_at_10 value: 80.99300000000001 - type: recall_at_100 value: 91.526 - type: recall_at_1000 value: 96.935 - type: recall_at_3 value: 70.081 - type: recall_at_5 value: 75.665 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 87.42320000000001 - type: ap value: 83.59975323233843 - type: f1 value: 87.38669942597816 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 22.676 - type: map_at_10 value: 35.865 - type: map_at_100 value: 37.019000000000005 - type: map_at_1000 value: 37.062 - type: map_at_3 value: 31.629 - type: map_at_5 value: 34.050999999999995 - type: mrr_at_1 value: 23.023 - type: mrr_at_10 value: 36.138999999999996 - type: mrr_at_100 value: 37.242 - type: mrr_at_1000 value: 37.28 - type: mrr_at_3 value: 32.053 - type: mrr_at_5 value: 34.383 - type: ndcg_at_1 value: 23.308999999999997 - type: ndcg_at_10 value: 43.254 - type: ndcg_at_100 value: 48.763 - type: ndcg_at_1000 value: 49.788 - type: ndcg_at_3 value: 34.688 - type: ndcg_at_5 value: 38.973 - type: precision_at_1 value: 23.308999999999997 - type: precision_at_10 value: 6.909999999999999 - type: precision_at_100 value: 0.967 - type: precision_at_1000 value: 0.106 - type: precision_at_3 value: 14.818999999999999 - type: precision_at_5 value: 11.072 - type: recall_at_1 value: 22.676 - type: recall_at_10 value: 66.077 - type: recall_at_100 value: 91.4 - type: recall_at_1000 value: 99.143 - type: recall_at_3 value: 42.845 - type: recall_at_5 value: 53.08500000000001 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 96.16279069767444 - type: f1 value: 96.02183835878418 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 85.74783401732788 - type: f1 value: 70.59661579230463 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 79.67047747141895 - type: f1 value: 77.06311183471965 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 82.82447881640887 - type: f1 value: 82.37598020010746 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 30.266131881264467 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 29.673653452453998 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 32.91846122902102 - type: mrr value: 34.2557300204471 
- task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 6.762 - type: map_at_10 value: 15.134 - type: map_at_100 value: 19.341 - type: map_at_1000 value: 20.961 - type: map_at_3 value: 10.735999999999999 - type: map_at_5 value: 12.751999999999999 - type: mrr_at_1 value: 52.941 - type: mrr_at_10 value: 60.766 - type: mrr_at_100 value: 61.196 - type: mrr_at_1000 value: 61.227 - type: mrr_at_3 value: 58.720000000000006 - type: mrr_at_5 value: 59.866 - type: ndcg_at_1 value: 50.929 - type: ndcg_at_10 value: 39.554 - type: ndcg_at_100 value: 36.307 - type: ndcg_at_1000 value: 44.743 - type: ndcg_at_3 value: 44.157000000000004 - type: ndcg_at_5 value: 42.142 - type: precision_at_1 value: 52.322 - type: precision_at_10 value: 29.412 - type: precision_at_100 value: 9.365 - type: precision_at_1000 value: 2.2159999999999997 - type: precision_at_3 value: 40.557 - type: precision_at_5 value: 35.913000000000004 - type: recall_at_1 value: 6.762 - type: recall_at_10 value: 19.689999999999998 - type: recall_at_100 value: 36.687 - type: recall_at_1000 value: 67.23 - type: recall_at_3 value: 11.773 - type: recall_at_5 value: 15.18 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 36.612 - type: map_at_10 value: 54.208 - type: map_at_100 value: 55.056000000000004 - type: map_at_1000 value: 55.069 - type: map_at_3 value: 49.45 - type: map_at_5 value: 52.556000000000004 - type: mrr_at_1 value: 41.976 - type: mrr_at_10 value: 56.972 - type: mrr_at_100 value: 57.534 - type: mrr_at_1000 value: 57.542 - type: mrr_at_3 value: 53.312000000000005 - type: mrr_at_5 value: 55.672999999999995 - type: ndcg_at_1 value: 41.338 - type: ndcg_at_10 value: 62.309000000000005 - type: ndcg_at_100 value: 65.557 - type: ndcg_at_1000 value: 65.809 - type: ndcg_at_3 value: 53.74100000000001 - type: ndcg_at_5 value: 58.772999999999996 - type: precision_at_1 value: 41.338 - type: precision_at_10 value: 10.107 - type: precision_at_100 value: 1.1900000000000002 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 24.488 - type: precision_at_5 value: 17.596 - type: recall_at_1 value: 36.612 - type: recall_at_10 value: 84.408 - type: recall_at_100 value: 97.929 - type: recall_at_1000 value: 99.725 - type: recall_at_3 value: 62.676 - type: recall_at_5 value: 74.24199999999999 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 71.573 - type: map_at_10 value: 85.81 - type: map_at_100 value: 86.434 - type: map_at_1000 value: 86.446 - type: map_at_3 value: 82.884 - type: map_at_5 value: 84.772 - type: mrr_at_1 value: 82.53 - type: mrr_at_10 value: 88.51299999999999 - type: mrr_at_100 value: 88.59700000000001 - type: mrr_at_1000 value: 88.598 - type: mrr_at_3 value: 87.595 - type: mrr_at_5 value: 88.266 - type: ndcg_at_1 value: 82.39999999999999 - type: ndcg_at_10 value: 89.337 - type: ndcg_at_100 value: 90.436 - type: ndcg_at_1000 value: 90.498 - type: ndcg_at_3 value: 86.676 - type: ndcg_at_5 value: 88.241 - type: precision_at_1 value: 82.39999999999999 - type: precision_at_10 value: 13.58 - type: precision_at_100 value: 1.543 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 38.04 - type: precision_at_5 value: 25.044 - type: recall_at_1 value: 71.573 - type: recall_at_10 value: 96.066 - type: recall_at_100 value: 99.73100000000001 - type: 
recall_at_1000 value: 99.991 - type: recall_at_3 value: 88.34 - type: recall_at_5 value: 92.79899999999999 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 61.767168063971724 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 66.00502775826037 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.718 - type: map_at_10 value: 12.13 - type: map_at_100 value: 14.269000000000002 - type: map_at_1000 value: 14.578 - type: map_at_3 value: 8.605 - type: map_at_5 value: 10.483 - type: mrr_at_1 value: 23.7 - type: mrr_at_10 value: 34.354 - type: mrr_at_100 value: 35.522 - type: mrr_at_1000 value: 35.571999999999996 - type: mrr_at_3 value: 31.15 - type: mrr_at_5 value: 32.98 - type: ndcg_at_1 value: 23.3 - type: ndcg_at_10 value: 20.171 - type: ndcg_at_100 value: 28.456 - type: ndcg_at_1000 value: 33.826 - type: ndcg_at_3 value: 19.104 - type: ndcg_at_5 value: 16.977999999999998 - type: precision_at_1 value: 23.3 - type: precision_at_10 value: 10.45 - type: precision_at_100 value: 2.239 - type: precision_at_1000 value: 0.35300000000000004 - type: precision_at_3 value: 17.933 - type: precision_at_5 value: 15.1 - type: recall_at_1 value: 4.718 - type: recall_at_10 value: 21.221999999999998 - type: recall_at_100 value: 45.42 - type: recall_at_1000 value: 71.642 - type: recall_at_3 value: 10.922 - type: recall_at_5 value: 15.322 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 85.2065344862739 - type: cos_sim_spearman value: 83.2276569587515 - type: euclidean_pearson value: 83.42726762105312 - type: euclidean_spearman value: 83.31396596997742 - type: manhattan_pearson value: 83.41123401762816 - type: manhattan_spearman value: 83.34393052682026 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 81.28253173719754 - type: cos_sim_spearman value: 76.12995701324436 - type: euclidean_pearson value: 75.30693691794121 - type: euclidean_spearman value: 75.12472789129536 - type: manhattan_pearson value: 75.35860808729171 - type: manhattan_spearman value: 75.30445827952794 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 82.09358031005694 - type: cos_sim_spearman value: 83.18811147636619 - type: euclidean_pearson value: 82.65513459991631 - type: euclidean_spearman value: 82.71085530442987 - type: manhattan_pearson value: 82.67700926821576 - type: manhattan_spearman value: 82.73815539380426 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 81.51365440223137 - type: cos_sim_spearman value: 80.59933905019179 - type: euclidean_pearson value: 80.56660025433806 - type: euclidean_spearman value: 80.27926539084027 - type: manhattan_pearson value: 80.64632724055481 - type: manhattan_spearman value: 
80.43616365139444 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 86.8590461417506 - type: cos_sim_spearman value: 87.16337291721602 - type: euclidean_pearson value: 85.8847725068404 - type: euclidean_spearman value: 86.12602873624066 - type: manhattan_pearson value: 86.04095861363909 - type: manhattan_spearman value: 86.35535645007629 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 83.61371557181502 - type: cos_sim_spearman value: 85.16330754442785 - type: euclidean_pearson value: 84.20831431260608 - type: euclidean_spearman value: 84.33191523212125 - type: manhattan_pearson value: 84.34911007642411 - type: manhattan_spearman value: 84.49670164290394 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 90.54452933158781 - type: cos_sim_spearman value: 90.88214621695892 - type: euclidean_pearson value: 91.38488015281216 - type: euclidean_spearman value: 91.01822259603908 - type: manhattan_pearson value: 91.36449776198687 - type: manhattan_spearman value: 90.90478717381717 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 68.00941643037453 - type: cos_sim_spearman value: 67.03588472081898 - type: euclidean_pearson value: 67.35224911601603 - type: euclidean_spearman value: 66.35544831459266 - type: manhattan_pearson value: 67.35080066508304 - type: manhattan_spearman value: 66.07893473733782 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 85.18291011086279 - type: cos_sim_spearman value: 85.66913777481429 - type: euclidean_pearson value: 84.81115930027242 - type: euclidean_spearman value: 85.07133983924173 - type: manhattan_pearson value: 84.88932120524983 - type: manhattan_spearman value: 85.176903109055 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 83.67543572266588 - type: mrr value: 95.9468146232852 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 59.633 - type: map_at_10 value: 69.801 - type: map_at_100 value: 70.504 - type: map_at_1000 value: 70.519 - type: map_at_3 value: 67.72500000000001 - type: map_at_5 value: 68.812 - type: mrr_at_1 value: 62.333000000000006 - type: mrr_at_10 value: 70.956 - type: mrr_at_100 value: 71.489 - type: mrr_at_1000 value: 71.504 - type: mrr_at_3 value: 69.44399999999999 - type: mrr_at_5 value: 70.244 - type: ndcg_at_1 value: 62.0 - type: ndcg_at_10 value: 73.98599999999999 - type: ndcg_at_100 value: 76.629 - type: ndcg_at_1000 value: 77.054 - type: ndcg_at_3 value: 70.513 - type: ndcg_at_5 value: 71.978 - type: precision_at_1 value: 62.0 - type: precision_at_10 value: 9.633 - type: precision_at_100 value: 1.097 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 27.556000000000004 - 
type: precision_at_5 value: 17.666999999999998 - type: recall_at_1 value: 59.633 - type: recall_at_10 value: 85.52199999999999 - type: recall_at_100 value: 96.667 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 75.767 - type: recall_at_5 value: 79.76100000000001 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.77821782178218 - type: cos_sim_ap value: 94.58684455008866 - type: cos_sim_f1 value: 88.51282051282053 - type: cos_sim_precision value: 90.84210526315789 - type: cos_sim_recall value: 86.3 - type: dot_accuracy value: 99.77623762376237 - type: dot_ap value: 94.86277541733045 - type: dot_f1 value: 88.66897575457693 - type: dot_precision value: 87.75710088148874 - type: dot_recall value: 89.60000000000001 - type: euclidean_accuracy value: 99.76732673267327 - type: euclidean_ap value: 94.12114402691984 - type: euclidean_f1 value: 87.96804792810784 - type: euclidean_precision value: 87.83649052841476 - type: euclidean_recall value: 88.1 - type: manhattan_accuracy value: 99.77227722772277 - type: manhattan_ap value: 94.33665105240306 - type: manhattan_f1 value: 88.25587206396803 - type: manhattan_precision value: 88.21178821178822 - type: manhattan_recall value: 88.3 - type: max_accuracy value: 99.77821782178218 - type: max_ap value: 94.86277541733045 - type: max_f1 value: 88.66897575457693 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 72.03943478268592 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 35.285037897356496 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 51.83578447913503 - type: mrr value: 52.69070696460402 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.89437612567638 - type: cos_sim_spearman value: 30.7277819987126 - type: dot_pearson value: 30.999783674122526 - type: dot_spearman value: 30.992168551124905 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.22699999999999998 - type: map_at_10 value: 1.8950000000000002 - type: map_at_100 value: 11.712 - type: map_at_1000 value: 28.713 - type: map_at_3 value: 0.65 - type: map_at_5 value: 1.011 - type: mrr_at_1 value: 92.0 - type: mrr_at_10 value: 95.39999999999999 - type: mrr_at_100 value: 95.39999999999999 - type: mrr_at_1000 value: 95.39999999999999 - type: mrr_at_3 value: 95.0 - type: mrr_at_5 value: 95.39999999999999 - type: ndcg_at_1 value: 83.0 - type: ndcg_at_10 value: 76.658 - type: ndcg_at_100 value: 60.755 - type: ndcg_at_1000 value: 55.05 - type: ndcg_at_3 value: 82.961 - type: ndcg_at_5 value: 80.008 - type: precision_at_1 value: 90.0 - type: precision_at_10 value: 79.80000000000001 - type: precision_at_100 value: 62.019999999999996 - 
type: precision_at_1000 value: 24.157999999999998 - type: precision_at_3 value: 88.0 - type: precision_at_5 value: 83.6 - type: recall_at_1 value: 0.22699999999999998 - type: recall_at_10 value: 2.086 - type: recall_at_100 value: 15.262 - type: recall_at_1000 value: 51.800000000000004 - type: recall_at_3 value: 0.679 - type: recall_at_5 value: 1.0739999999999998 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 1.521 - type: map_at_10 value: 7.281 - type: map_at_100 value: 12.717 - type: map_at_1000 value: 14.266000000000002 - type: map_at_3 value: 3.62 - type: map_at_5 value: 4.7010000000000005 - type: mrr_at_1 value: 18.367 - type: mrr_at_10 value: 34.906 - type: mrr_at_100 value: 36.333 - type: mrr_at_1000 value: 36.348 - type: mrr_at_3 value: 29.592000000000002 - type: mrr_at_5 value: 33.367000000000004 - type: ndcg_at_1 value: 19.387999999999998 - type: ndcg_at_10 value: 18.523 - type: ndcg_at_100 value: 30.932 - type: ndcg_at_1000 value: 42.942 - type: ndcg_at_3 value: 18.901 - type: ndcg_at_5 value: 17.974999999999998 - type: precision_at_1 value: 20.408 - type: precision_at_10 value: 17.347 - type: precision_at_100 value: 6.898 - type: precision_at_1000 value: 1.482 - type: precision_at_3 value: 21.088 - type: precision_at_5 value: 19.184 - type: recall_at_1 value: 1.521 - type: recall_at_10 value: 13.406 - type: recall_at_100 value: 43.418 - type: recall_at_1000 value: 80.247 - type: recall_at_3 value: 4.673 - type: recall_at_5 value: 7.247000000000001 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.9084 - type: ap value: 15.388385311898144 - type: f1 value: 55.760189174489426 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 62.399547255234864 - type: f1 value: 62.61398519525303 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 53.041094760846164 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 87.92394349406926 - type: cos_sim_ap value: 79.93037248584875 - type: cos_sim_f1 value: 73.21063394683026 - type: cos_sim_precision value: 70.99652949925633 - type: cos_sim_recall value: 75.56728232189973 - type: dot_accuracy value: 87.80473266972642 - type: dot_ap value: 79.11055417163318 - type: dot_f1 value: 72.79587473273801 - type: dot_precision value: 69.55058880076905 - type: dot_recall value: 76.35883905013192 - type: euclidean_accuracy value: 87.91202241163496 - type: euclidean_ap value: 79.61955502404068 - type: euclidean_f1 value: 72.65956080647231 - type: euclidean_precision value: 70.778083562672 - type: euclidean_recall value: 74.64379947229551 - type: manhattan_accuracy value: 87.7749299636407 - type: manhattan_ap value: 79.33286131650932 - type: manhattan_f1 value: 72.44748412310699 - type: manhattan_precision value: 67.43974533879036 - type: 
manhattan_recall value: 78.25857519788919 - type: max_accuracy value: 87.92394349406926 - type: max_ap value: 79.93037248584875 - type: max_f1 value: 73.21063394683026 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.89987192921178 - type: cos_sim_ap value: 87.49525152555509 - type: cos_sim_f1 value: 80.05039276715578 - type: cos_sim_precision value: 77.15714285714286 - type: cos_sim_recall value: 83.1690791499846 - type: dot_accuracy value: 89.58163542515621 - type: dot_ap value: 86.87353801172357 - type: dot_f1 value: 79.50204384986993 - type: dot_precision value: 76.83522482401953 - type: dot_recall value: 82.36064059131506 - type: euclidean_accuracy value: 89.81255093724532 - type: euclidean_ap value: 87.41058010369022 - type: euclidean_f1 value: 79.94095829233214 - type: euclidean_precision value: 78.61396456751525 - type: euclidean_recall value: 81.3135201724669 - type: manhattan_accuracy value: 89.84553886754377 - type: manhattan_ap value: 87.41173628281432 - type: manhattan_f1 value: 79.9051922079846 - type: manhattan_precision value: 76.98016269444841 - type: manhattan_recall value: 83.06128734216199 - type: max_accuracy value: 89.89987192921178 - type: max_ap value: 87.49525152555509 - type: max_f1 value: 80.05039276715578 --- # Repetition Improves Language Model Embeddings Please refer to our paper: [https://arxiv.org/abs/2402.15449](https://arxiv.org/abs/2402.15449) And our GitHub: [https://github.com/jakespringer/echo-embeddings](https://github.com/jakespringer/echo-embeddings) We provide a description of the model as well as example usage in the above links.
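For readers who want a quick, unofficial feel for the idea before following the links above: the method embeds a *repeated* copy of the input and pools only over the second occurrence, so that a decoder-only model's token representations can condition on the whole sentence. The sketch below illustrates only that pooling mechanic; the prompt wording and the tiny stand-in checkpoint (`gpt2`) are placeholders, not the templates or models used in the paper — see the GitHub repository above for the actual implementation.

```python
# Unofficial sketch of the "echo" pooling idea only -- NOT the authors' code.
# The prompt wording and the tiny stand-in checkpoint ("gpt2") are placeholders;
# see the linked GitHub repository for the real templates and models.
import torch
from transformers import AutoModel, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModel.from_pretrained("gpt2")

sentence = "Repetition improves language model embeddings."
# Feed the sentence twice; the prefix ends without a trailing space so that
# token offsets computed on the prefix alone line up with the full input.
prefix = f"Rewrite the sentence: {sentence} Rewritten sentence:"
full = prefix + " " + sentence

prefix_len = len(tokenizer(prefix, add_special_tokens=False)["input_ids"])

inputs = tokenizer(full, return_tensors="pt")
with torch.no_grad():
    hidden = model(**inputs).last_hidden_state[0]  # (seq_len, hidden_dim)

# Mean-pool only the tokens of the second (echoed) occurrence: by the time the
# causal model reads them, it has already seen the whole sentence once.
embedding = hidden[prefix_len:].mean(dim=0)
print(embedding.shape)  # torch.Size([768]) for gpt2
```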
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
namngo/CDS_retrival
namngo
sentence-similarity
[ "sentence-transformers", "safetensors", "roberta", "sentence-similarity", "feature-extraction", "generated_from_trainer", "dataset_size:150000", "loss:TripletLoss", "arxiv:1908.10084", "arxiv:1703.07737", "base_model:VoVanPhuc/sup-SimCSE-VietNamese-phobert-base", "base_model:finetune:VoVanPhuc/sup-SimCSE-VietNamese-phobert-base", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2025-02-23T08:51:33
2025-02-23T09:14:29
111
0
--- base_model: VoVanPhuc/sup-SimCSE-VietNamese-phobert-base library_name: sentence-transformers pipeline_tag: sentence-similarity tags: - sentence-transformers - sentence-similarity - feature-extraction - generated_from_trainer - dataset_size:150000 - loss:TripletLoss widget: - source_sentence: Theo quyết định số 749/QĐ-TTG của Thủ tướng Chính phủ, vai trò của người dân trong chuyển đổi số là gì? sentences: - 'Nhiều phần mềm soạn thảo văn bản phổ biến giúp người dùng tạo, biên tập và định dạng văn bản một cách tiện lợi có thể kể đến như Microsoft Word, một công cụ trong bộ Office của Microsoft, nổi tiếng với giaọ diện thân thiện và đầy đủ tính năng. Google Docs mang đến sự linh hoạt với khả năng làm việc đồng thời trực tuyến. LibreOffice Writer, với ưu điểm mã nguồn mở, hỗ trợ nhiều định dạng và tích họp nhiều tính năng soạn thảo. Notion không chỉ là ứng dụng ghi chú mà còn giúp quản lý dự án và tài liệu liên quan. Scrivener được thiết kế đặc biệt cho viết sách và sáng tác, với khả năng tổ chức nội dung hiệu quả. Tùy thuộc vào nhu cầu cụ thể, người dùng có thể lựa chọn phần mềm phù họp để tối ưu hóa quá trình làm việc với văn bản. Trong tài liệu này, chúng tôi giới thiệu một công cụ xử lý văn bản mạnh mẽ và phổ biến là Microsoft Word, một phần quan trọng của bộ ứng dụng Microsoft Office. Được thiết kế để đáp ứng nhu cầu đa dạng của người dùng từ cá nhân đến doanh nghiệp, Word không chỉ là một trình soạn thảo văn bản mà còn là nguồn cảm hứng sáng tạo. Với giao diện dễ sử dụng, Word cung cấp nhiều công cụ và tính năng giúp người dùng tạo, chỉnh sửa và định dạng văn bản thuận tiện. Từ việc thêm hình ảnh, biểu đồ, đến việc tạo bảng và sắp xếp trang, Word mang lại khả năng linh hoạt cho việc tạo ra tài liệu đa dạng. Một trong những điểm mạnh của Word là khả năng tích họp chặt chẽ với các ứng dụng khác trong bộ Microsoft Office, như Excel và PowerPoint, tạo điều kiện thuận lợi cho việc chia sẻ thông tin và làm việc nhóm. Ngoài ra, Word cung cấp nhiều tính năng tiên tiến như kiểm tra chính tả và ngf pháp, dịch ngôn ngữ trực tuyến, và khả năng tương tác với các dịch vụ đám mây. Với sức mạnh và sự linh hoạt, Microsoft Word đóng vai trò quan trọng trong việc hỗ trợ người dùng thế hiện ý tưởng, sáng tạo tài liệu chuyên nghiệp và nâng cao hiệu suất làm việc.' - 'Theo quyết định số 749/QĐ-TTG của Thủ tướng chính phủ về việc phê duyệt “Chương trình Chuyển đổi số quốc gia đến năm 2025, định hướng đến năm 2030” [21] thì người dân là trung tâm của chuyển đổi số. Mỗi công dân thực hiện các hoạt động trên môi trường thế giới số sẽ trở thành công dân số để góp phần phát triển chính phủ số, kinh tế số và xã hội số. Hiện nay có nhiều khái niệm công dân số nhưng về cơ bản thì công dân số là người áp dụng kiến thức và kỹ năng để sử dụng các công nghệ kỹ thuật số và Internet một cách hiệu quả. Công dân số có khả năng truy cập Internet thông qua máy tính, điện thoại di động, các thiết bị công nghệ khác để kết nối và tương tác với các cá nhân, cộng đồng, tham gia vào các hoạt động kinh tế, chính trị và xã hội trên nền tảng kỹ thuật số [2, 4]. Công dân số là thành phần quan trọng trong thế giới số, họ cần có trách nhiệm với hoạt động của mình trên môi trường số và chủ động sử dụng công nghệ để phát triển sự kết nối, chia sẻ thông tin trên cộng đồng toàn cầu. Đối tượng của công dân số có thể là bất kỳ ai và không giới hạn về độ tuối, giới tính, tôn giáo và nơi họ sống, họ có thể là một trẻ em, thanh niên hoặc người lớn tuối. 
Công dân số đóng vai trò vừa là những người học cũng đồng thời là người sáng tạo ra nội dung, kiến thức và thông tin trên môi trường kỹ thuật số. Đây là một phương pháp hiệu quả để tạo nên nguồn dữ liệu, thông tin phong phú giúp mỗi người tự cập nhật kiến thức kịp thời trong kỷ nguyên thế giới số phát triển mạnh mẽ ngày nay. Công dân số có vai trò quan trọng trong việc thúc đẩy các cơ hội phát triến kinh tế, bình đẳng xã hội và tăng cường sự tham gia vào các hoạt động cộng đồng. Với cách tiếp cận này, công nghệ kỹ thuật số giúp loại bỏ các rào cản về ngôn ngữ, khoảng cách và văn hóa, cho phép mọi người trên toàn thế giới có thể tham gia như một công dân trong cộng đồng toàn cầu. Công dân số là cách tiếp cận và thực hiện các quyền và nghĩa vụ của mình trong môi trường thế giới số, đặc biệt là cách chúng ta tương tác với người khác. Mỗi cá nhân đều đóng vai trò quan trọng để tạo ra những công dân số có trách n' - " hoặc outlook kèm theo phần mở rộng như .com, .net, hoặc .org. Ví dụ về địa chỉ\ \ thư điện tử bao gồm: \"[email protected]\" hoặc \"[email protected].\"\n\ -\tCách thức gửi thư: \n•\tViệc gửi thư truyền thống đòi hỏi nhiều bước hơn so\ \ với thư điện tử. Người gửi cần chuẩn bị nội dung thư, đặt vào phong bì, ghi\ \ rõ địa chỉ người nhận và người gửi, sau đó dán tem phù hợp. Thư sẽ được gửi\ \ tại bưu điện, công ty vận chuyển hoặc thông qua dịch vụ thu thập thư tận nơi.\ \ Sau đó, bưu điện tiến hành phân loại thư tại trung tâm xử lý, vận chuyển đến\ \ khu vực gần người nhận nhất và cuối cùng là giao trực tiếp đến tay người nhận\ \ thông qua nhân viên bưu tá.\n•\tNgược lại, gửi thư điện tử đơn giản và nhanh\ \ chóng hơn. Người gửi chỉ cần soạn nội dung trên thiết bị điện tử, nhập địa chỉ\ \ email của người nhận, kiểm tra nội dung và các tệp đính kèm nếu có, rồi nhấn\ \ \"Gửi.\" Thư sẽ được truyền đi tức thời qua mạng Internet và đến hộp thư của\ \ người nhận. Người nhận có thể truy cập và đọc thư từ bất kỳ thiết bị nào có\ \ kết nối Internet như điện thoại, máy tính hoặc máy tính bảng.\n-\tThời gian\ \ xử lý:\n•\tThời gian gửi thư truyền thống thường kéo dài từ vài ngày đến vài\ \ tuần, phụ thuộc vào khoảng cách giữa người gửi và người nhận cũng như dịch vụ\ \ bưu chính được sử dụng. Thư nội địa có thể mất vài ngày để đến nơi, trong khi\ \ thư quốc tế thường mất nhiều thời gian hơn, đặc biệt nếu liên quan đến các thủ\ \ tục hải quan hoặc phải đi qua nhiều quốc gia khác nhau. Các yếu tố như thời\ \ tiết, ngày lễ và hiệu quả hoạt động của hệ thống bưu điện cũng ảnh hưởng đến\ \ thời gian xử lý thư.\n•\tTrong khi đó, thư điện tử được gửi và nhận gần như\ \ ngay lập tức, chỉ mất vài giây đến vài phút sau khi người gửi nhấn \"Gửi.\"\ \ Thời gian xử lý không bị ảnh hưởng bởi khoảng cách địa lý giữa người gửi và\ \ người nhận. Tuy nhiên, trong một số trường hợp hiếm gặp, email có thể bị chậm\ \ trễ do vấn đề kỹ thuật như máy chủ bị quá tải hoặc lỗi hệ thống." - source_sentence: Tại sao cần lựa chọn phương pháp giao tiếp phù hợp trong môi trường số? sentences: - "Trong phần khái niệm, chúng ta đã biết công dân số là những người có kiến thức,\ \ kỹ năng và thao tác trên không gian mạng, tuy nhiên để trở thành công dân số\ \ thểo đúng ý nghĩa, vai trò và chức năng thì cần phải đáp ứng các yếu tố cơ bản.\n\ a.\tKhả năng truy cập nền tảng công nghệ kỹ thuật số\nTrong thời đại số, việc\ \ truy cập và sử dụng tài nguyên trên môi truờng thế giới số là bình đẳng cho\ \ mọi người. 
Tuy nhiên, quyền truy cập này không phải lúc nào cũng đảm bảo do\ \ những yếu tố khác nhau như địa lý, điều kiện kinh tế, độ tuổi, trình độ học\ \ vấn, và các điều kiện khác có thể ảnh hưởng. Do đó, để tạo ra môi trường số\ \ phát triển và khuyến khích sự tham gia tích cực của mọi người vào xã hội số,\ \ chúng ta cần tạo điều kiện và hỗ trợ việc truy cập công nghệ của mỗi công dân\ \ số. Điều này nhằm đảm bảo rằng mọi người đều có cơ hội tiếp cận với thế giới\ \ số.\nb.\tKhả năng giao tiếp trên môi trường số\nGiao tiếp trong môi trường số\ \ là việc sử dụng các thiết bị công nghệ và mạng Internet để liên lạc, tương tác\ \ và tham gia vào các hoạt động khoa học công nghệ, giáo dục, kinh tế xã hội,\ \ chính trị và giao dịch thương mại mọi lúc, mọi nơi.\nNgày nay, có nhiều phương\ \ pháp giao tiếp trực tuyến giúp các công dân số kết nối và hợp tác với bất cứ\ \ ai từ khắp mọi nơi và vào bất cứ thời điểm nào thông qua các phương tiện như\ \ tin nhắn văn bản trên các phần mềm ứng dụng, các mạng xã hội như Zalo, Facebook,\ \ Twitter, điện thoại di động, thư điện tử, v.v...\nKhi lựa chọn phương pháp tiếp\ \ cận và giao tiếp trong môi trường số, người dùng nên đưa ra quyết định phù hợp\ \ để hạn chế nguy cơ bị xâm phạm bởi các đối tượng xấu, vì bất kỳ thông tin gửi\ \ đi đều có thể bị kẻ xấu đánh cắp. Vì vậy, người dùng số cần phải được trang\ \ bị các kỹ năng giao tiếp phù hợp đế đảm bảo an toàn khi tương tác trên không\ \ gian số.\nCông dân số cần được đào tạo các tiêu chuẩn giao tiếp số để tuân thủ\ \ các nguyên tắc cơ bản là “Đối xử với người khác thểo cách bạn muốn được đối\ \ xử\", \"Tôn trọng người khác cũng như bản" - 'ăng quản lý mật khấu trong trình duyệt để tạo và lưu trữ mật khẩu an toàn cho các trang web khác nhau. Điều này giúp bạn tránh việc sử dụng mật khẩu yếu và giữ cho thông tin cá nhân của bạn an toàn. Nhớ rằng, việc sử dụng tính năng duyệt web an toàn không chỉ là vấn đề của trình duyệt mà còn phụ thuộc vào tư duy an toàn của người sử dụng. Luôn cân nhắc và thực hiện các biện pháp an toàn khi duyệt web đế tối uu hóa bảo vệ cá nhân và dữ liệu của bạn. ' - 'Biên soạn văn bản là một phương tiện quan trọng trong giao tiếp, giúp ghi lại, truyền đạt và lưu trữ thông tin giữa các cá nhân, tổ chức, hoặc cơ quan. Văn bản có thể mang nhiều hình thức nhu thư từ, hồ sơ, sách, báo cáo, và các loại tài liệu khác. Văn bản không chỉ sử dụng ngôn ngữ viết mà còn bao gồm các hình thức biểu đạt khác như hình ảnh, biểụ đồ, và ký hiệu, tùy thuộc vào ngữ cảnh và mục đích. Văn bản không chỉ đơn thuần là phương tiện truyền tải thông tin mà còn phản ánh văn hóa, tri thức, và quy định xã hội. Đối với các cơ quan và tổ chức, việc soạn thảo và quản lý văn bản tuân theo các quy tắc và kỹ thuật cụ thể để đảm bảo tính chính xác, rõ ràng, và hiệu quả trong việc truyền đạt thông tin. Văn bản có thế được phân loại theo mục đích sử dụng như văn bản hành chính, văn bản pháp luật, hay văn bản thông tin. Trong các cơ quan, tố chức tại Việt Nam, văn bản hành chính là một loại văn bản quan trọng và phổ biến. Theo Điều 7 Nghị định 30/2020/NĐ-CP , văn bản hành chính bao gồm nhiều loại như nghị quyết (cá biệt), quyết định (cá biệt), chỉ thị, quy chế, quy định, thông cáo, thông báo, hướng dẫn, chương trình, kế hoạch, phương án, đề án, dự án, báo cáo, biên bản, tờ trình, họp đồng, công văn, công điện, bản ghi nhớ, bản thỏa thuận, giấy ủy quyền, giấy mời, giấy giới thiệu, giấy nghỉ phép, phiếu gửi, phiếu chuyển, phiếu báo, và thư công.' - source_sentence: Microsoft Word hỗ trợ những tính năng nào? 
sentences: - 'Nhiều phần mềm soạn thảo văn bản phổ biến giúp người dùng tạo, biên tập và định dạng văn bản một cách tiện lợi có thể kể đến như Microsoft Word, một công cụ trong bộ Office của Microsoft, nổi tiếng với giaọ diện thân thiện và đầy đủ tính năng. Google Docs mang đến sự linh hoạt với khả năng làm việc đồng thời trực tuyến. LibreOffice Writer, với ưu điểm mã nguồn mở, hỗ trợ nhiều định dạng và tích họp nhiều tính năng soạn thảo. Notion không chỉ là ứng dụng ghi chú mà còn giúp quản lý dự án và tài liệu liên quan. Scrivener được thiết kế đặc biệt cho viết sách và sáng tác, với khả năng tổ chức nội dung hiệu quả. Tùy thuộc vào nhu cầu cụ thể, người dùng có thể lựa chọn phần mềm phù họp để tối ưu hóa quá trình làm việc với văn bản. Trong tài liệu này, chúng tôi giới thiệu một công cụ xử lý văn bản mạnh mẽ và phổ biến là Microsoft Word, một phần quan trọng của bộ ứng dụng Microsoft Office. Được thiết kế để đáp ứng nhu cầu đa dạng của người dùng từ cá nhân đến doanh nghiệp, Word không chỉ là một trình soạn thảo văn bản mà còn là nguồn cảm hứng sáng tạo. Với giao diện dễ sử dụng, Word cung cấp nhiều công cụ và tính năng giúp người dùng tạo, chỉnh sửa và định dạng văn bản thuận tiện. Từ việc thêm hình ảnh, biểu đồ, đến việc tạo bảng và sắp xếp trang, Word mang lại khả năng linh hoạt cho việc tạo ra tài liệu đa dạng. Một trong những điểm mạnh của Word là khả năng tích họp chặt chẽ với các ứng dụng khác trong bộ Microsoft Office, như Excel và PowerPoint, tạo điều kiện thuận lợi cho việc chia sẻ thông tin và làm việc nhóm. Ngoài ra, Word cung cấp nhiều tính năng tiên tiến như kiểm tra chính tả và ngf pháp, dịch ngôn ngữ trực tuyến, và khả năng tương tác với các dịch vụ đám mây. Với sức mạnh và sự linh hoạt, Microsoft Word đóng vai trò quan trọng trong việc hỗ trợ người dùng thế hiện ý tưởng, sáng tạo tài liệu chuyên nghiệp và nâng cao hiệu suất làm việc.' - "ật của giao tiếp trực tuyến trong tương lai được thể hiện ở những khía cạnh sau:\n\ -\tTích hợp trí tuệ nhân tạo (AI) và Chatbots: AI sẽ tiếp tục phát triển, cho\ \ phép các chatbot trở nên thông minh hơn, có khả năng xử lý các yêu cầu phức\ \ tạp hơn và cung cấp trải nghiệm giao tiếp tự nhiên hơn. Chatbots sẽ hỗ trợ trong\ \ nhiều lĩnh vực từ dịch vụ khách hàng đến trợ lý cá nhân.\n-\tGiao tiếp thực\ \ tế ảo và tăng cường (VR/AR): VR và AR sẽ mang lại cách thức giao tiếp mới, tạo\ \ ra môi trường ảo cho cuộc họp và giao tiếp xã hội. Điều này sẽ tạo ra trải nghiệm\ \ gần gũi hơn, dù khoảng cách địa lý.\n-\tTăng cường bảo mật và quyền riêng tư:\ \ Khi giao tiếp trực tuyến trở nên phổ biến hơn, vấn đề bảo mật và quyền riêng\ \ tư ngày càng trở nên quan trọng hơn.\n-\tTích hợp nhiều kênh: Tương lai sẽ chứng\ \ kiến sự tích họp liền mạch giữa các kênh giao tiếp như văn bản, giọng nói và\ \ video. Người dùng có thể chuyển đổi giữa các kênh một cách dễ dàng trong cùng\ \ một cuộc trò chuyện.\n-\tTự động hóa và phân tích dữ liệu: Sử dụng công nghệ\ \ để phân tích xu hướng giao tiếp và hành vi người dùng, giúp cá nhân hóa trải\ \ nghiệm giao tiếp và cung cấp thông tin hữu ích cho doanh nghiệp và người dùng.\n\ Nhìn chung, tương lai của trò chuyện trực tuyến sẽ tập trung vào việc tạo ra trải\ \ nghiệm người dùng phong phú hơn, an toàn hơn và tương tác hơn, nhờ vào sự tiến\ \ bộ của công nghệ.\n" - 'Nhiều phần mềm soạn thảo văn bản phổ biến giúp người dùng tạo, biên tập và định dạng văn bản một cách tiện lợi có thể kể đến như Microsoft Word, một công cụ trong bộ Office của Microsoft, nổi tiếng với giaọ diện thân thiện và đầy đủ tính năng. 
Google Docs mang đến sự linh hoạt với khả năng làm việc đồng thời trực tuyến. LibreOffice Writer, với ưu điểm mã nguồn mở, hỗ trợ nhiều định dạng và tích họp nhiều tính năng soạn thảo. Notion không chỉ là ứng dụng ghi chú mà còn giúp quản lý dự án và tài liệu liên quan. Scrivener được thiết kế đặc biệt cho viết sách và sáng tác, với khả năng tổ chức nội dung hiệu quả. Tùy thuộc vào nhu cầu cụ thể, người dùng có thể lựa chọn phần mềm phù họp để tối ưu hóa quá trình làm việc với văn bản. Trong tài liệu này, chúng tôi giới thiệu một công cụ xử lý văn bản mạnh mẽ và phổ biến là Microsoft Word, một phần quan trọng của bộ ứng dụng Microsoft Office. Được thiết kế để đáp ứng nhu cầu đa dạng của người dùng từ cá nhân đến doanh nghiệp, Word không chỉ là một trình soạn thảo văn bản mà còn là nguồn cảm hứng sáng tạo. Với giao diện dễ sử dụng, Word cung cấp nhiều công cụ và tính năng giúp người dùng tạo, chỉnh sửa và định dạng văn bản thuận tiện. Từ việc thêm hình ảnh, biểu đồ, đến việc tạo bảng và sắp xếp trang, Word mang lại khả năng linh hoạt cho việc tạo ra tài liệu đa dạng. Một trong những điểm mạnh của Word là khả năng tích họp chặt chẽ với các ứng dụng khác trong bộ Microsoft Office, như Excel và PowerPoint, tạo điều kiện thuận lợi cho việc chia sẻ thông tin và làm việc nhóm. Ngoài ra, Word cung cấp nhiều tính năng tiên tiến như kiểm tra chính tả và ngf pháp, dịch ngôn ngữ trực tuyến, và khả năng tương tác với các dịch vụ đám mây. Với sức mạnh và sự linh hoạt, Microsoft Word đóng vai trò quan trọng trong việc hỗ trợ người dùng thế hiện ý tưởng, sáng tạo tài liệu chuyên nghiệp và nâng cao hiệu suất làm việc.' - source_sentence: Kỹ năng bảo mật bao gồm những gì? sentences: - 'Trong môi trường số, công dân số là những người chủ động và có trách nhiệm để tham gia vào các hoạt động kinh tế, văn hóa, chính trị và xã hội. Mặc dù bất kỳ cá nhân nào sử dụng công nghệ số đều có thể được coi là công dân số, nhưng công dân số thường được xác định rõ hơn thông qua việc hiểu biết và tuân thủ các quyền lợi và trách nhiệm liên quan đến việc sử dụng công nghệ theo quy định pháp luật. Đây chính là lý do tại sao việc hỗ trợ, đào tạo công dân số trong các cơ sở giáo dục trở nên cực kỳ cần thiết. Công dân số cần được trang bị kiến thức về công nghệ, pháp luật và cảm xúc cá nhân đế nâng cao nhận thức về bản thân, môi trường xã hội và kỹ năng quản lý mối quan hệ, giúp chúng ta tương tác hiệu quả trong không gian thế giới số. Đồng thời, công dân số cần chịu trách nhiệm với các hành động của mình trên môi trường mạng và nhận thức rõ về hậu quả của hành vi không đúng đắn. Để đảm bảo quyền lợi và trách nhiệm đúng đắn, công dân số cần tuân thủ nghiêm quy định của pháp luật, tránh việc thiếu hiểu biết pháp luật gây hậu quả nghiêm trọng. Bộ luật Dân sự 2015, Bộ luật Hình sự 2017 và Luật sửa đổi và bổ sung một số điều bộ luật hình sự năm 2017, cùng các nghị định của Chính phủ quy định cụ thể những hành vi và mức phạt theo quy định. Quốc hội thông qua Luật an ninh mạng, Luật số: 24/2018/QH14, có hiệu lực từ ngày 01/1/2019. Luật này quy định về hoạt động bảo vệ an ninh quốc gia và bảo đảm trật tự, an toàn xã hội trên không gian mạng; trách nhiệm của cơ quan, tổ chức, cá nhân có liên quan. Sự phát triển của khoa học công nghệ sẽ nâng cao trình độ và kỹ năng của công dân. Hệ thống pháp luật và văn bản quy phạm pháp luật sẽ luôn đuợc cập nhật để phù họp với mỗi giai đoạn phát triển đó.' 
- 'Theo quyết định số 749/QĐ-TTG của Thủ tướng chính phủ về việc phê duyệt “Chương trình Chuyển đổi số quốc gia đến năm 2025, định hướng đến năm 2030” [21] thì người dân là trung tâm của chuyển đổi số. Mỗi công dân thực hiện các hoạt động trên môi trường thế giới số sẽ trở thành công dân số để góp phần phát triển chính phủ số, kinh tế số và xã hội số. Hiện nay có nhiều khái niệm công dân số nhưng về cơ bản thì công dân số là người áp dụng kiến thức và kỹ năng để sử dụng các công nghệ kỹ thuật số và Internet một cách hiệu quả. Công dân số có khả năng truy cập Internet thông qua máy tính, điện thoại di động, các thiết bị công nghệ khác để kết nối và tương tác với các cá nhân, cộng đồng, tham gia vào các hoạt động kinh tế, chính trị và xã hội trên nền tảng kỹ thuật số [2, 4]. Công dân số là thành phần quan trọng trong thế giới số, họ cần có trách nhiệm với hoạt động của mình trên môi trường số và chủ động sử dụng công nghệ để phát triển sự kết nối, chia sẻ thông tin trên cộng đồng toàn cầu. Đối tượng của công dân số có thể là bất kỳ ai và không giới hạn về độ tuối, giới tính, tôn giáo và nơi họ sống, họ có thể là một trẻ em, thanh niên hoặc người lớn tuối. Công dân số đóng vai trò vừa là những người học cũng đồng thời là người sáng tạo ra nội dung, kiến thức và thông tin trên môi trường kỹ thuật số. Đây là một phương pháp hiệu quả để tạo nên nguồn dữ liệu, thông tin phong phú giúp mỗi người tự cập nhật kiến thức kịp thời trong kỷ nguyên thế giới số phát triển mạnh mẽ ngày nay. Công dân số có vai trò quan trọng trong việc thúc đẩy các cơ hội phát triến kinh tế, bình đẳng xã hội và tăng cường sự tham gia vào các hoạt động cộng đồng. Với cách tiếp cận này, công nghệ kỹ thuật số giúp loại bỏ các rào cản về ngôn ngữ, khoảng cách và văn hóa, cho phép mọi người trên toàn thế giới có thể tham gia như một công dân trong cộng đồng toàn cầu. Công dân số là cách tiếp cận và thực hiện các quyền và nghĩa vụ của mình trong môi trường thế giới số, đặc biệt là cách chúng ta tương tác với người khác. Mỗi cá nhân đều đóng vai trò quan trọng để tạo ra những công dân số có trách n' - "ều này giúp hỗ trợ duy trì cân bằng giữa sức khỏe tinh thần và thể chất.\n-\t\ Quản lý việc đe dọa trên môi trường mạng: Khả năng phát hiện và quản lý các trường\ \ hợp mâu thuẫn, gây xung đột, đe dọa trực tuyến trên không gian mạng là một kỹ\ \ năng quan trọng. Một môi trường trực tuyển lành mạnh được tạo ra khi công dân\ \ số có khả năng nhận diện, quản lý và giảm thiểu các nguy cơ liên quan đến bắt\ \ nạt, đe dọa, quấy rối,... bằng cách phát triển nhận thức xã hội, kỹ năng ra\ \ quyết định và giao tiếp hiệu quả. Khi được trang bị những kỹ năng này, công\ \ dân số sẽ chủ động bảo vệ bản thân trước những trường hợp bị dọa nạt trên môi\ \ trường trực tuyến và sẵn sàng có những biện pháp đối phó hiệu quả.\n-\tQuản\ \ lý an ninh mạng: Khả năng quản lý an ninh mạng bao gồm việc bảo vệ dữ liệu và\ \ thông tin cá nhân bằng cách tạo mật khẩu mạnh và đối phó với các loại tấn công.\ \ Mặc dù các tố chức, cơ quan thưòng có các hệ thống quản lý bảo mật riêng, các\ \ ứng dụng xây hệ thống bảo mật. Tuy nhiên, công dân số cần chủ động phòng ngừa\ \ và loại bỏ các mối đe dọa và bảo vệ dữ liệu cũng như thiết bị cá nhân. Đồng\ \ thời, họ phải luôn cảnh giác lỗ hổng bảo mật và xử lý kịp thời khi gặp sự cố.\n\ -\tQuản lý bảo mật: Kỹ năng bảo mật bao gồm việc cấn trọng khi chia sẻ thông tin\ \ cá nhân trực tuyến và sử dụng các công cụ bảo mật để bảo vệ dữ liệu khỏi kẻ\ \ xấu. 
Đặc biệt, việc sử dụng, lưu trữ, xử lý và chia sẻ thông tin cá nhân trong\ \ không gian số cần kết hợp với các công cụ bảo mật để bảo vệ thông tin cá nhân\ \ để không bị kẻ xấu đánh cắp thông tin làm ảnh hưởng danh tiếng, uy tín và nhân\ \ phẩm. Bên cạnh đó, công dân số cần tôn trọng quyền riêng tư và thông tin cá\ \ nhân của người khác\n-\tTư duy phê phán: Khả năng phân biệt bao gồm nhận diện\ \ tính đúng sai trong cách ứng xử, đánh giá thông tin hữu ích và có hại, cũng\ \ như phân biệt các nguồn thông tin đáng tin cậy và không đáng tin cậy. Công dân\ \ số cần nâng cao kỹ năng sử dụng máy tính, phần mềm, ứng dụng đồng thời hiểu\ \ rõ nhu cầu thông tin, điều hướng hiệu quả và đánh giá phê bình để thu thập và\ \ sắ" - source_sentence: Kỹ năng bảo mật bao gồm những gì? sentences: - "a.\tSao chép ô\nExcel cho phép sao chép không chỉ nội dung mà còn định dạng của\ \ ô đó. Khi sao chép ta có thể có nhiều lựa chọn khác nhau như:\n-\tAll: Sao chép\ \ tất cả nội dung, định dạng...\n-\tFormulas: Chỉ sao chép công ức\n-\tValues:\ \ Chỉ sao chép giá trị, hay nội dung ô đó\n-\tFormats: Chỉ sao chép định dạng\n\ -\tComments: Chỉ sao chép chú thích\n-\tAll except borders: Sao chép tất cả trừ\ \ đường viền\n-\tColumn widths: Sao chép giá trị độ rộng của cột\n-\tFormulas\ \ and number formats: Sao chép cả công thức và định dạng số\n-\tValues and number\ \ formats: Sao chép giá trị và định dạng số\nb.\tDi chuyến các ô tỉnh\n-\tChọn\ \ các ô cần di chuyển\n-\tChọn lệnh Cut hoặc bấm tổ hợp phím Ctrl + X\n-\tĐưa\ \ chuột đến vị trí muốn dán\n-\tChọn lệnh Paste hoặc bấm Ctrl + V để dán các ô.\n\ c.\tXóa dữ liệu các ô\n-\tChọn các ô cần xoá dữ liệu\n-\tNhấn nút Clear trên thẻ\ \ Home" - "ều này giúp hỗ trợ duy trì cân bằng giữa sức khỏe tinh thần và thể chất.\n-\t\ Quản lý việc đe dọa trên môi trường mạng: Khả năng phát hiện và quản lý các trường\ \ hợp mâu thuẫn, gây xung đột, đe dọa trực tuyến trên không gian mạng là một kỹ\ \ năng quan trọng. Một môi trường trực tuyển lành mạnh được tạo ra khi công dân\ \ số có khả năng nhận diện, quản lý và giảm thiểu các nguy cơ liên quan đến bắt\ \ nạt, đe dọa, quấy rối,... bằng cách phát triển nhận thức xã hội, kỹ năng ra\ \ quyết định và giao tiếp hiệu quả. Khi được trang bị những kỹ năng này, công\ \ dân số sẽ chủ động bảo vệ bản thân trước những trường hợp bị dọa nạt trên môi\ \ trường trực tuyến và sẵn sàng có những biện pháp đối phó hiệu quả.\n-\tQuản\ \ lý an ninh mạng: Khả năng quản lý an ninh mạng bao gồm việc bảo vệ dữ liệu và\ \ thông tin cá nhân bằng cách tạo mật khẩu mạnh và đối phó với các loại tấn công.\ \ Mặc dù các tố chức, cơ quan thưòng có các hệ thống quản lý bảo mật riêng, các\ \ ứng dụng xây hệ thống bảo mật. Tuy nhiên, công dân số cần chủ động phòng ngừa\ \ và loại bỏ các mối đe dọa và bảo vệ dữ liệu cũng như thiết bị cá nhân. Đồng\ \ thời, họ phải luôn cảnh giác lỗ hổng bảo mật và xử lý kịp thời khi gặp sự cố.\n\ -\tQuản lý bảo mật: Kỹ năng bảo mật bao gồm việc cấn trọng khi chia sẻ thông tin\ \ cá nhân trực tuyến và sử dụng các công cụ bảo mật để bảo vệ dữ liệu khỏi kẻ\ \ xấu. Đặc biệt, việc sử dụng, lưu trữ, xử lý và chia sẻ thông tin cá nhân trong\ \ không gian số cần kết hợp với các công cụ bảo mật để bảo vệ thông tin cá nhân\ \ để không bị kẻ xấu đánh cắp thông tin làm ảnh hưởng danh tiếng, uy tín và nhân\ \ phẩm. 
Bên cạnh đó, công dân số cần tôn trọng quyền riêng tư và thông tin cá\ \ nhân của người khác\n-\tTư duy phê phán: Khả năng phân biệt bao gồm nhận diện\ \ tính đúng sai trong cách ứng xử, đánh giá thông tin hữu ích và có hại, cũng\ \ như phân biệt các nguồn thông tin đáng tin cậy và không đáng tin cậy. Công dân\ \ số cần nâng cao kỹ năng sử dụng máy tính, phần mềm, ứng dụng đồng thời hiểu\ \ rõ nhu cầu thông tin, điều hướng hiệu quả và đánh giá phê bình để thu thập và\ \ sắ" - "Phím tắt tạo mới, chỉnh sửa lưu văn bản:\n-\tCtrl+N: Tạo mới một văn bản\n-\t\ Ctrl+Q: Mở văn bản đã được lưu trước đó\n-\tCtrl+C: Sao chép phần văn bản được\ \ chọn\n-\tCtrl+X: Cắt nội dung được chọn trong văn bản\n-\tCtrl+V: Dán văn bản\n\ -\tCtrl+F: Tìm kiếm từ/cụm từ hoặc đoạn văn bản\n-\tCtrl+H: Mở hộp thoại thay\ \ thế, thay thế một từ/cụm từ bằng một từ/cụm từ khác\n-\tCtrl+P: In ấn văn bản\n\ -\tCtrl+Z: Hoàn tác bước chỉnh sửa trước đó\n-\tCtrl+Y: Khôi phục trạng thái văn\ \ bản trước khi sử dụng lệnh Ctrl+Z\n-\tCtrl+F4, Ctrl+W, Alt+F4: Đóng văn bản\n\ Phím tắt với Menu, Toolbars:\n-\tAlt: Mở menu lệnh\n-\tTab: Di chuyển đến mục\ \ chọn, nhóm chọn tiếp theo\n-\tCtrl+Tab: Di chuyển qua thẻ tiếp theo trong hộp\ \ thoại\n-\tShift+Tab: Di chuyển tới thẻ phía trước trong hộp thoại\n-\tAlt+Mũi\ \ tên xuống: Hiển thị danh sách của danh sách sổ\n-\tEnter: Chọn 1 giá trị trong\ \ danh sách sổ\n-\tESC: Tắt nội dung của danh sách sổ\n-\tHome: Chọn lệnh đầu\ \ tiên trong Menu con\n-\tEnd: Chọn lệnh cuối cùng trong Menu con" --- # SentenceTransformer based on VoVanPhuc/sup-SimCSE-VietNamese-phobert-base This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [VoVanPhuc/sup-SimCSE-VietNamese-phobert-base](https://huggingface.co/VoVanPhuc/sup-SimCSE-VietNamese-phobert-base). It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more. ## Model Details ### Model Description - **Model Type:** Sentence Transformer - **Base model:** [VoVanPhuc/sup-SimCSE-VietNamese-phobert-base](https://huggingface.co/VoVanPhuc/sup-SimCSE-VietNamese-phobert-base) <!-- at revision 608779b86741a8acd8c8d38132974ff04086b138 --> - **Maximum Sequence Length:** 256 tokens - **Output Dimensionality:** 768 dimensions - **Similarity Function:** Cosine Similarity <!-- - **Training Dataset:** Unknown --> <!-- - **Language:** Unknown --> <!-- - **License:** Unknown --> ### Model Sources - **Documentation:** [Sentence Transformers Documentation](https://sbert.net) - **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers) - **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers) ### Full Model Architecture ``` SentenceTransformer( (0): Transformer({'max_seq_length': 256, 'do_lower_case': False}) with Transformer model: RobertaModel (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True}) ) ``` ## Usage ### Direct Usage (Sentence Transformers) First install the Sentence Transformers library: ```bash pip install -U sentence-transformers ``` Then you can load this model and run inference. 
```python from sentence_transformers import SentenceTransformer # Download from the 🤗 Hub model = SentenceTransformer("sentence_transformers_model_id") # Run inference sentences = [ 'Kỹ năng bảo mật bao gồm những gì?', 'ều này giúp hỗ trợ duy trì cân bằng giữa sức khỏe tinh thần và thể chất.\n-\tQuản lý việc đe dọa trên môi trường mạng: Khả năng phát hiện và quản lý các trường hợp mâu thuẫn, gây xung đột, đe dọa trực tuyến trên không gian mạng là một kỹ năng quan trọng. Một môi trường trực tuyển lành mạnh được tạo ra khi công dân số có khả năng nhận diện, quản lý và giảm thiểu các nguy cơ liên quan đến bắt nạt, đe dọa, quấy rối,... bằng cách phát triển nhận thức xã hội, kỹ năng ra quyết định và giao tiếp hiệu quả. Khi được trang bị những kỹ năng này, công dân số sẽ chủ động bảo vệ bản thân trước những trường hợp bị dọa nạt trên môi trường trực tuyến và sẵn sàng có những biện pháp đối phó hiệu quả.\n-\tQuản lý an ninh mạng: Khả năng quản lý an ninh mạng bao gồm việc bảo vệ dữ liệu và thông tin cá nhân bằng cách tạo mật khẩu mạnh và đối phó với các loại tấn công. Mặc dù các tố chức, cơ quan thưòng có các hệ thống quản lý bảo mật riêng, các ứng dụng xây hệ thống bảo mật. Tuy nhiên, công dân số cần chủ động phòng ngừa và loại bỏ các mối đe dọa và bảo vệ dữ liệu cũng như thiết bị cá nhân. Đồng thời, họ phải luôn cảnh giác lỗ hổng bảo mật và xử lý kịp thời khi gặp sự cố.\n-\tQuản lý bảo mật: Kỹ năng bảo mật bao gồm việc cấn trọng khi chia sẻ thông tin cá nhân trực tuyến và sử dụng các công cụ bảo mật để bảo vệ dữ liệu khỏi kẻ xấu. Đặc biệt, việc sử dụng, lưu trữ, xử lý và chia sẻ thông tin cá nhân trong không gian số cần kết hợp với các công cụ bảo mật để bảo vệ thông tin cá nhân để không bị kẻ xấu đánh cắp thông tin làm ảnh hưởng danh tiếng, uy tín và nhân phẩm. Bên cạnh đó, công dân số cần tôn trọng quyền riêng tư và thông tin cá nhân của người khác\n-\tTư duy phê phán: Khả năng phân biệt bao gồm nhận diện tính đúng sai trong cách ứng xử, đánh giá thông tin hữu ích và có hại, cũng như phân biệt các nguồn thông tin đáng tin cậy và không đáng tin cậy. Công dân số cần nâng cao kỹ năng sử dụng máy tính, phần mềm, ứng dụng đồng thời hiểu rõ nhu cầu thông tin, điều hướng hiệu quả và đánh giá phê bình để thu thập và sắ', 'a.\tSao chép ô\nExcel cho phép sao chép không chỉ nội dung mà còn định dạng của ô đó. 
Khi sao chép ta có thể có nhiều lựa chọn khác nhau như:\n-\tAll: Sao chép tất cả nội dung, định dạng...\n-\tFormulas: Chỉ sao chép công ức\n-\tValues: Chỉ sao chép giá trị, hay nội dung ô đó\n-\tFormats: Chỉ sao chép định dạng\n-\tComments: Chỉ sao chép chú thích\n-\tAll except borders: Sao chép tất cả trừ đường viền\n-\tColumn widths: Sao chép giá trị độ rộng của cột\n-\tFormulas and number formats: Sao chép cả công thức và định dạng số\n-\tValues and number formats: Sao chép giá trị và định dạng số\nb.\tDi chuyến các ô tỉnh\n-\tChọn các ô cần di chuyển\n-\tChọn lệnh Cut hoặc bấm tổ hợp phím Ctrl + X\n-\tĐưa chuột đến vị trí muốn dán\n-\tChọn lệnh Paste hoặc bấm Ctrl + V để dán các ô.\nc.\tXóa dữ liệu các ô\n-\tChọn các ô cần xoá dữ liệu\n-\tNhấn nút Clear trên thẻ Home', ] embeddings = model.encode(sentences) print(embeddings.shape) # [3, 768] # Get the similarity scores for the embeddings similarities = model.similarity(embeddings, embeddings) print(similarities.shape) # [3, 3] ``` <!-- ### Direct Usage (Transformers) <details><summary>Click to see the direct usage in Transformers</summary> </details> --> <!-- ### Downstream Usage (Sentence Transformers) You can finetune this model on your own dataset. <details><summary>Click to expand</summary> </details> --> <!-- ### Out-of-Scope Use *List how the model may foreseeably be misused and address what users ought not to do with the model.* --> <!-- ## Bias, Risks and Limitations *What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.* --> <!-- ### Recommendations *What are recommendations with respect to the foreseeable issues? For example, filtering explicit content.* --> ## Training Details ### Training Dataset #### Unnamed Dataset * Size: 150,000 training samples * Columns: <code>query</code>, <code>pos</code>, and <code>neg</code> * Approximate statistics based on the first 1000 samples: | | query | pos | neg | |:--------|:-----------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------| | type | string | string | string | | details | <ul><li>min: 13 tokens</li><li>mean: 16.31 tokens</li><li>max: 18 tokens</li></ul> | <ul><li>min: 209 tokens</li><li>mean: 249.37 tokens</li><li>max: 256 tokens</li></ul> | <ul><li>min: 50 tokens</li><li>mean: 214.27 tokens</li><li>max: 256 tokens</li></ul> | * Samples: | query | pos | neg | 
|:----------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | <code>Tab 'Files' trong Microsoft Teams có chức năng gì?</code> | <code>ội nghị video, Chat nhóm, chia sẻ tài liệu và lịch làm việc, làm cho quá trình giao tiếp và họp tác trở nên thuận tiện hơn. Điểm mạnh của Microsoft Teams nằm ở khả năng tương tác và họp tác liền mạch, cung cấp một không gian làm việc ảo ở đó các nhóm có thể họp và làm việc cùng nhau một cách hiệu quả. Teams hỗ trợ tích họp tốt với các ứng dụng Office 365 khác như Word, Excel và PowerPoint, cũng như nhiều ứng dụng bên thứ ba. Các tính năng bảo mật và quản lý của Teams đảm bảo thông tin doanh nghiệp luôn được bảo vệ. Microsoft Teams đã nhanh chóng trở thành công cụ quan trọng cho nhiều tổ chức, giúp duy trì sự liên kết và hiệu quả trong làm việc trực tuyến.<br>Bước 1: Tải xuống và cài đặt ứng dụng. <br>- Truy cập trang web chính thức của Microsoft Teams tại: https://teams.microsoft.com.<br>- Chọn phiên bản ứng dụng phù họp với hệ điều hành của bạn (Windows, macOS, ios, Android) và tải xuống.<br>- Cài đặt ứng dụng trên thiết bị của bạn, tuân theo các bước hướng dẫn cài đặt.<br>Bước 2: Tạo hoặc sử dụng t...</code> | <code>Công nghệ thông tin đang phát triển mạnh mẽ và trở thành lĩnh vực không thế thiếu trong cuộc sống hiện đại. Thời kỳ hiện nay còn được gọi là thời đại kỹ thuật số, nơi công nghệ luôn thay đổi và phát triển nhanh chóng. 
Những tiến bộ khoa học công nghệ trong thế kỷ 21 đã tạo ra nhu cầu đào tạo những công dân chúng ta trở thành những công dân số. Công dân số là những người có kỹ năng khai thác, sử dụng Internet và công nghệ một cách an toàn và hiệu quả. Điều này không chỉ đế giải trí mà còn tìm kiếm thông tin, học tập, chia sẻ kiến thức, truyền thông, cũng như tìm hiếu kiến thức và pháp luật.<br>Chương 1 cung cấp kiến thức tống quát về thế giới số, công dân số, các yếu tố và kỹ năng cần thiết với công dân số. Những nội dung về chuyến đổi số, số hóa, chữ ký số, chính phủ số, chỉnh phủ điện tử, văn hóa, đạo đức và pháp luật trong thế giới số. Nội dung chính của chương bao gồm:<br>- Thế giới số;<br>- Công dân số;<br>- Chuyển đổi số;<br>- Chỉnh phủ điện tử và chính phủ số;<br>- Văn hóa, đạo đức và pháp luật tr...</code> | | <code>Tab 'Files' trong Microsoft Teams có chức năng gì?</code> | <code>ội nghị video, Chat nhóm, chia sẻ tài liệu và lịch làm việc, làm cho quá trình giao tiếp và họp tác trở nên thuận tiện hơn. Điểm mạnh của Microsoft Teams nằm ở khả năng tương tác và họp tác liền mạch, cung cấp một không gian làm việc ảo ở đó các nhóm có thể họp và làm việc cùng nhau một cách hiệu quả. Teams hỗ trợ tích họp tốt với các ứng dụng Office 365 khác như Word, Excel và PowerPoint, cũng như nhiều ứng dụng bên thứ ba. Các tính năng bảo mật và quản lý của Teams đảm bảo thông tin doanh nghiệp luôn được bảo vệ. Microsoft Teams đã nhanh chóng trở thành công cụ quan trọng cho nhiều tổ chức, giúp duy trì sự liên kết và hiệu quả trong làm việc trực tuyến.<br>Bước 1: Tải xuống và cài đặt ứng dụng. <br>- Truy cập trang web chính thức của Microsoft Teams tại: https://teams.microsoft.com.<br>- Chọn phiên bản ứng dụng phù họp với hệ điều hành của bạn (Windows, macOS, ios, Android) và tải xuống.<br>- Cài đặt ứng dụng trên thiết bị của bạn, tuân theo các bước hướng dẫn cài đặt.<br>Bước 2: Tạo hoặc sử dụng t...</code> | <code>Thế giới số là một khái niệm mà hiện nay chưa có một định nghĩa rõ ràng cụ thể nào để mô tả chính xác. Trong giáo trình này, chúng tôi trình bày những nội dung cơ bản liên quan đến thế giới số. Thuật ngữ “Thế giới số” được nhắc đến theo giai đoạn phát triển của lĩnh vực công nghệ thông tin, điện tử và viễn thông, đặc biệt từ những năm 1990 khi Internet bùng nổ và công nghệ kỹ thuật số trở nên phố biến. Thế giới số mô tả không gian ảo và môi trường kỹ thuật số mà con người tạo ra thông qua Internet, máy tính, các thiết bị điện tử kết nối với nhau và công nghệ kỹ thuật số để truy cập dữ liệu, thông tin và tương tác giữa con người và công nghệ.<br>Nói ngắn gọn thì thế giới số là một thuật ngữ mô tả sự phát triển không ngừng trong lĩnh vực công nghệ thông tin và công nghệ kỹ thuật số, tạo ra môi trường hỗ trợ con người tương tác thông qua Internet.<br>Thế giới số chính là thời đại thông tin ngày nay, mang lại vô số tiện ích hỗ trợ cho cuộc sống của với các thiết bị công nghệ hiện đại. Nó hỗ trợ ...</code> | | <code>Tab 'Files' trong Microsoft Teams có chức năng gì?</code> | <code>ội nghị video, Chat nhóm, chia sẻ tài liệu và lịch làm việc, làm cho quá trình giao tiếp và họp tác trở nên thuận tiện hơn. Điểm mạnh của Microsoft Teams nằm ở khả năng tương tác và họp tác liền mạch, cung cấp một không gian làm việc ảo ở đó các nhóm có thể họp và làm việc cùng nhau một cách hiệu quả. Teams hỗ trợ tích họp tốt với các ứng dụng Office 365 khác như Word, Excel và PowerPoint, cũng như nhiều ứng dụng bên thứ ba. Các tính năng bảo mật và quản lý của Teams đảm bảo thông tin doanh nghiệp luôn được bảo vệ. 
Microsoft Teams đã nhanh chóng trở thành công cụ quan trọng cho nhiều tổ chức, giúp duy trì sự liên kết và hiệu quả trong làm việc trực tuyến.<br>Bước 1: Tải xuống và cài đặt ứng dụng. <br>- Truy cập trang web chính thức của Microsoft Teams tại: https://teams.microsoft.com.<br>- Chọn phiên bản ứng dụng phù họp với hệ điều hành của bạn (Windows, macOS, ios, Android) và tải xuống.<br>- Cài đặt ứng dụng trên thiết bị của bạn, tuân theo các bước hướng dẫn cài đặt.<br>Bước 2: Tạo hoặc sử dụng t...</code> | <code> tác với dữ liệu và thông tin. Các ứng dụng này đa dạng từ các ứng dụng di động, phần mềm máy tính đến các ứng dụng trên web hoặc ứng dụng điều khiển thiết bị ứng dụng trong mọi lĩnh vực của đời sống. Ví dụ về sản phấm mà nền tảng cốt yếu dựa trên bộ sưu tập dữ liệu lớn, sử dụng các mô hình học máy để tạo ra ứng dụng ChatGPT và các ứng dụng thông minh mà chúng ta đang sử dụng ngày nay.<br>- Bảo mật và an ninh mạng: Bảo vệ dữ liệu quan trọng, thông tin cá nhân và hệ thống mạng là yếu tố cực kỳ quan trọng khi tham gia vào môi trường thế giới số. Bên cạnh sự bảo vệ của các ứng dụng, phần mềm, hệ thống thiết bị bảo mật thì mỗi cá nhân, tổ chức cũng tự chủ động thực hiện các biện pháp để hạn chế rủi ro khi tham gia môi trường thế giới số. Tuyệt đối tuân thủ theo các quy định pháp luật về bảo mật dữ liệu và sử dụng an toàn.<br>- Kỹ năng số và tương tác: Thế giới số là môi trường kết nối trên toàn thế giới do vậy mỗi cá nhân khi tham gia môi trường thế giới số này cần có các kỹ năng sử dụng, hiểu v...</code> | * Loss: [<code>TripletLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#tripletloss) with these parameters: ```json { "distance_metric": "TripletDistanceMetric.EUCLIDEAN", "triplet_margin": 5 } ``` ### Evaluation Dataset #### train * Dataset: train * Size: 19,740 evaluation samples * Columns: <code>query</code>, <code>pos</code>, and <code>neg</code> * Approximate statistics based on the first 1000 samples: | | query | pos | neg | |:--------|:----------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------| | type | string | string | string | | details | <ul><li>min: 9 tokens</li><li>mean: 15.84 tokens</li><li>max: 29 tokens</li></ul> | <ul><li>min: 241 tokens</li><li>mean: 253.88 tokens</li><li>max: 256 tokens</li></ul> | <ul><li>min: 50 tokens</li><li>mean: 214.19 tokens</li><li>max: 256 tokens</li></ul> | * Samples: | query | pos | neg | 
|:--------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | <code>Microsoft Word hỗ trợ những tính năng nào?</code> | <code>Nhiều phần mềm soạn thảo văn bản phổ biến giúp người dùng tạo, biên tập và định dạng văn bản một cách tiện lợi có thể kể đến như Microsoft Word, một công cụ trong bộ Office của Microsoft, nổi tiếng với giaọ diện thân thiện và đầy đủ tính năng. Google Docs mang đến sự linh hoạt với khả năng làm việc đồng thời trực tuyến. LibreOffice Writer, với ưu điểm mã nguồn mở, hỗ trợ nhiều định dạng và tích họp nhiều tính năng soạn thảo. Notion không chỉ là ứng dụng ghi chú mà còn giúp quản lý dự án và tài liệu liên quan. Scrivener được thiết kế đặc biệt cho viết sách và sáng tác, với khả năng tổ chức nội dung hiệu quả. Tùy thuộc vào nhu cầu cụ thể, người dùng có thể lựa chọn phần mềm phù họp để tối ưu hóa quá trình làm việc với văn bản.<br>Trong tài liệu này, chúng tôi giới thiệu một công cụ xử lý văn bản mạnh mẽ và phổ biến là Microsoft Word, một phần quan trọng của bộ ứng dụng Microsoft Office. Được thiết kế để đáp ứng nhu cầu đa dạng của người dùng từ cá nhân đến doanh nghiệp, Word không chỉ là mộ...</code> | <code>Công nghệ thông tin đang phát triển mạnh mẽ và trở thành lĩnh vực không thế thiếu trong cuộc sống hiện đại. Thời kỳ hiện nay còn được gọi là thời đại kỹ thuật số, nơi công nghệ luôn thay đổi và phát triển nhanh chóng. Những tiến bộ khoa học công nghệ trong thế kỷ 21 đã tạo ra nhu cầu đào tạo những công dân chúng ta trở thành những công dân số. 
Công dân số là những người có kỹ năng khai thác, sử dụng Internet và công nghệ một cách an toàn và hiệu quả. Điều này không chỉ đế giải trí mà còn tìm kiếm thông tin, học tập, chia sẻ kiến thức, truyền thông, cũng như tìm hiếu kiến thức và pháp luật.<br>Chương 1 cung cấp kiến thức tống quát về thế giới số, công dân số, các yếu tố và kỹ năng cần thiết với công dân số. Những nội dung về chuyến đổi số, số hóa, chữ ký số, chính phủ số, chỉnh phủ điện tử, văn hóa, đạo đức và pháp luật trong thế giới số. Nội dung chính của chương bao gồm:<br>- Thế giới số;<br>- Công dân số;<br>- Chuyển đổi số;<br>- Chỉnh phủ điện tử và chính phủ số;<br>- Văn hóa, đạo đức và pháp luật tr...</code> | | <code>Microsoft Word hỗ trợ những tính năng nào?</code> | <code>Nhiều phần mềm soạn thảo văn bản phổ biến giúp người dùng tạo, biên tập và định dạng văn bản một cách tiện lợi có thể kể đến như Microsoft Word, một công cụ trong bộ Office của Microsoft, nổi tiếng với giaọ diện thân thiện và đầy đủ tính năng. Google Docs mang đến sự linh hoạt với khả năng làm việc đồng thời trực tuyến. LibreOffice Writer, với ưu điểm mã nguồn mở, hỗ trợ nhiều định dạng và tích họp nhiều tính năng soạn thảo. Notion không chỉ là ứng dụng ghi chú mà còn giúp quản lý dự án và tài liệu liên quan. Scrivener được thiết kế đặc biệt cho viết sách và sáng tác, với khả năng tổ chức nội dung hiệu quả. Tùy thuộc vào nhu cầu cụ thể, người dùng có thể lựa chọn phần mềm phù họp để tối ưu hóa quá trình làm việc với văn bản.<br>Trong tài liệu này, chúng tôi giới thiệu một công cụ xử lý văn bản mạnh mẽ và phổ biến là Microsoft Word, một phần quan trọng của bộ ứng dụng Microsoft Office. Được thiết kế để đáp ứng nhu cầu đa dạng của người dùng từ cá nhân đến doanh nghiệp, Word không chỉ là mộ...</code> | <code>Thế giới số là một khái niệm mà hiện nay chưa có một định nghĩa rõ ràng cụ thể nào để mô tả chính xác. Trong giáo trình này, chúng tôi trình bày những nội dung cơ bản liên quan đến thế giới số. Thuật ngữ “Thế giới số” được nhắc đến theo giai đoạn phát triển của lĩnh vực công nghệ thông tin, điện tử và viễn thông, đặc biệt từ những năm 1990 khi Internet bùng nổ và công nghệ kỹ thuật số trở nên phố biến. Thế giới số mô tả không gian ảo và môi trường kỹ thuật số mà con người tạo ra thông qua Internet, máy tính, các thiết bị điện tử kết nối với nhau và công nghệ kỹ thuật số để truy cập dữ liệu, thông tin và tương tác giữa con người và công nghệ.<br>Nói ngắn gọn thì thế giới số là một thuật ngữ mô tả sự phát triển không ngừng trong lĩnh vực công nghệ thông tin và công nghệ kỹ thuật số, tạo ra môi trường hỗ trợ con người tương tác thông qua Internet.<br>Thế giới số chính là thời đại thông tin ngày nay, mang lại vô số tiện ích hỗ trợ cho cuộc sống của với các thiết bị công nghệ hiện đại. Nó hỗ trợ ...</code> | | <code>Microsoft Word hỗ trợ những tính năng nào?</code> | <code>Nhiều phần mềm soạn thảo văn bản phổ biến giúp người dùng tạo, biên tập và định dạng văn bản một cách tiện lợi có thể kể đến như Microsoft Word, một công cụ trong bộ Office của Microsoft, nổi tiếng với giaọ diện thân thiện và đầy đủ tính năng. Google Docs mang đến sự linh hoạt với khả năng làm việc đồng thời trực tuyến. LibreOffice Writer, với ưu điểm mã nguồn mở, hỗ trợ nhiều định dạng và tích họp nhiều tính năng soạn thảo. Notion không chỉ là ứng dụng ghi chú mà còn giúp quản lý dự án và tài liệu liên quan. Scrivener được thiết kế đặc biệt cho viết sách và sáng tác, với khả năng tổ chức nội dung hiệu quả. 
Tùy thuộc vào nhu cầu cụ thể, người dùng có thể lựa chọn phần mềm phù họp để tối ưu hóa quá trình làm việc với văn bản.<br>Trong tài liệu này, chúng tôi giới thiệu một công cụ xử lý văn bản mạnh mẽ và phổ biến là Microsoft Word, một phần quan trọng của bộ ứng dụng Microsoft Office. Được thiết kế để đáp ứng nhu cầu đa dạng của người dùng từ cá nhân đến doanh nghiệp, Word không chỉ là mộ...</code> | <code> tác với dữ liệu và thông tin. Các ứng dụng này đa dạng từ các ứng dụng di động, phần mềm máy tính đến các ứng dụng trên web hoặc ứng dụng điều khiển thiết bị ứng dụng trong mọi lĩnh vực của đời sống. Ví dụ về sản phấm mà nền tảng cốt yếu dựa trên bộ sưu tập dữ liệu lớn, sử dụng các mô hình học máy để tạo ra ứng dụng ChatGPT và các ứng dụng thông minh mà chúng ta đang sử dụng ngày nay.<br>- Bảo mật và an ninh mạng: Bảo vệ dữ liệu quan trọng, thông tin cá nhân và hệ thống mạng là yếu tố cực kỳ quan trọng khi tham gia vào môi trường thế giới số. Bên cạnh sự bảo vệ của các ứng dụng, phần mềm, hệ thống thiết bị bảo mật thì mỗi cá nhân, tổ chức cũng tự chủ động thực hiện các biện pháp để hạn chế rủi ro khi tham gia môi trường thế giới số. Tuyệt đối tuân thủ theo các quy định pháp luật về bảo mật dữ liệu và sử dụng an toàn.<br>- Kỹ năng số và tương tác: Thế giới số là môi trường kết nối trên toàn thế giới do vậy mỗi cá nhân khi tham gia môi trường thế giới số này cần có các kỹ năng sử dụng, hiểu v...</code> | * Loss: [<code>TripletLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#tripletloss) with these parameters: ```json { "distance_metric": "TripletDistanceMetric.EUCLIDEAN", "triplet_margin": 5 } ``` ### Training Hyperparameters #### Non-Default Hyperparameters - `eval_strategy`: steps - `per_device_train_batch_size`: 32 - `per_device_eval_batch_size`: 32 - `gradient_accumulation_steps`: 16 - `learning_rate`: 3e-05 - `weight_decay`: 0.01 - `num_train_epochs`: 2 - `warmup_ratio`: 0.05 #### All Hyperparameters <details><summary>Click to expand</summary> - `overwrite_output_dir`: False - `do_predict`: False - `eval_strategy`: steps - `prediction_loss_only`: True - `per_device_train_batch_size`: 32 - `per_device_eval_batch_size`: 32 - `per_gpu_train_batch_size`: None - `per_gpu_eval_batch_size`: None - `gradient_accumulation_steps`: 16 - `eval_accumulation_steps`: None - `torch_empty_cache_steps`: None - `learning_rate`: 3e-05 - `weight_decay`: 0.01 - `adam_beta1`: 0.9 - `adam_beta2`: 0.999 - `adam_epsilon`: 1e-08 - `max_grad_norm`: 1.0 - `num_train_epochs`: 2 - `max_steps`: -1 - `lr_scheduler_type`: linear - `lr_scheduler_kwargs`: {} - `warmup_ratio`: 0.05 - `warmup_steps`: 0 - `log_level`: passive - `log_level_replica`: warning - `log_on_each_node`: True - `logging_nan_inf_filter`: True - `save_safetensors`: True - `save_on_each_node`: False - `save_only_model`: False - `restore_callback_states_from_checkpoint`: False - `no_cuda`: False - `use_cpu`: False - `use_mps_device`: False - `seed`: 42 - `data_seed`: None - `jit_mode_eval`: False - `use_ipex`: False - `bf16`: False - `fp16`: False - `fp16_opt_level`: O1 - `half_precision_backend`: auto - `bf16_full_eval`: False - `fp16_full_eval`: False - `tf32`: None - `local_rank`: 0 - `ddp_backend`: None - `tpu_num_cores`: None - `tpu_metrics_debug`: False - `debug`: [] - `dataloader_drop_last`: False - `dataloader_num_workers`: 0 - `dataloader_prefetch_factor`: None - `past_index`: -1 - `disable_tqdm`: False - `remove_unused_columns`: True - `label_names`: None - `load_best_model_at_end`: False - 
`ignore_data_skip`: False - `fsdp`: [] - `fsdp_min_num_params`: 0 - `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False} - `fsdp_transformer_layer_cls_to_wrap`: None - `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None} - `deepspeed`: None - `label_smoothing_factor`: 0.0 - `optim`: adamw_torch - `optim_args`: None - `adafactor`: False - `group_by_length`: False - `length_column_name`: length - `ddp_find_unused_parameters`: None - `ddp_bucket_cap_mb`: None - `ddp_broadcast_buffers`: False - `dataloader_pin_memory`: True - `dataloader_persistent_workers`: False - `skip_memory_metrics`: True - `use_legacy_prediction_loop`: False - `push_to_hub`: False - `resume_from_checkpoint`: None - `hub_model_id`: None - `hub_strategy`: every_save - `hub_private_repo`: None - `hub_always_push`: False - `gradient_checkpointing`: False - `gradient_checkpointing_kwargs`: None - `include_inputs_for_metrics`: False - `include_for_metrics`: [] - `eval_do_concat_batches`: True - `fp16_backend`: auto - `push_to_hub_model_id`: None - `push_to_hub_organization`: None - `mp_parameters`: - `auto_find_batch_size`: False - `full_determinism`: False - `torchdynamo`: None - `ray_scope`: last - `ddp_timeout`: 1800 - `torch_compile`: False - `torch_compile_backend`: None - `torch_compile_mode`: None - `dispatch_batches`: None - `split_batches`: None - `include_tokens_per_second`: False - `include_num_input_tokens_seen`: False - `neftune_noise_alpha`: None - `optim_target_modules`: None - `batch_eval_metrics`: False - `eval_on_start`: False - `use_liger_kernel`: False - `eval_use_gather_object`: False - `average_tokens_across_devices`: False - `prompts`: None - `batch_sampler`: batch_sampler - `multi_dataset_batch_sampler`: proportional </details> ### Training Logs | Epoch | Step | Training Loss | train loss | |:------:|:----:|:-------------:|:----------:| | 1.7065 | 500 | 0.158 | 0.2567 | ### Framework Versions - Python: 3.10.12 - Sentence Transformers: 3.3.1 - Transformers: 4.47.0 - PyTorch: 2.5.1+cu121 - Accelerate: 1.2.1 - Datasets: 3.3.1 - Tokenizers: 0.21.0 ## Citation ### BibTeX #### Sentence Transformers ```bibtex @inproceedings{reimers-2019-sentence-bert, title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks", author = "Reimers, Nils and Gurevych, Iryna", booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing", month = "11", year = "2019", publisher = "Association for Computational Linguistics", url = "https://arxiv.org/abs/1908.10084", } ``` #### TripletLoss ```bibtex @misc{hermans2017defense, title={In Defense of the Triplet Loss for Person Re-Identification}, author={Alexander Hermans and Lucas Beyer and Bastian Leibe}, year={2017}, eprint={1703.07737}, archivePrefix={arXiv}, primaryClass={cs.CV} } ``` <!-- ## Glossary *Clearly define terms in order to be accessible across audiences.* --> <!-- ## Model Card Authors *Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.* --> <!-- ## Model Card Contact *Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.* -->
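For reference, the triplet objective reported above can be reconstructed with the Sentence Transformers API as in the sketch below. It is illustrative only: the checkpoint id and the toy triplets are placeholders (not the actual base model or training data) and merely show the (anchor, positive, negative) column convention the trainer expects, together with the same Euclidean distance and margin of 5.

```python
from datasets import Dataset
from sentence_transformers import SentenceTransformer, SentenceTransformerTrainer, losses

# Placeholder checkpoint id -- substitute the actual base model.
model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")

# Toy triplets illustrating the expected (anchor, positive, negative) columns.
train_dataset = Dataset.from_dict({
    "anchor": ["Câu hỏi ví dụ về phần mềm soạn thảo văn bản"],
    "positive": ["Đoạn văn trả lời đúng cho câu hỏi"],
    "negative": ["Đoạn văn không liên quan đến câu hỏi"],
})

# Same configuration as reported above: Euclidean distance, margin 5.
loss = losses.TripletLoss(
    model=model,
    distance_metric=losses.TripletDistanceMetric.EUCLIDEAN,
    triplet_margin=5,
)

trainer = SentenceTransformerTrainer(model=model, train_dataset=train_dataset, loss=loss)
trainer.train()
```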
[ "TEXT_CLASSIFICATION" ]
[ "CHIA" ]
m42-health/med42-70b
m42-health
text-generation
[ "transformers", "pytorch", "llama", "text-generation", "m42", "health", "healthcare", "clinical-llm", "en", "arxiv:2404.14779", "license:other", "autotrain_compatible", "text-generation-inference", "region:us" ]
2023-10-09T10:19:14
2024-07-05T06:02:09
110
172
--- language: - en license: other license_name: med42 pipeline_tag: text-generation tags: - m42 - health - healthcare - clinical-llm extra_gated_heading: Access Med42 on Hugging Face extra_gated_description: This is a form to enable access to Med42 on Hugging Face. Please read the [Med42 License](https://huggingface.co/spaces/m42-health/License) and accept our license terms and acceptable use policy before submitting this form. Requests will be processed by the M42 Team within 2 working days. extra_gated_button_content: Submit extra_gated_fields: Full name: text Country: text Affiliation: text I certify the details provided above are correct and that I have read and agreed to the Med42 License agreement: checkbox inference: false --- # 🚨 **Update: Version 2 of Med42 Released!** 🚨 **Please find the models here:** [Med42-v2-70B](https://huggingface.co/m42-health/Llama3-Med42-70B) and [Med42-v2-8B](https://huggingface.co/m42-health/Llama3-Med42-8B) # **Med42 - Clinical Large Language Model** Med42 is an open-access clinical large language model (LLM) developed by M42 to expand access to medical knowledge. Built off LLaMA-2 and comprising 70 billion parameters, this generative AI system provides high-quality answers to medical questions. ## Model Details *Note: Use of this model is governed by the M42 Health license. In order to download the model weights (and tokenizer), please read the [Med42 License](https://huggingface.co/spaces/m42-health/License) and accept our License by requesting access here.* Beginning with the base LLaMa-2 model, Med42 was instruction-tuned on a dataset of ~250M tokens compiled from different open-access sources, including medical flashcards, exam questions, and open-domain dialogues. **Model Developers:** M42 Health AI Team **Finetuned from model:** Llama-2 - 70B **Context length:** 4k tokens **Input:** Text only data **Output:** Model generates text only **Status:** This is a static model trained on an offline dataset. Future versions of the tuned models will be released as we enhance model's performance. **License:** A custom license is available [here](https://huggingface.co/spaces/m42-health/License) **Research Paper:** [Med42 - Evaluating Fine-Tuning Strategies for Medical LLMs: Full-Parameter vs. Parameter-Efficient Approaches](https://arxiv.org/abs/2404.14779) ## Intended Use Med42 is being made available for further testing and assessment as an AI assistant to enhance clinical decision-making and enhance access to an LLM for healthcare use. Potential use cases include: - Medical question answering - Patient record summarization - Aiding medical diagnosis - General health Q&A To get the expected features and performance for the model, a specific formatting needs to be followed, including the `<|system|>`, `<|prompter|>` and `<|assistant|>` tags. ```python from transformers import AutoModelForCausalLM, AutoTokenizer model_name_or_path = "m42-health/med42-70b" model = AutoModelForCausalLM.from_pretrained(model_name_or_path, device_map="auto") tokenizer = AutoTokenizer.from_pretrained(model_name_or_path) prompt = "What are the symptoms of diabetes ?" prompt_template=f''' <|system|>: You are a helpful medical assistant created by M42 Health in the UAE. 
<|prompter|>:{prompt} <|assistant|>: ''' input_ids = tokenizer(prompt_template, return_tensors='pt').input_ids.cuda() output = model.generate(inputs=input_ids, temperature=0.7, do_sample=True, eos_token_id=tokenizer.eos_token_id, pad_token_id=tokenizer.pad_token_id, max_new_tokens=512) print(tokenizer.decode(output[0])) ``` ## Hardware and Software The training process was performed on the Condor Galaxy 1 (CG-1) supercomputer platform. ## Evaluation Results Med42 achieves competitive performance on various medical benchmarks, including MedQA, MedMCQA, PubMedQA, HeadQA, and Measuring Massive Multitask Language Understanding (MMLU) clinical topics. For all evaluations reported so far, we use [EleutherAI's evaluation harness library](https://github.com/EleutherAI/lm-evaluation-harness) and report zero-shot accuracies (except where otherwise stated). We compare the performance with that reported for other models (ClinicalCamel-70B, GPT-3.5, GPT-4.0, Med-PaLM 2). |Dataset|Med42|ClinicalCamel-70B|GPT-3.5|GPT-4.0|Med-PaLM-2 (5-shot)*| |---|---|---|---|---|---| |MMLU Clinical Knowledge|74.3|69.8|69.8|86.0|88.3| |MMLU College Biology|84.0|79.2|72.2|95.1|94.4| |MMLU College Medicine|68.8|67.0|61.3|76.9|80.9| |MMLU Medical Genetics|86.0|69.0|70.0|91.0|90.0| |MMLU Professional Medicine|79.8|71.3|70.2|93.0|95.2| |MMLU Anatomy|67.4|62.2|56.3|80.0|77.8| |MedMCQA|60.9|47.0|50.1|69.5|71.3| |MedQA|61.5|53.4|50.8|78.9|79.7| |USMLE Self-Assessment|71.7|-|49.1|83.8|-| |USMLE Sample Exam|72.0|54.3|56.9|84.3|-| **We note that 0-shot performance is not reported for Med-PaLM 2. Further details can be found at [https://github.com/m42health/med42](https://github.com/m42health/med42)*. ### Key performance metrics: - Med42 achieves a 72% accuracy on the US Medical Licensing Examination (USMLE) sample exam, surpassing the prior state of the art among openly available medical LLMs. - 61.5% on the MedQA dataset (compared to 50.8% for GPT-3.5) - Consistently higher performance on MMLU clinical topics compared to GPT-3.5. ## Limitations & Safe Use - Med42 is not ready for real clinical use. Extensive human evaluation is still underway, as it is required to ensure safety. - Potential for generating incorrect or harmful information. - Risk of perpetuating biases in training data. Use this model responsibly! Do not rely on it for medical usage without rigorous safety testing. ## Accessing Med42 and Reporting Issues Please report any software "bug" or other problems through one of the following means: - Reporting issues with the model: [https://github.com/m42health/med42](https://github.com/m42health/med42) - Reporting risky content generated by the model, bugs and/or any security concerns: [https://forms.office.com/r/YMJu3kcKat](https://forms.office.com/r/YMJu3kcKat) - M42’s privacy policy available at [https://m42.ae/privacy-policy/](https://m42.ae/privacy-policy/) - Reporting violations of the Acceptable Use Policy or unlicensed uses of Med42: <[email protected]> ## Citation Our paper has been published at AAAI 2024 Spring Symposium - Clinical Foundation Models and is available on arXiv: [https://arxiv.org/abs/2404.14779](https://arxiv.org/abs/2404.14779) ``` @article{christophe2024med42, title={Med42 -- Evaluating Fine-Tuning Strategies for Medical LLMs: Full-Parameter vs. 
Parameter-Efficient Approaches}, author={Clément Christophe and Praveen K Kanithi and Prateek Munjal and Tathagata Raha and Nasir Hayat and Ronnie Rajan and Ahmed Al-Mahrooqi and Avani Gupta and Muhammad Umar Salman and Gurpreet Gosal and Bhargav Kanakiya and Charles Chen and Natalia Vassilieva and Boulbaba Ben Amor and Marco AF Pimentel and Shadab Khan}, year={2024}, eprint={2404.14779}, archivePrefix={arXiv}, primaryClass={cs.CL} } ```
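As a convenience, the tag format above can be wrapped in a small helper before tokenization. This is an illustrative sketch, not part of the official release: the function name is hypothetical and the spacing simply mirrors the template shown in the usage example.

```python
def build_med42_prompt(
    question: str,
    system: str = "You are a helpful medical assistant created by M42 Health in the UAE.",
) -> str:
    """Compose the <|system|> / <|prompter|> / <|assistant|> format described above."""
    return f"\n<|system|>: {system}\n<|prompter|>:{question}\n<|assistant|>:"

# The resulting string can be tokenized and passed to model.generate()
# exactly as in the snippet from the card.
prompt_template = build_med42_prompt("What are the symptoms of diabetes ?")
```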
[ "QUESTION_ANSWERING", "SUMMARIZATION" ]
[ "MEDQA", "PUBMEDQA" ]
bsmit1659/Phi-3-mini-128k-instruct-0.2-awq
bsmit1659
text-generation
[ "transformers", "safetensors", "phi3", "text-generation", "nlp", "code", "conversational", "custom_code", "en", "license:mit", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "4-bit", "awq", "region:us" ]
2024-07-03T21:53:01
2024-07-03T23:26:03
109
0
--- language: - en license: mit license_link: https://huggingface.co/microsoft/Phi-3-mini-128k-instruct/resolve/main/LICENSE pipeline_tag: text-generation tags: - nlp - code widget: - messages: - role: user content: Can you provide ways to eat combinations of bananas and dragonfruits? --- # Phi-3-mini-128k-instruct V0.2 - AWQ - Model creator: [Microsoft](https://huggingface.co/microsoft) - Original model: [Phi-3-mini-128k-instruct](https://huggingface.co/microsoft/Phi-3-mini-128k-instruct) <!-- description start --> ## Description This repo contains AWQ model files for the recently released upgrade of [Phi-3-mini-128k-instruct](https://huggingface.co/microsoft/Phi-3-mini-128k-instruct). <!-- README_AWQ.md-provided-files start --> ## AWQ parameters Models are released as sharded safetensors files. | Bits | GS | AWQ Dataset | Seq Len | Size | | ---- | -- | ----------- | ------- | ---- | | 4 | 128 | [pile-val-backup](mit-han-lab/pile-val-backup) | 128000 | 2.28 GB <!-- README_AWQ.md-provided-files end --> ## Model Summary The Phi-3-Mini-128K-Instruct is a 3.8 billion-parameter, lightweight, state-of-the-art open model trained using the Phi-3 datasets. This dataset includes both synthetic data and filtered publicly available website data, with an emphasis on high-quality and reasoning-dense properties. The model belongs to the Phi-3 family with the Mini version in two variants [4K](https://huggingface.co/microsoft/Phi-3-mini-4k-instruct) and [128K](https://huggingface.co/microsoft/Phi-3-mini-128k-instruct) which is the context length (in tokens) that it can support. After initial training, the model underwent a post-training process that involved supervised fine-tuning and direct preference optimization to enhance its ability to follow instructions and adhere to safety measures. When evaluated against benchmarks that test common sense, language understanding, mathematics, coding, long-term context, and logical reasoning, the Phi-3 Mini-128K-Instruct demonstrated robust and state-of-the-art performance among models with fewer than 13 billion parameters. 
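The AWQ files in this repository can usually be loaded directly through `transformers`; the sketch below is illustrative only and assumes `autoawq` and `accelerate` are installed alongside a recent `transformers` release.

```python
# Illustrative sketch -- assumes autoawq + accelerate are installed.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "bsmit1659/Phi-3-mini-128k-instruct-0.2-awq"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16,   # AWQ kernels run in half precision
    device_map="auto",
    trust_remote_code=True,
)

# Prompt follows the chat format documented further down in this card.
prompt = "<|user|>\nSummarize what AWQ quantization does.<|end|>\n<|assistant|>\n"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```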
Resources and Technical Documentation: 🏡 [Phi-3 Portal](https://azure.microsoft.com/en-us/products/phi-3) <br> 📰 [Phi-3 Microsoft Blog](https://aka.ms/Phi-3Build2024) <br> 📖 [Phi-3 Technical Report](https://aka.ms/phi3-tech-report) <br> 🛠️ [Phi-3 on Azure AI Studio](https://aka.ms/phi3-azure-ai) <br> 👩‍🍳 [Phi-3 Cookbook](https://github.com/microsoft/Phi-3CookBook) <br> 🖥️ [Try It](https://aka.ms/try-phi3) | | Short Context | Long Context | | :- | :- | :- | | Mini | 4K [[HF]](https://huggingface.co/microsoft/Phi-3-mini-4k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-mini-4k-instruct-onnx) ; [[GGUF]](https://huggingface.co/microsoft/Phi-3-mini-4k-instruct-gguf) | 128K [[HF]](https://huggingface.co/microsoft/Phi-3-mini-128k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-mini-128k-instruct-onnx)| | Small | 8K [[HF]](https://huggingface.co/microsoft/Phi-3-small-8k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-small-8k-instruct-onnx-cuda) | 128K [[HF]](https://huggingface.co/microsoft/Phi-3-small-128k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-small-128k-instruct-onnx-cuda)| | Medium | 4K [[HF]](https://huggingface.co/microsoft/Phi-3-medium-4k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-medium-4k-instruct-onnx-cuda) | 128K [[HF]](https://huggingface.co/microsoft/Phi-3-medium-128k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-medium-128k-instruct-onnx-cuda)| | Vision | | 128K [[HF]](https://huggingface.co/microsoft/Phi-3-vision-128k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-vision-128k-instruct-onnx-cuda)| ## Intended Uses **Primary use cases** The model is intended for commercial and research use in English. The model is suited for applications which require: 1) Memory/compute constrained environments 2) Latency bound scenarios 3) Strong reasoning (especially code, math and logic) Our model is designed to accelerate research on language and multimodal models, for use as a building block for generative AI powered features. **Use case considerations** Our models are not specifically designed or evaluated for all downstream purposes. Developers should consider common limitations of language models as they select use cases, and evaluate and mitigate for accuracy, safety, and fairness before using within a specific downstream use case, particularly for high risk scenarios. Developers should be aware of and adhere to applicable laws or regulations (including privacy, trade compliance laws, etc.) that are relevant to their use case. Nothing contained in this Model Card should be interpreted as or deemed a restriction or modification to the license the model is released under. ## Release Notes This is an update over the original instruction-tuned Phi-3-mini release based on valuable customer feedback. The model used additional post-training data leading to substantial gains on long-context understanding, instruction following, and structured output. We also improved multi-turn conversation quality, added explicit support for the <|system|> tag, and significantly improved reasoning capability. We believe most use cases will benefit from this release, but we encourage users to test in their particular AI applications. We appreciate the enthusiastic adoption of the Phi-3 model family, and continue to welcome all feedback from the community. 
These tables below highlights improvements on instruction following, structure output, reasoning, and long-context understanding of the new release on our public and internal benchmark datasets. | Benchmarks | Original | June 2024 Update | | :- | :- | :- | | Instruction Extra Hard | 5.7 | 5.9 | | Instruction Hard | 5.0 | 5.2 | | JSON Structure Output | 1.9 | 60.1 | | XML Structure Output | 47.8 | 52.9 | | GPQA | 25.9 | 29.7 | | MMLU | 68.1 | 69.7 | | **Average** | **25.7** | **37.3** | RULER: a retrieval-based benchmark for long context understanding | Model | 4K | 8K | 16K | 32K | 64K | 128K | Average | | :-------------------| :------| :------| :------| :------| :------| :------| :---------| | Original | 86.7 | 78.1 | 75.6 | 70.3 | 58.9 | 43.3 | **68.8** | | June 2024 Update | 92.4 | 91.1 | 90.8 | 87.9 | 79.8 | 65.6 | **84.6** | RepoQA: a benchmark for long context code understanding | Model | Python | C++ | Rust | Java | TypeScript | Average | | :-------------------| :--------| :-----| :------| :------| :------------| :---------| | Original | 27 | 29 | 40 | 33 | 33 | **32.4** | | June 2024 Update | 85 | 63 | 72 | 93 | 72 | **77** | Notes: if users would like to check out the previous version, use the git commit id **bb5bf1e4001277a606e11debca0ef80323e5f824**. For the model conversion, e.g. GGUF and other formats, we invite the community to experiment with various approaches and share your valuable feedback. Let's innovate together! ## How to Use Phi-3 Mini-128K-Instruct has been integrated in the development version (4.41.3) of `transformers`. Until the official version is released through `pip`, ensure that you are doing one of the following: * When loading the model, ensure that `trust_remote_code=True` is passed as an argument of the `from_pretrained()` function. * Update your local `transformers` to the development version: `pip uninstall -y transformers && pip install git+https://github.com/huggingface/transformers`. The previous command is an alternative to cloning and installing from the source. The current `transformers` version can be verified with: `pip list | grep transformers`. Examples of required packages: ``` flash_attn==2.5.8 torch==2.3.1 accelerate==0.31.0 transformers==4.41.2 ``` Phi-3 Mini-128K-Instruct is also available in [Azure AI Studio](https://aka.ms/try-phi3) ### Tokenizer Phi-3 Mini-128K-Instruct supports a vocabulary size of up to `32064` tokens. The [tokenizer files](https://huggingface.co/microsoft/Phi-3-mini-128k-instruct/blob/main/added_tokens.json) already provide placeholder tokens that can be used for downstream fine-tuning, but they can also be extended up to the model's vocabulary size. ### Chat Format Given the nature of the training data, the Phi-3 Mini-128K-Instruct model is best suited for prompts using the chat format as follows. You can provide the prompt as a question with a generic template as follow: ```markdown <|system|> You are a helpful assistant.<|end|> <|user|> Question?<|end|> <|assistant|> ``` For example: ```markdown <|system|> You are a helpful assistant.<|end|> <|user|> How to explain Internet for a medieval knight?<|end|> <|assistant|> ``` where the model generates the text after `<|assistant|>` . 
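Equivalently, the chat template bundled with the tokenizer can build this format programmatically. The sketch below is illustrative and assumes the checkpoint ships a chat template matching the layout shown above.

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-128k-instruct")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "How to explain Internet for a medieval knight?"},
]

# Produces the <|system|> / <|user|> / <|assistant|> layout shown above,
# ending with the <|assistant|> tag so the model continues from there.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
```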
In case of few-shots prompt, the prompt can be formatted as the following: ```markdown <|system|> You are a helpful travel assistant.<|end|> <|user|> I am going to Paris, what should I see?<|end|> <|assistant|> Paris, the capital of France, is known for its stunning architecture, art museums, historical landmarks, and romantic atmosphere. Here are some of the top attractions to see in Paris:\n\n1. The Eiffel Tower: The iconic Eiffel Tower is one of the most recognizable landmarks in the world and offers breathtaking views of the city.\n2. The Louvre Museum: The Louvre is one of the world's largest and most famous museums, housing an impressive collection of art and artifacts, including the Mona Lisa.\n3. Notre-Dame Cathedral: This beautiful cathedral is one of the most famous landmarks in Paris and is known for its Gothic architecture and stunning stained glass windows.\n\nThese are just a few of the many attractions that Paris has to offer. With so much to see and do, it's no wonder that Paris is one of the most popular tourist destinations in the world."<|end|> <|user|> What is so great about #1?<|end|> <|assistant|> ``` ### Sample inference code This code snippets show how to get quickly started with running the model on a GPU: ```python import torch from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline torch.random.manual_seed(0) model = AutoModelForCausalLM.from_pretrained( "microsoft/Phi-3-mini-128k-instruct", device_map="cuda", torch_dtype="auto", trust_remote_code=True, ) tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-128k-instruct") messages = [ {"role": "system", "content": "You are a helpful AI assistant."}, {"role": "user", "content": "Can you provide ways to eat combinations of bananas and dragonfruits?"}, {"role": "assistant", "content": "Sure! Here are some ways to eat bananas and dragonfruits together: 1. Banana and dragonfruit smoothie: Blend bananas and dragonfruits together with some milk and honey. 2. Banana and dragonfruit salad: Mix sliced bananas and dragonfruits together with some lemon juice and honey."}, {"role": "user", "content": "What about solving an 2x + 3 = 7 equation?"}, ] pipe = pipeline( "text-generation", model=model, tokenizer=tokenizer, ) generation_args = { "max_new_tokens": 500, "return_full_text": False, "temperature": 0.0, "do_sample": False, } output = pipe(messages, **generation_args) print(output[0]['generated_text']) ``` Notes: If you want to use flash attention, call _AutoModelForCausalLM.from_pretrained()_ with _attn_implementation="flash_attention_2"_ ## Responsible AI Considerations Like other language models, the Phi series models can potentially behave in ways that are unfair, unreliable, or offensive. Some of the limiting behaviors to be aware of include: + Quality of Service: the Phi models are trained primarily on English text. Languages other than English will experience worse performance. English language varieties with less representation in the training data might experience worse performance than standard American English. + Representation of Harms & Perpetuation of Stereotypes: These models can over- or under-represent groups of people, erase representation of some groups, or reinforce demeaning or negative stereotypes. Despite safety post-training, these limitations may still be present due to differing levels of representation of different groups or prevalence of examples of negative stereotypes in training data that reflect real-world patterns and societal biases. 
+ Inappropriate or Offensive Content: these models may produce other types of inappropriate or offensive content, which may make it inappropriate to deploy for sensitive contexts without additional mitigations that are specific to the use case. + Information Reliability: Language models can generate nonsensical content or fabricate content that might sound reasonable but is inaccurate or outdated. + Limited Scope for Code: The majority of Phi-3 training data is based in Python and uses common packages such as "typing, math, random, collections, datetime, itertools". If the model generates Python scripts that utilize other packages or scripts in other languages, we strongly recommend users manually verify all API uses. Developers should apply responsible AI best practices and are responsible for ensuring that a specific use case complies with relevant laws and regulations (e.g. privacy, trade, etc.). Important areas for consideration include: + Allocation: Models may not be suitable for scenarios that could have consequential impact on legal status or the allocation of resources or life opportunities (ex: housing, employment, credit, etc.) without further assessments and additional debiasing techniques. + High-Risk Scenarios: Developers should assess the suitability of using models in high-risk scenarios where unfair, unreliable or offensive outputs might be extremely costly or lead to harm. This includes providing advice in sensitive or expert domains where accuracy and reliability are critical (ex: legal or health advice). Additional safeguards should be implemented at the application level according to the deployment context. + Misinformation: Models may produce inaccurate information. Developers should follow transparency best practices and inform end-users they are interacting with an AI system. At the application level, developers can build feedback mechanisms and pipelines to ground responses in use-case specific, contextual information, a technique known as Retrieval Augmented Generation (RAG). + Generation of Harmful Content: Developers should assess outputs for their context and use available safety classifiers or custom solutions appropriate for their use case. + Misuse: Other forms of misuse such as fraud, spam, or malware production may be possible, and developers should ensure that their applications do not violate applicable laws and regulations. ## Training ### Model * Architecture: Phi-3 Mini-128K-Instruct has 3.8B parameters and is a dense decoder-only Transformer model. The model is fine-tuned with supervised fine-tuning (SFT) and Direct Preference Optimization (DPO) to ensure alignment with human preferences and safety guidelines. * Inputs: Text. It is best suited for prompts using chat format. * Context length: 128K tokens * GPUs: 512 H100-80G * Training time: 10 days * Training data: 4.9T tokens * Outputs: Generated text in response to the input * Dates: Our models were trained between May and June 2024 * Status: This is a static model trained on an offline dataset with cutoff date October 2023. Future versions of the tuned models may be released as we improve models. * Release dates: June 2024. 
### Datasets Our training data includes a wide variety of sources, totaling 4.9 trillion tokens, and is a combination of 1) Publicly available documents filtered rigorously for quality, selected high-quality educational data, and code; 2) Newly created synthetic, “textbook-like” data for the purpose of teaching math, coding, common sense reasoning, general knowledge of the world (science, daily activities, theory of mind, etc.); 3) High quality chat format supervised data covering various topics to reflect human preferences on different aspects such as instruct-following, truthfulness, honesty and helpfulness. We are focusing on the quality of data that could potentially improve the reasoning ability for the model, and we filter the publicly available documents to contain the correct level of knowledge. As an example, the result of a game in premier league in a particular day might be good training data for frontier models, but we need to remove such information to leave more model capacity for reasoning for the small size models. More details about data can be found in the [Phi-3 Technical Report](https://aka.ms/phi3-tech-report). ### Fine-tuning A basic example of multi-GPUs supervised fine-tuning (SFT) with TRL and Accelerate modules is provided [here](https://huggingface.co/microsoft/Phi-3-mini-128k-instruct/resolve/main/sample_finetune.py). ## Benchmarks We report the results under completion format for Phi-3-Mini-128K-Instruct on standard open-source benchmarks measuring the model's reasoning ability (both common sense reasoning and logical reasoning). We compare to Mistral-7b-v0.1, Mixtral-8x7b, Gemma 7B, Llama-3-8B-Instruct, and GPT-3.5. All the reported numbers are produced with the exact same pipeline to ensure that the numbers are comparable. These numbers might differ from other published numbers due to slightly different choices in the evaluation. As is now standard, we use few-shot prompts to evaluate the models, at temperature 0. The prompts and number of shots are part of a Microsoft internal tool to evaluate language models, and in particular we did no optimization to the pipeline for Phi-3. More specifically, we do not change prompts, pick different few-shot examples, change prompt format, or do any other form of optimization for the model. The number of k–shot examples is listed per-benchmark. 
| Category | Benchmark | Phi-3-Mini-128K-Ins | Gemma-7B | Mistral-7B | Mixtral-8x7B | Llama-3-8B-Ins | GPT3.5-Turbo-1106 | | :----------| :-----------| :---------------------| :----------| :------------| :--------------| :----------------| :-------------------| | Popular aggregated benchmark | AGI Eval <br>5-shot| 39.5 | 42.1 | 35.1 | 45.2 | 42 | 48.4 | | | MMLU <br>5-shot | 69.7 | 63.6 | 61.7 | 70.5 | 66.5 | 71.4 | | | BigBench Hard <br>3-shot | 72.1 | 59.6 | 57.3 | 69.7 | 51.5 | 68.3 | | Language Understanding | ANLI <br>7-shot | 52.3 | 48.7 | 47.1 | 55.2 | 57.3 | 58.1 | | | HellaSwag <br>5-shot | 70.5 | 49.8 | 58.5 | 70.4 | 71.1 | 78.8 | | Reasoning | ARC Challenge <br>10-shot | 85.5 | 78.3 | 78.6 | 87.3 | 82.8 | 87.4 | | | BoolQ <br>0-shot | 77.1 | 66 | 72.2 | 76.6 | 80.9 | 79.1 | | | MedQA <br>2-shot | 56.4 | 49.6 | 50 | 62.2 | 60.5 | 63.4 | | | OpenBookQA <br>10-shot | 78.8 | 78.6 | 79.8 | 85.8 | 82.6 | 86 | | | PIQA <br>5-shot | 80.1 | 78.1 | 77.7 | 86 | 75.7 | 86.6 | | | GPQA <br>0-shot | 29.7 | 2.9 | 15 | 6.9 | 32.4 | 29.9 | | | Social IQA <br>5-shot | 74.7 | 65.5 | 74.6 | 75.9 | 73.9 | 68.3 | | | TruthfulQA (MC2) <br>10-shot | 64.8 | 52.1 | 53 | 60.1 | 63.2 | 67.7 | | | WinoGrande <br>5-shot | 71.0 | 55.6 | 54.2 | 62 | 65 | 68.8 | | Factual Knowledge | TriviaQA <br>5-shot | 57.8 | 72.3 | 75.2 | 82.2 | 67.7 | 85.8 | | Math | GSM8K CoTT <br>8-shot | 85.3 | 59.8 | 46.4 | 64.7 | 77.4 | 78.1 | | Code Generation | HumanEval <br>0-shot | 60.4 | 34.1 | 28.0 | 37.8 | 60.4 | 62.2 | | | MBPP <br>3-shot | 70.0 | 51.5 | 50.8 | 60.2 | 67.7 | 77.8 | | **Average** | | **66.4** | **56.0** | **56.4** | **64.4** | **65.5** | **70.3** | **Long Context**: Phi-3 Mini-128K-Instruct supports 128K context length, therefore the model is capable of several long context tasks including long document/meeting summarization, long document QA. | Benchmark | Phi-3 Mini-128K-Instruct | Mistral-7B | Mixtral 8x7B | LLaMA-3-8B-Instruct | | :---------------| :--------------------------|:------------|:--------------|:---------------------| | GovReport | 25.3 | 4.9 | 20.3 | 10.3 | | QMSum | 21.9 | 15.5 | 20.6 | 2.9 | | Qasper | 41.6 | 23.5 | 26.6 | 8.1 | | SQuALITY | 24.1 | 14.7 | 16.2 | 25 | | SummScreenFD | 16.8 | 9.3 | 11.3 | 5.1 | | **Average** | **25.9** | **13.6** | **19.0** | **10.3** | We take a closer look at different categories across 100 public benchmark datasets at the table below: | Category | Phi-3-Mini-128K-Instruct | Gemma-7B | Mistral-7B | Mixtral 8x7B | Llama-3-8B-Instruct | GPT-3.5-Turbo | |:----------|:--------------------------|:----------|:------------|:--------------|:---------------------|:---------------| | Popular aggregated benchmark | 60.6 | 59.4 | 56.5 | 66.2 | 59.9 | 67.0 | | Reasoning | 69.4 | 60.3 | 62.8 | 68.1 | 69.6 | 71.7 | | Language understanding | 57.5 | 57.6 | 52.5 | 66.1 | 63.2 | 67.7 | | Code generation | 61.0 | 45.6 | 42.9 | 52.7 | 56.4 | 70.4 | | Math | 51.6 | 35.8 | 25.4 | 40.3 | 41.1 | 52.8 | | Factual knowledge | 35.8 | 46.7 | 49.8 | 58.6 | 43.1 | 63.4 | | Multilingual | 56.4 | 66.5 | 57.4 | 66.7 | 66.6 | 71.0 | | Robustness | 61.1 | 38.4 | 40.6 | 51.0 | 64.5 | 69.3 | Overall, the model with only 3.8B-param achieves a similar level of language understanding and reasoning ability as much larger models. However, it is still fundamentally limited by its size for certain tasks. The model simply does not have the capacity to store too much world knowledge, which can be seen for example with low performance on TriviaQA. 
However, we believe such weakness can be resolved by augmenting Phi-3-Mini with a search engine. ## Cross Platform Support [ONNX runtime](https://onnxruntime.ai/blogs/accelerating-phi-3) now supports Phi-3 mini models across platforms and hardware. Optimized phi-3 models are also published here in ONNX format, to run with ONNX Runtime on CPU and GPU across devices, including server platforms, Windows, Linux and Mac desktops, and mobile CPUs, with the precision best suited to each of these targets. DirectML GPU acceleration is supported for Windows desktops GPUs (AMD, Intel, and NVIDIA). Along with DML, ONNX Runtime provides cross platform support for Phi3 mini across a range of devices CPU, GPU, and mobile. Here are some of the optimized configurations we have added: 1. ONNX models for int4 DML: Quantized to int4 via AWQ 2. ONNX model for fp16 CUDA 3. ONNX model for int4 CUDA: Quantized to int4 via RTN 4. ONNX model for int4 CPU and Mobile: Quantized to int4 via RTN ## Software * [PyTorch](https://github.com/pytorch/pytorch) * [Transformers](https://github.com/huggingface/transformers) * [Flash-Attention](https://github.com/HazyResearch/flash-attention) ## Hardware Note that by default, the Phi-3 Mini-128K-Instruct model uses flash attention, which requires certain types of GPU hardware to run. We have tested on the following GPU types: * NVIDIA A100 * NVIDIA A6000 * NVIDIA H100 If you want to run the model on: * NVIDIA V100 or earlier generation GPUs: call AutoModelForCausalLM.from_pretrained() with attn_implementation="eager" * Optimized inference on GPU, CPU, and Mobile: use the **ONNX** models [128K](https://aka.ms/phi3-mini-128k-instruct-onnx) ## License The model is licensed under the [MIT license](https://huggingface.co/microsoft/Phi-3-mini-128k/resolve/main/LICENSE). ## Trademarks This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft trademarks or logos is subject to and must follow [Microsoft’s Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks). Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship. Any use of third-party trademarks or logos are subject to those third-party’s policies.
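For quick reference, the attention-implementation switch described in the Hardware section above looks roughly like the following sketch (illustrative only; both calls use standard `transformers` arguments).

```python
import torch
from transformers import AutoModelForCausalLM

model_id = "microsoft/Phi-3-mini-128k-instruct"

# A100 / A6000 / H100 class GPUs: flash attention, as recommended above.
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,
    device_map="auto",
    attn_implementation="flash_attention_2",
    trust_remote_code=True,
)

# V100 or earlier generation GPUs: fall back to the eager implementation.
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16,
    device_map="auto",
    attn_implementation="eager",
    trust_remote_code=True,
)
```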
[ "SUMMARIZATION" ]
[ "MEDQA" ]
mav23/Llama3-OpenBioLLM-70B-GGUF
mav23
null
[ "gguf", "llama-3", "llama", "Mixtral", "instruct", "finetune", "chatml", "DPO", "RLHF", "gpt4", "distillation", "heathcare", "medical", "clinical", "med", "lifescience", "Pharmaceutical", "Pharma", "en", "arxiv:2305.18290", "arxiv:2303.13375", "arxiv:2212.13138", "arxiv:2305.09617", "arxiv:2402.07023", "base_model:meta-llama/Meta-Llama-3-70B-Instruct", "base_model:quantized:meta-llama/Meta-Llama-3-70B-Instruct", "license:llama3", "endpoints_compatible", "region:us", "conversational" ]
2024-10-09T09:13:34
2024-10-09T15:34:27
109
0
--- base_model: meta-llama/Meta-Llama-3-70B-Instruct language: - en license: llama3 tags: - llama-3 - llama - Mixtral - instruct - finetune - chatml - DPO - RLHF - gpt4 - distillation - heathcare - medical - clinical - med - lifescience - Pharmaceutical - Pharma widget: - example_title: OpenBioLLM-70B messages: - role: system content: You are an expert and experienced from the healthcare and biomedical domain with extensive medical knowledge and practical experience. - role: user content: How long does it take for newborn jaundice to go away? output: text: 'Newborn jaundice, also known as neonatal jaundice, is a common condition in newborns where the yellowing of the skin and eyes occurs due to an elevated level of bilirubin in the blood. Bilirubin is a yellow pigment that forms when red blood cells break down. In most cases, newborn jaundice resolves on its own without any specific treatment. The duration of newborn jaundice can vary depending on several factors such as the underlying cause, gestational age at birth, and individual variations in bilirubin metabolism. Here are some general guidelines: 1. Physiological jaundice: This is the most common type of newborn jaundice and usually appears within 24-72 hours after birth. It tends to peak between the second and fifth day of life and gradually improves over the next week or two. By the time the baby is one week old, the jaundice should have mostly resolved. 2. Breast milk jaundice: This type of jaundice occurs in breastfed babies and may appear later than physiological jaundice, typically between the fifth and fourteenth day of life. It tends to persist for a longer duration but usually resolves within six weeks after birth. 3. Pathological jaundice: This type of jaundice is less common and occurs due to an underlying medical condition that affects bilirubin metabolism or liver function. The duration of pathological jaundice depends on the specific cause and may require treatment. It''s important for parents to monitor their newborn''s jaundice closely and seek medical advice if the jaundice progresses rapidly, becomes severe, or is accompanied by other symptoms such as poor feeding, lethargy, or excessive sleepiness. In these cases, further evaluation and management may be necessary. Remember that each baby is unique, and the timing of jaundice resolution can vary. If you have concerns about your newborn''s jaundice, it''s always best to consult with a healthcare professional for personalized advice and guidance.' 
model-index: - name: OpenBioLLM-70B results: [] --- <div align="center"> <img width="260px" src="https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/BrQCb95lmEIFz79QAmoNA.png"></div> ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/fJIOPJnY6Ff6fUiSIuMEt.png) <div align="center"> <h1>Advancing Open-source Large Language Models in Medical Domain</h1> </div> <p align="center" style="margin-top: 0px;"> <a href="https://colab.research.google.com/drive/1F5oV20InEYeAJGmBwYF9NM_QhLmjBkKJ?usp=sharing"> <img src="https://colab.research.google.com/assets/colab-badge.svg" alt="OpenChat Logo" style="width:20px; vertical-align: middle; display: inline-block; margin-right: 5px; margin-left: 10px; margin-top: 0px; margin-bottom: 0px;"/> <span class="link-text" style=" margin-right: 5px;">Online Demo</span> </a> | <a href="https://github.com/openlifescience-ai"> <img src="https://github.githubassets.com/assets/GitHub-Mark-ea2971cee799.png" alt="GitHub Logo" style="width:20px; vertical-align: middle; display: inline-block; margin-right: 5px; margin-left: 5px; margin-top: 0px; margin-bottom: 0px;"/> <span class="link-text" style=" margin-right: 5px;">GitHub</span> </a> | <a href="#"> <img src="https://github.com/alpayariyak/openchat/blob/master/assets/arxiv-logomark-small-square-border.png?raw=true" alt="ArXiv Logo" style="width:20px; vertical-align: middle; display: inline-block; margin-right: 5px; margin-left: 5px; margin-top: 0px; margin-bottom: 0px;"/> <span class="link-text" style="margin-right: 5px;">Paper</span> </a> | <a href="https://discord.gg/A5Fjf5zC69"> <img src="https://cloud.githubusercontent.com/assets/6291467/26705903/96c2d66e-477c-11e7-9f4e-f3c0efe96c9a.png" alt="Discord Logo" style="width:20px; vertical-align: middle; display: inline-block; margin-right: 5px; margin-left: 5px; margin-top: 0px; margin-bottom: 0px;"/> <span class="link-text">Discord</span> </a> </p> ![image/jpeg](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/KGmRE5w2sepNtwsEu8t7K.jpeg) Introducing OpenBioLLM-70B: A State-of-the-Art Open Source Biomedical Large Language Model OpenBioLLM-70B is an advanced open source language model designed specifically for the biomedical domain. Developed by Saama AI Labs, this model leverages cutting-edge techniques to achieve state-of-the-art performance on a wide range of biomedical tasks. 🏥 **Biomedical Specialization**: OpenBioLLM-70B is tailored for the unique language and knowledge requirements of the medical and life sciences fields. It was fine-tuned on a vast corpus of high-quality biomedical data, enabling it to understand and generate text with domain-specific accuracy and fluency. 🎓 **Superior Performance**: With 70 billion parameters, OpenBioLLM-70B outperforms other open source biomedical language models of similar scale. It has also demonstrated better results compared to larger proprietary & open-source models like GPT-4, Gemini, Meditron-70B, Med-PaLM-1 & Med-PaLM-2 on biomedical benchmarks. 🧠 **Advanced Training Techniques**: OpenBioLLM-70B builds upon the powerful foundations of the **Meta-Llama-3-70B-Instruct** and [Meta-Llama-3-70B-Instruct](meta-llama/Meta-Llama-3-70B-Instruct) models. It incorporates the DPO dataset and fine-tuning recipe along with a custom diverse medical instruction dataset. 
Key components of the training pipeline include: <div align="center"> <img width="1200px" src="https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/oPchsJsEpQoGcGXVbh7YS.png"> </div> - **Policy Optimization**: [Direct Preference Optimization: Your Language Model is Secretly a Reward Model (DPO)](https://arxiv.org/abs/2305.18290) - **Fine-tuning dataset**: Custom Medical Instruct dataset (We plan to release a sample training dataset in our upcoming paper; please stay updated) This combination of cutting-edge techniques enables OpenBioLLM-70B to align with key capabilities and preferences for biomedical applications. ⚙️ **Release Details**: - **Model Size**: 70 billion parameters - **Quantization**: Optimized quantized versions available [Here](https://huggingface.co/aaditya/OpenBioLLM-70B-GGUF) - **Language(s) (NLP):** en - **Developed By**: [Ankit Pal (Aaditya Ura)](https://aadityaura.github.io/) from Saama AI Labs - **License:** Meta-Llama License - **Fine-tuned from models:** [Meta-Llama-3-70B-Instruct](meta-llama/Meta-Llama-3-70B-Instruct) - **Resources for more information:** - Paper: Coming soon The model can be fine-tuned for more specialized tasks and datasets as needed. OpenBioLLM-70B represents an important step forward in democratizing advanced language AI for the biomedical community. By leveraging state-of-the-art architectures and training techniques from leading open source efforts like Llama-3, we have created a powerful tool to accelerate innovation and discovery in healthcare and the life sciences. We are excited to share OpenBioLLM-70B with researchers and developers around the world. ### Use with transformers **Important: Please use the exact chat template provided by Llama-3 instruct version. Otherwise there will be a degradation in the performance. The model output can be verbose in rare cases. Please consider setting temperature = 0 to make this happen less.** See the snippet below for usage with Transformers: ```python import transformers import torch model_id = "aaditya/OpenBioLLM-Llama3-70B" pipeline = transformers.pipeline( "text-generation", model=model_id, model_kwargs={"torch_dtype": torch.bfloat16}, device="auto", ) messages = [ {"role": "system", "content": "You are an expert and experienced from the healthcare and biomedical domain with extensive medical knowledge and practical experience. Your name is OpenBioLLM, and you were developed by Saama AI Labs. who's willing to help answer the user's query with explanation. In your explanation, leverage your deep medical expertise such as relevant anatomical structures, physiological processes, diagnostic criteria, treatment guidelines, or other pertinent medical concepts. 
Use precise medical terminology while still aiming to make the explanation clear and accessible to a general audience."}, {"role": "user", "content": "How can i split a 3mg or 4mg waefin pill so i can get a 2.5mg pill?"}, ] prompt = pipeline.tokenizer.apply_chat_template( messages, tokenize=False, add_generation_prompt=True ) terminators = [ pipeline.tokenizer.eos_token_id, pipeline.tokenizer.convert_tokens_to_ids("<|eot_id|>") ] outputs = pipeline( prompt, max_new_tokens=256, eos_token_id=terminators, do_sample=True, temperature=0.0, top_p=0.9, ) print(outputs[0]["generated_text"][len(prompt):]) ``` ## **Training procedure** ### **Training hyperparameters** <details> <summary>Click to see details</summary> - learning_rate: 0.0002 - lr_scheduler: cosine - train_batch_size: 12 - eval_batch_size: 8 - GPU: H100 80GB SXM5 - num_devices: 8 - optimizer: adamw_bnb_8bit - lr_scheduler_warmup_steps: 100 - num_epochs: 4 </details> ### **Peft hyperparameters** <details> <summary>Click to see details</summary> - adapter: qlora - lora_r: 128 - lora_alpha: 256 - lora_dropout: 0.05 - lora_target_linear: true -lora_target_modules: - q_proj - v_proj - k_proj - o_proj - gate_proj - down_proj - up_proj </details> ### **Training results** ### **Framework versions** - Transformers 4.39.3 - Pytorch 2.1.2+cu121 - Datasets 2.18.0 - Tokenizers 0.15.1 - Axolotl - Lm harness for evaluation # Benchmark Results 🔥 OpenBioLLM-70B demonstrates superior performance compared to larger models, such as GPT-4, Gemini, Meditron-70B, Med-PaLM-1 & Med-PaLM-2 across 9 diverse biomedical datasets, achieving state-of-the-art results with an average score of 86.06%, despite having a significantly smaller parameter count. The model's strong performance in domain-specific tasks, such as Clinical KG, Medical Genetics, and PubMedQA, highlights its ability to effectively capture and apply biomedical knowledge. 🚨 The GPT-4, Med-PaLM-1, and Med-PaLM-2 results are taken from their official papers. Since Med-PaLM doesn't provide zero-shot accuracy, we are using 5-shot accuracy from their paper for comparison. All results presented are in the zero-shot setting, except for Med-PaLM-2 and Med-PaLM-1, which use 5-shot accuracy. 
| | Clinical KG | Medical Genetics | Anatomy | Pro Medicine | College Biology | College Medicine | MedQA 4 opts | PubMedQA | MedMCQA | Avg | |--------------------|-------------|------------------|---------|--------------|-----------------|------------------|--------------|----------|---------|-------| | **OpenBioLLM-70B** | **92.93** | **93.197** | **83.904** | 93.75 | 93.827 | **85.749** | 78.162 | 78.97 | **74.014** | **86.05588** | | Med-PaLM-2 (5-shot) | 88.3 | 90 | 77.8 | **95.2** | 94.4 | 80.9 | **79.7** | **79.2** | 71.3 | 84.08 | | **GPT-4** | 86.04 | 91 | 80 | 93.01 | **95.14** | 76.88 | 78.87 | 75.2 | 69.52 | 82.85 | | Med-PaLM-1 (Flan-PaLM, 5-shot) | 80.4 | 75 | 63.7 | 83.8 | 88.9 | 76.3 | 67.6 | 79 | 57.6 | 74.7 | | **OpenBioLLM-8B** | 76.101 | 86.1 | 69.829 | 78.21 | 84.213 | 68.042 | 58.993 | 74.12 | 56.913 | 72.502 | | Gemini-1.0 | 76.7 | 75.8 | 66.7 | 77.7 | 88 | 69.2 | 58 | 70.7 | 54.3 | 70.79 | | GPT-3.5 Turbo 1106 | 74.71 | 74 | 72.79 | 72.79 | 72.91 | 64.73 | 57.71 | 72.66 | 53.79 | 66 | | Meditron-70B | 66.79 | 69 | 53.33 | 71.69 | 76.38 | 63 | 57.1 | 76.6 | 46.85 | 64.52 | | gemma-7b | 69.81 | 70 | 59.26 | 66.18 | 79.86 | 60.12 | 47.21 | 76.2 | 48.96 | 64.18 | | Mistral-7B-v0.1 | 68.68 | 71 | 55.56 | 68.38 | 68.06 | 59.54 | 50.82 | 75.4 | 48.2 | 62.85 | | Apollo-7B | 62.26 | 72 | 61.48 | 69.12 | 70.83 | 55.49 | 55.22 | 39.8 | 53.77 | 60 | | MedAlpaca-7b | 57.36 | 69 | 57.04 | 67.28 | 65.28 | 54.34 | 41.71 | 72.8 | 37.51 | 58.03 | | BioMistral-7B | 59.9 | 64 | 56.5 | 60.4 | 59 | 54.7 | 50.6 | 77.5 | 48.1 | 57.3 | | AlpaCare-llama2-7b | 49.81 | 49 | 45.92 | 33.82 | 50 | 43.35 | 29.77 | 72.2 | 34.42 | 45.36 | | ClinicalGPT | 30.56 | 27 | 30.37 | 19.48 | 25 | 24.27 | 26.08 | 63.8 | 28.18 | 30.52 | <div align="center"> <img width="1600px" src="https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/_SzdcJSBjZyo8RS1bTEkP.png"> </div> ## Detailed Medical Subjectwise accuracy ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/UXF-V0col0Z0sS6BGPBkE.png) # Use Cases & Examples 🚨 **Below results are from the quantized version of OpenBioLLM-70B # Summarize Clinical Notes OpenBioLLM-70B can efficiently analyze and summarize complex clinical notes, EHR data, and discharge summaries, extracting key information and generating concise, structured summaries ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/xdwdBgOxNi_TfML0hKlI8.png) # Answer Medical Questions OpenBioLLM-70B can provide answers to a wide range of medical questions. ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/zO95GlwOQEZqCKQF69mE6.png) ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/OKBczKw7gWeW5xsuDpc27.png) <details> <summary>Click to see details</summary> ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/eJGHT5khppYvJb8fQ-YW4.png) ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/Cnbwrqa_-ORHRuNRC2P6Y.png) ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/J9DhdcvukAc9mnnW9fj2C.png) </details> # Clinical Entity Recognition OpenBioLLM-70B can perform advanced clinical entity recognition by identifying and extracting key medical concepts, such as diseases, symptoms, medications, procedures, and anatomical structures, from unstructured clinical text. 
By leveraging its deep understanding of medical terminology and context, the model can accurately annotate and categorize clinical entities, enabling more efficient information retrieval, data analysis, and knowledge discovery from electronic health records, research articles, and other biomedical text sources. This capability can support various downstream applications, such as clinical decision support, pharmacovigilance, and medical research. ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/_69BW4k9LVABFwtxixL45.png) ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/DKy5wYCoPhoPPUc1-x8_J.png) ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/7WD9zCCBZT4-4XlfnIQjl.png) # Biomarkers Extraction ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/ZttoM4AiteT7gFYVhjIpN.png) # Classification OpenBioLLM-70B can perform various biomedical classification tasks, such as disease prediction, sentiment analysis, medical document categorization ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/Bf5MW1d75qT-1F_TR_hC0.png) # De-Identification OpenBioLLM-70B can detect and remove personally identifiable information (PII) from medical records, ensuring patient privacy and compliance with data protection regulations like HIPAA. ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/hKX4kzm--Tw5bj6K78msy.png) **Advisory Notice!**  While OpenBioLLM-70B leverages high-quality data sources, its outputs may still contain inaccuracies, biases, or misalignments that could pose risks if relied upon for medical decision-making without further testing and refinement. The model's performance has not yet been rigorously evaluated in randomized controlled trials or real-world healthcare environments. Therefore, we strongly advise against using OpenBioLLM-70B for any direct patient care, clinical decision support, or other professional medical purposes at this time. Its use should be limited to research, development, and exploratory applications by qualified individuals who understand its limitations. OpenBioLLM-70B is intended solely as a research tool to assist healthcare professionals and should never be considered a replacement for the professional judgment and expertise of a qualified medical doctor. Appropriately adapting and validating OpenBioLLM-70B for specific medical use cases would require significant additional work, potentially including: - Thorough testing and evaluation in relevant clinical scenarios - Alignment with evidence-based guidelines and best practices - Mitigation of potential biases and failure modes - Integration with human oversight and interpretation - Compliance with regulatory and ethical standards Always consult a qualified healthcare provider for personal medical needs. # Citation If you find OpenBioLLM-70B & 8B useful in your work, please cite the model as follows: ``` @misc{OpenBioLLMs, author = {Ankit Pal, Malaikannan Sankarasubbu}, title = {OpenBioLLMs: Advancing Open-Source Large Language Models for Healthcare and Life Sciences}, year = {2024}, publisher = {Hugging Face}, journal = {Hugging Face repository}, howpublished = {\url{https://huggingface.co/aaditya/OpenBioLLM-Llama3-70B}} } ``` The accompanying paper is currently in progress and will be released soon. 
<div align="center"> <h2> 💌 Contact </h2> </div> We look forward to hearing from you and collaborating on this exciting project! **Contributors:** - [Ankit Pal (Aaditya Ura)](https://aadityaura.github.io/) [aadityaura at gmail dot com] - Saama AI Labs - Note: I am looking for a funded PhD opportunity, especially if it fits my Responsible Generative AI, Multimodal LLMs, Geometric Deep Learning, and Healthcare AI skillset. # References We thank the [Meta Team](meta-llama/Meta-Llama-3-70B-Instruct) for their amazing models! Result sources - [1] GPT-4 [Capabilities of GPT-4 on Medical Challenge Problems](https://arxiv.org/abs/2303.13375) - [2] Med-PaLM-1 [Large Language Models Encode Clinical Knowledge](https://arxiv.org/abs/2212.13138) - [3] Med-PaLM-2 [Towards Expert-Level Medical Question Answering with Large Language Models](https://arxiv.org/abs/2305.09617) - [4] Gemini-1.0 [Gemini Goes to Med School](https://arxiv.org/abs/2402.07023)
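Since this repository redistributes the model as GGUF quantizations, a llama-cpp-python sketch may be handy. It is illustrative only: the `.gguf` filename is a placeholder (use the quantization you actually downloaded), and it assumes the GGUF metadata carries the Llama-3 chat template.

```python
from llama_cpp import Llama

llm = Llama(
    model_path="./llama3-openbiollm-70b.Q4_K_M.gguf",  # placeholder filename
    n_ctx=4096,        # context window to allocate
    n_gpu_layers=-1,   # offload all layers to GPU if VRAM allows
)

out = llm.create_chat_completion(
    messages=[
        {"role": "system", "content": "You are a helpful biomedical assistant."},
        {"role": "user", "content": "How long does it take for newborn jaundice to go away?"},
    ],
    max_tokens=256,
    temperature=0.0,
)
print(out["choices"][0]["message"]["content"])
```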
[ "QUESTION_ANSWERING" ]
[ "MEDQA", "PUBMEDQA" ]
mav23/Llama3-OpenBioLLM-8B-GGUF
mav23
null
[ "gguf", "llama-3", "llama", "Mixtral", "instruct", "finetune", "chatml", "DPO", "RLHF", "gpt4", "distillation", "en", "arxiv:2305.18290", "arxiv:2303.13375", "arxiv:2212.13138", "arxiv:2305.09617", "arxiv:2402.07023", "base_model:meta-llama/Meta-Llama-3-8B", "base_model:quantized:meta-llama/Meta-Llama-3-8B", "license:llama3", "endpoints_compatible", "region:us" ]
2024-10-13T00:55:51
2024-10-13T01:46:59
109
0
--- base_model: meta-llama/Meta-Llama-3-8B language: - en license: llama3 tags: - llama-3 - llama - Mixtral - instruct - finetune - chatml - DPO - RLHF - gpt4 - distillation widget: - example_title: OpenBioLLM-8B messages: - role: system content: You are an expert and experienced from the healthcare and biomedical domain with extensive medical knowledge and practical experience. - role: user content: How long does it take for newborn jaundice to go away? output: text: 'Newborn jaundice, also known as neonatal jaundice, is a common condition in newborns where the yellowing of the skin and eyes occurs due to an elevated level of bilirubin in the blood. Bilirubin is a yellow pigment that forms when red blood cells break down. In most cases, newborn jaundice resolves on its own without any specific treatment. The duration of newborn jaundice can vary depending on several factors such as the underlying cause, gestational age at birth, and individual variations in bilirubin metabolism. Here are some general guidelines: 1. Physiological jaundice: This is the most common type of newborn jaundice and usually appears within 24-72 hours after birth. It tends to peak between the second and fifth day of life and gradually improves over the next week or two. By the time the baby is one week old, the jaundice should have mostly resolved. 2. Breast milk jaundice: This type of jaundice occurs in breastfed babies and may appear later than physiological jaundice, typically between the fifth and fourteenth day of life. It tends to persist for a longer duration but usually resolves within six weeks after birth. 3. Pathological jaundice: This type of jaundice is less common and occurs due to an underlying medical condition that affects bilirubin metabolism or liver function. The duration of pathological jaundice depends on the specific cause and may require treatment. It''s important for parents to monitor their newborn''s jaundice closely and seek medical advice if the jaundice progresses rapidly, becomes severe, or is accompanied by other symptoms such as poor feeding, lethargy, or excessive sleepiness. In these cases, further evaluation and management may be necessary. Remember that each baby is unique, and the timing of jaundice resolution can vary. If you have concerns about your newborn''s jaundice, it''s always best to consult with a healthcare professional for personalized advice and guidance.' 
model-index: - name: OpenBioLLM-8B results: [] --- <div align="center"> <img width="260px" src="https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/BrQCb95lmEIFz79QAmoNA.png"></div> ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/2FhDh8NDvMl7iSxbQz9BP.png) <div align="center"> <h1>Advancing Open-source Large Language Models in Medical Domain</h1> </div> <p align="center" style="margin-top: 0px;"> <a href="https://colab.research.google.com/drive/1F5oV20InEYeAJGmBwYF9NM_QhLmjBkKJ?usp=sharing"> <img src="https://colab.research.google.com/assets/colab-badge.svg" alt="OpenChat Logo" style="width:20px; vertical-align: middle; display: inline-block; margin-right: 5px; margin-left: 10px; margin-top: 0px; margin-bottom: 0px;"/> <span class="link-text" style=" margin-right: 5px;">Online Demo</span> </a> | <a href="https://github.com/openlifescience-ai"> <img src="https://github.githubassets.com/assets/GitHub-Mark-ea2971cee799.png" alt="GitHub Logo" style="width:20px; vertical-align: middle; display: inline-block; margin-right: 5px; margin-left: 5px; margin-top: 0px; margin-bottom: 0px;"/> <span class="link-text" style=" margin-right: 5px;">GitHub</span> </a> | <a href="#"> <img src="https://github.com/alpayariyak/openchat/blob/master/assets/arxiv-logomark-small-square-border.png?raw=true" alt="ArXiv Logo" style="width:20px; vertical-align: middle; display: inline-block; margin-right: 5px; margin-left: 5px; margin-top: 0px; margin-bottom: 0px;"/> <span class="link-text" style="margin-right: 5px;">Paper</span> </a> | <a href="https://discord.gg/A5Fjf5zC69"> <img src="https://cloud.githubusercontent.com/assets/6291467/26705903/96c2d66e-477c-11e7-9f4e-f3c0efe96c9a.png" alt="Discord Logo" style="width:20px; vertical-align: middle; display: inline-block; margin-right: 5px; margin-left: 5px; margin-top: 0px; margin-bottom: 0px;"/> <span class="link-text">Discord</span> </a> </p> ![image/jpeg](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/KGmRE5w2sepNtwsEu8t7K.jpeg) Introducing OpenBioLLM-8B: A State-of-the-Art Open Source Biomedical Large Language Model OpenBioLLM-8B is an advanced open source language model designed specifically for the biomedical domain. Developed by Saama AI Labs, this model leverages cutting-edge techniques to achieve state-of-the-art performance on a wide range of biomedical tasks. 🏥 **Biomedical Specialization**: OpenBioLLM-8B is tailored for the unique language and knowledge requirements of the medical and life sciences fields. It was fine-tuned on a vast corpus of high-quality biomedical data, enabling it to understand and generate text with domain-specific accuracy and fluency. 🎓 **Superior Performance**: With 8 billion parameters, OpenBioLLM-8B outperforms other open source biomedical language models of similar scale. It has also demonstrated better results compared to larger proprietary & open-source models like GPT-3.5 and Meditron-70B on biomedical benchmarks. 🧠 **Advanced Training Techniques**: OpenBioLLM-8B builds upon the powerful foundations of the **Meta-Llama-3-8B** and [Meta-Llama-3-8B](meta-llama/Meta-Llama-3-8B) models. It incorporates the DPO dataset and fine-tuning recipe along with a custom diverse medical instruction dataset. 
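For orientation, the DPO alignment step mentioned above (and spelled out in the training-pipeline components that follow) optimizes a simple preference loss. The sketch below is a generic PyTorch illustration of the objective from the cited Rafailov et al. (2023) paper, not the actual OpenBioLLM training code; it assumes the inputs are per-sequence log-probabilities of the chosen and rejected completions under the policy and the frozen reference model, and the `beta=0.1` default is only an assumed example value.

```python
import torch
import torch.nn.functional as F


def dpo_loss(policy_chosen_logps: torch.Tensor,
             policy_rejected_logps: torch.Tensor,
             ref_chosen_logps: torch.Tensor,
             ref_rejected_logps: torch.Tensor,
             beta: float = 0.1) -> torch.Tensor:
    """Direct Preference Optimization loss (Rafailov et al., 2023), batch-averaged."""
    # Log-ratio of policy to reference for the preferred and rejected completions.
    chosen_logratio = policy_chosen_logps - ref_chosen_logps
    rejected_logratio = policy_rejected_logps - ref_rejected_logps
    # Maximize the margin between the two log-ratios, scaled by beta.
    return -F.logsigmoid(beta * (chosen_logratio - rejected_logratio)).mean()
```

The `beta` term controls how strongly the fine-tuned policy is kept close to the reference model; libraries such as Hugging Face TRL implement this same objective in their DPO trainer.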
Key components of the training pipeline include: <div align="center"> <img width="1200px" src="https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/oPchsJsEpQoGcGXVbh7YS.png"> </div> - **Policy Optimization**: [Direct Preference Optimization: Your Language Model is Secretly a Reward Model (DPO)](https://arxiv.org/abs/2305.18290) - **Ranking Dataset**: [berkeley-nest/Nectar](https://huggingface.co/datasets/berkeley-nest/Nectar) - **Fine-tuning dataset**: Custom Medical Instruct dataset (We plan to release a sample training dataset in our upcoming paper; please stay updated) This combination of cutting-edge techniques enables OpenBioLLM-8B to align with key capabilities and preferences for biomedical applications. ⚙️ **Release Details**: - **Model Size**: 8 billion parameters - **Quantization**: Optimized quantized versions available [Here](https://huggingface.co/aaditya/OpenBioLLM-Llama3-8B-GGUF) - **Language(s) (NLP):** en - **Developed By**: [Ankit Pal (Aaditya Ura)](https://aadityaura.github.io/) from Saama AI Labs - **License:** Meta-Llama License - **Fine-tuned from models:** [meta-llama/Meta-Llama-3-8B](meta-llama/Meta-Llama-3-8B) - **Resources for more information:** - Paper: Coming soon The model can be fine-tuned for more specialized tasks and datasets as needed. OpenBioLLM-8B represents an important step forward in democratizing advanced language AI for the biomedical community. By leveraging state-of-the-art architectures and training techniques from leading open source efforts like Llama-3, we have created a powerful tool to accelerate innovation and discovery in healthcare and the life sciences. We are excited to share OpenBioLLM-8B with researchers and developers around the world. ### Use with transformers **Important: Please use the exact chat template provided by the Llama-3 instruct version; otherwise there will be a degradation in performance. The model output can be verbose in rare cases; consider setting temperature = 0 (greedy decoding) to reduce this.** See the snippet below for usage with Transformers: ```python import transformers import torch model_id = "aaditya/OpenBioLLM-Llama3-8B" pipeline = transformers.pipeline( "text-generation", model=model_id, model_kwargs={"torch_dtype": torch.bfloat16}, device_map="auto", ) messages = [ {"role": "system", "content": "You are an expert and experienced from the healthcare and biomedical domain with extensive medical knowledge and practical experience. Your name is OpenBioLLM, and you were developed by Saama AI Labs. who's willing to help answer the user's query with explanation. In your explanation, leverage your deep medical expertise such as relevant anatomical structures, physiological processes, diagnostic criteria, treatment guidelines, or other pertinent medical concepts.
Use precise medical terminology while still aiming to make the explanation clear and accessible to a general audience."}, {"role": "user", "content": "How can i split a 3mg or 4mg warfarin pill so i can get a 2.5mg pill?"}, ] prompt = pipeline.tokenizer.apply_chat_template( messages, tokenize=False, add_generation_prompt=True ) terminators = [ pipeline.tokenizer.eos_token_id, pipeline.tokenizer.convert_tokens_to_ids("<|eot_id|>") ] outputs = pipeline( prompt, max_new_tokens=256, eos_token_id=terminators, do_sample=False, ) print(outputs[0]["generated_text"][len(prompt):]) ``` ## **Training procedure** ### **Training hyperparameters** <details> <summary>Click to see details</summary> - learning_rate: 0.0002 - lr_scheduler: cosine - train_batch_size: 12 - eval_batch_size: 8 - GPU: H100 80GB SXM5 - num_devices: 1 - optimizer: adamw_bnb_8bit - lr_scheduler_warmup_steps: 100 - num_epochs: 4 </details> ### **Peft hyperparameters** <details> <summary>Click to see details</summary> - adapter: qlora - lora_r: 128 - lora_alpha: 256 - lora_dropout: 0.05 - lora_target_linear: true - lora_target_modules: - q_proj - v_proj - k_proj - o_proj - gate_proj - down_proj - up_proj </details> ### **Training results** ### **Framework versions** - Transformers 4.39.3 - PyTorch 2.1.2+cu121 - Datasets 2.18.0 - Tokenizers 0.15.1 - Axolotl - lm-evaluation-harness (for evaluation) # Benchmark Results 🔥 OpenBioLLM-8B demonstrates superior performance compared to larger models such as GPT-3.5 and Meditron-70B across 9 diverse biomedical datasets, achieving state-of-the-art results with an average score of 72.50%, despite having a significantly smaller parameter count. The model's strong performance in domain-specific tasks, such as Clinical KG, Medical Genetics, and PubMedQA, highlights its ability to effectively capture and apply biomedical knowledge. 🚨 The GPT-4, Med-PaLM-1, and Med-PaLM-2 results are taken from their official papers. Since Med-PaLM doesn't provide zero-shot accuracy, we are using 5-shot accuracy from their paper for comparison. All results presented are in the zero-shot setting, except for Med-PaLM-2 and Med-PaLM-1, which use 5-shot accuracy.
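As a side note to the training details above, the listed QLoRA settings roughly correspond to the following hypothetical `peft.LoraConfig`. This is an illustrative sketch only, not the exact Axolotl configuration used to train OpenBioLLM-8B, and it omits the 4-bit (bitsandbytes) loading of the base model.

```python
from peft import LoraConfig

# Hypothetical adapter config mirroring the PEFT hyperparameters listed above.
lora_config = LoraConfig(
    r=128,
    lora_alpha=256,
    lora_dropout=0.05,
    bias="none",
    task_type="CAUSAL_LM",
    target_modules=[
        "q_proj", "v_proj", "k_proj", "o_proj",
        "gate_proj", "down_proj", "up_proj",
    ],
)
```

Such a config would typically be applied to a loaded base model with `peft.get_peft_model(model, lora_config)` before training.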
| | Clinical KG | Medical Genetics | Anatomy | Pro Medicine | College Biology | College Medicine | MedQA 4 opts | PubMedQA | MedMCQA | Avg | |--------------------|-------------|------------------|---------|--------------|-----------------|------------------|--------------|----------|---------|-------| | **OpenBioLLM-70B** | **92.93** | **93.197** | **83.904** | 93.75 | 93.827 | **85.749** | 78.162 | 78.97 | **74.014** | **86.05588** | | Med-PaLM-2 (5-shot) | 88.3 | 90 | 77.8 | **95.2** | 94.4 | 80.9 | **79.7** | **79.2** | 71.3 | 84.08 | | **GPT-4** | 86.04 | 91 | 80 | 93.01 | **95.14** | 76.88 | 78.87 | 75.2 | 69.52 | 82.85 | | Med-PaLM-1 (Flan-PaLM, 5-shot) | 80.4 | 75 | 63.7 | 83.8 | 88.9 | 76.3 | 67.6 | 79 | 57.6 | 74.7 | | **OpenBioLLM-8B** | 76.101 | 86.1 | 69.829 | 78.21 | 84.213 | 68.042 | 58.993 | 74.12 | 56.913 | 72.502 | | Gemini-1.0 | 76.7 | 75.8 | 66.7 | 77.7 | 88 | 69.2 | 58 | 70.7 | 54.3 | 70.79 | | GPT-3.5 Turbo 1106 | 74.71 | 74 | 72.79 | 72.79 | 72.91 | 64.73 | 57.71 | 72.66 | 53.79 | 66 | | Meditron-70B | 66.79 | 69 | 53.33 | 71.69 | 76.38 | 63 | 57.1 | 76.6 | 46.85 | 64.52 | | gemma-7b | 69.81 | 70 | 59.26 | 66.18 | 79.86 | 60.12 | 47.21 | 76.2 | 48.96 | 64.18 | | Mistral-7B-v0.1 | 68.68 | 71 | 55.56 | 68.38 | 68.06 | 59.54 | 50.82 | 75.4 | 48.2 | 62.85 | | Apollo-7B | 62.26 | 72 | 61.48 | 69.12 | 70.83 | 55.49 | 55.22 | 39.8 | 53.77 | 60 | | MedAlpaca-7b | 57.36 | 69 | 57.04 | 67.28 | 65.28 | 54.34 | 41.71 | 72.8 | 37.51 | 58.03 | | BioMistral-7B | 59.9 | 64 | 56.5 | 60.4 | 59 | 54.7 | 50.6 | 77.5 | 48.1 | 57.3 | | AlpaCare-llama2-7b | 49.81 | 49 | 45.92 | 33.82 | 50 | 43.35 | 29.77 | 72.2 | 34.42 | 45.36 | | ClinicalGPT | 30.56 | 27 | 30.37 | 19.48 | 25 | 24.27 | 26.08 | 63.8 | 28.18 | 30.52 | <div align="center"> <img width="1600px" src="https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/_SzdcJSBjZyo8RS1bTEkP.png"> </div> ## Detailed Medical Subjectwise accuracy ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/UXF-V0col0Z0sS6BGPBkE.png) # Use Cases & Examples 🚨 **Below results are from the quantized version of OpenBioLLM-70B** # Summarize Clinical Notes OpenBioLLM-70B can efficiently analyze and summarize complex clinical notes, EHR data, and discharge summaries, extracting key information and generating concise, structured summaries ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/xdwdBgOxNi_TfML0hKlI8.png) # Answer Medical Questions OpenBioLLM-70B can provide answers to a wide range of medical questions. ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/zO95GlwOQEZqCKQF69mE6.png) ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/OKBczKw7gWeW5xsuDpc27.png) <details> <summary>Click to see details</summary> ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/eJGHT5khppYvJb8fQ-YW4.png) ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/Cnbwrqa_-ORHRuNRC2P6Y.png) ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/J9DhdcvukAc9mnnW9fj2C.png) </details> # Clinical Entity Recognition OpenBioLLM-70B can perform advanced clinical entity recognition by identifying and extracting key medical concepts, such as diseases, symptoms, medications, procedures, and anatomical structures, from unstructured clinical text. 
By leveraging its deep understanding of medical terminology and context, the model can accurately annotate and categorize clinical entities, enabling more efficient information retrieval, data analysis, and knowledge discovery from electronic health records, research articles, and other biomedical text sources. This capability can support various downstream applications, such as clinical decision support, pharmacovigilance, and medical research. ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/_69BW4k9LVABFwtxixL45.png) ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/DKy5wYCoPhoPPUc1-x8_J.png) ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/7WD9zCCBZT4-4XlfnIQjl.png) # Biomarkers Extraction ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/ZttoM4AiteT7gFYVhjIpN.png) # Classification OpenBioLLM-70B can perform various biomedical classification tasks, such as disease prediction, sentiment analysis, and medical document categorization. ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/Bf5MW1d75qT-1F_TR_hC0.png) # De-Identification OpenBioLLM-70B can detect and remove personally identifiable information (PII) from medical records, ensuring patient privacy and compliance with data protection regulations like HIPAA. ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/hKX4kzm--Tw5bj6K78msy.png) **Advisory Notice!** While OpenBioLLM-70B & 8B leverage high-quality data sources, their outputs may still contain inaccuracies, biases, or misalignments that could pose risks if relied upon for medical decision-making without further testing and refinement. The models' performance has not yet been rigorously evaluated in randomized controlled trials or real-world healthcare environments. Therefore, we strongly advise against using OpenBioLLM-70B & 8B for any direct patient care, clinical decision support, or other professional medical purposes at this time. Their use should be limited to research, development, and exploratory applications by qualified individuals who understand their limitations. OpenBioLLM-70B & 8B are intended solely as a research tool to assist healthcare professionals and should never be considered a replacement for the professional judgment and expertise of a qualified medical doctor. Appropriately adapting and validating OpenBioLLM-70B & 8B for specific medical use cases would require significant additional work, potentially including: - Thorough testing and evaluation in relevant clinical scenarios - Alignment with evidence-based guidelines and best practices - Mitigation of potential biases and failure modes - Integration with human oversight and interpretation - Compliance with regulatory and ethical standards Always consult a qualified healthcare provider for personal medical needs. # Citation If you find OpenBioLLM-70B & 8B useful in your work, please cite the model as follows: ``` @misc{OpenBioLLMs, author = {Ankit Pal and Malaikannan Sankarasubbu}, title = {OpenBioLLMs: Advancing Open-Source Large Language Models for Healthcare and Life Sciences}, year = {2024}, publisher = {Hugging Face}, journal = {Hugging Face repository}, howpublished = {\url{https://huggingface.co/aaditya/OpenBioLLM-Llama3-70B}} } ``` The accompanying paper is currently in progress and will be released soon.
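Because this particular repository distributes GGUF quantizations of OpenBioLLM-8B rather than the original weights, a common way to run them is via llama.cpp or its Python bindings. The sketch below is illustrative only; the GGUF file name is hypothetical and should be replaced with whichever quantization level you actually download from this repo, and generation settings follow the card's temperature-0 advice.

```python
from llama_cpp import Llama

# Hypothetical file name: substitute the GGUF file you downloaded from this repo.
llm = Llama(model_path="llama3-openbiollm-8b.Q4_K_M.gguf", n_ctx=4096)

response = llm.create_chat_completion(
    messages=[
        {"role": "system",
         "content": "You are OpenBioLLM, a helpful biomedical assistant."},
        {"role": "user",
         "content": "How long does it take for newborn jaundice to go away?"},
    ],
    max_tokens=256,
    temperature=0.0,  # greedy-style decoding, in line with the card's advice
)
print(response["choices"][0]["message"]["content"])
```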
<div align="center"> <h2> 💌 Contact </h2> </div> We look forward to hearing you and collaborating on this exciting project! **Contributors:** - [Ankit Pal (Aaditya Ura)](https://aadityaura.github.io/) [aadityaura at gmail dot com] - Saama AI Labs - Note: I am looking for a funded PhD opportunity, especially if it fits my Responsible Generative AI, Multimodal LLMs, Geometric Deep Learning, and Healthcare AI skillset. # References We thank the [Meta Team](meta-llama/Meta-Llama-3-70B-Instruct) for their amazing models! Result sources - [1] GPT-4 [Capabilities of GPT-4 on Medical Challenge Problems] (https://arxiv.org/abs/2303.13375) - [2] Med-PaLM-1 [Large Language Models Encode Clinical Knowledge](https://arxiv.org/abs/2212.13138) - [3] Med-PaLM-2 [Towards Expert-Level Medical Question Answering with Large Language Models](https://arxiv.org/abs/2305.09617) - [4] Gemini-1.0 [Gemini Goes to Med School](https://arxiv.org/abs/2402.07023)
[ "QUESTION_ANSWERING" ]
[ "MEDQA", "PUBMEDQA" ]
mav23/pythia-1.4b-GGUF
mav23
null
[ "gguf", "pytorch", "causal-lm", "pythia", "en", "dataset:EleutherAI/the_pile", "arxiv:2304.01373", "arxiv:2101.00027", "arxiv:2201.07311", "license:apache-2.0", "endpoints_compatible", "region:us" ]
2024-11-20T15:03:12
2024-11-20T15:15:38
109
0
--- datasets: - EleutherAI/the_pile language: - en license: apache-2.0 tags: - pytorch - causal-lm - pythia --- The *Pythia Scaling Suite* is a collection of models developed to facilitate interpretability research [(see paper)](https://arxiv.org/pdf/2304.01373.pdf). It contains two sets of eight models of sizes 70M, 160M, 410M, 1B, 1.4B, 2.8B, 6.9B, and 12B. For each size, there are two models: one trained on the Pile, and one trained on the Pile after the dataset has been globally deduplicated. All 8 model sizes are trained on the exact same data, in the exact same order. We also provide 154 intermediate checkpoints per model, hosted on Hugging Face as branches. The Pythia model suite was deliberately designed to promote scientific research on large language models, especially interpretability research. Despite not centering downstream performance as a design goal, we find the models <a href="#evaluations">match or exceed</a> the performance of similar and same-sized models, such as those in the OPT and GPT-Neo suites. <details> <summary style="font-weight:600">Details on previous early release and naming convention.</summary> Previously, we released an early version of the Pythia suite to the public. However, we decided to retrain the model suite to address a few hyperparameter discrepancies. This model card <a href="#changelog">lists the changes</a>; see appendix B in the Pythia paper for further discussion. We found no difference in benchmark performance between the two Pythia versions. The old models are [still available](https://huggingface.co/models?other=pythia_v0), but we suggest the retrained suite if you are just starting to use Pythia.<br> **This is the current release.** Please note that all models in the *Pythia* suite were renamed in January 2023. For clarity, a <a href="#naming-convention-and-parameter-count">table comparing the old and new names</a> is provided in this model card, together with exact parameter counts. </details> <br> # Pythia-1.4B ## Model Details - Developed by: [EleutherAI](http://eleuther.ai) - Model type: Transformer-based Language Model - Language: English - Learn more: [Pythia's GitHub repository](https://github.com/EleutherAI/pythia) for training procedure, config files, and details on how to use. [See paper](https://arxiv.org/pdf/2304.01373.pdf) for more evals and implementation details. - Library: [GPT-NeoX](https://github.com/EleutherAI/gpt-neox) - License: Apache 2.0 - Contact: to ask questions about this model, join the [EleutherAI Discord](https://discord.gg/zBGx3azzUn), and post them in `#release-discussion`. Please read the existing *Pythia* documentation before asking about it in the EleutherAI Discord. For general correspondence: [contact@eleuther. ai](mailto:[email protected]). 
<figure> | Pythia model | Non-Embedding Params | Layers | Model Dim | Heads | Batch Size | Learning Rate | Equivalent Models | | -----------: | -------------------: | :----: | :-------: | :---: | :--------: | :-------------------: | :--------------------: | | 70M | 18,915,328 | 6 | 512 | 8 | 2M | 1.0 x 10<sup>-3</sup> | — | | 160M | 85,056,000 | 12 | 768 | 12 | 2M | 6.0 x 10<sup>-4</sup> | GPT-Neo 125M, OPT-125M | | 410M | 302,311,424 | 24 | 1024 | 16 | 2M | 3.0 x 10<sup>-4</sup> | OPT-350M | | 1.0B | 805,736,448 | 16 | 2048 | 8 | 2M | 3.0 x 10<sup>-4</sup> | — | | 1.4B | 1,208,602,624 | 24 | 2048 | 16 | 2M | 2.0 x 10<sup>-4</sup> | GPT-Neo 1.3B, OPT-1.3B | | 2.8B | 2,517,652,480 | 32 | 2560 | 32 | 2M | 1.6 x 10<sup>-4</sup> | GPT-Neo 2.7B, OPT-2.7B | | 6.9B | 6,444,163,072 | 32 | 4096 | 32 | 2M | 1.2 x 10<sup>-4</sup> | OPT-6.7B | | 12B | 11,327,027,200 | 36 | 5120 | 40 | 2M | 1.2 x 10<sup>-4</sup> | — | <figcaption>Engineering details for the <i>Pythia Suite</i>. Deduped and non-deduped models of a given size have the same hyperparameters. “Equivalent” models have <b>exactly</b> the same architecture, and the same number of non-embedding parameters.</figcaption> </figure> ## Uses and Limitations ### Intended Use The primary intended use of Pythia is research on the behavior, functionality, and limitations of large language models. This suite is intended to provide a controlled setting for performing scientific experiments. We also provide 154 checkpoints per model: initial `step0`, 10 log-spaced checkpoints `step{1,2,4...512}`, and 143 evenly-spaced checkpoints from `step1000` to `step143000`. These checkpoints are hosted on Hugging Face as branches. Note that branch `143000` corresponds exactly to the model checkpoint on the `main` branch of each model. You may also further fine-tune and adapt Pythia-1.4B for deployment, as long as your use is in accordance with the Apache 2.0 license. Pythia models work with the Hugging Face [Transformers Library](https://huggingface.co/docs/transformers/index). If you decide to use pre-trained Pythia-1.4B as a basis for your fine-tuned model, please conduct your own risk and bias assessment. ### Out-of-scope use The Pythia Suite is **not** intended for deployment. It is not a in itself a product and cannot be used for human-facing interactions. For example, the model may generate harmful or offensive text. Please evaluate the risks associated with your particular use case. Pythia models are English-language only, and are not suitable for translation or generating text in other languages. Pythia-1.4B has not been fine-tuned for downstream contexts in which language models are commonly deployed, such as writing genre prose, or commercial chatbots. This means Pythia-1.4B will **not** respond to a given prompt the way a product like ChatGPT does. This is because, unlike this model, ChatGPT was fine-tuned using methods such as Reinforcement Learning from Human Feedback (RLHF) to better “follow” human instructions. ### Limitations and biases The core functionality of a large language model is to take a string of text and predict the next token. The token used by the model need not produce the most “accurate” text. Never rely on Pythia-1.4B to produce factually accurate output. This model was trained on [the Pile](https://pile.eleuther.ai/), a dataset known to contain profanity and texts that are lewd or otherwise offensive. 
See [Section 6 of the Pile paper](https://arxiv.org/abs/2101.00027) for a discussion of documented biases with regards to gender, religion, and race. Pythia-1.4B may produce socially unacceptable or undesirable text, *even if* the prompt itself does not include anything explicitly offensive. If you plan on using text generated through, for example, the Hosted Inference API, we recommend having a human curate the outputs of this language model before presenting it to other people. Please inform your audience that the text was generated by Pythia-1.4B. ### Quickstart Pythia models can be loaded and used via the following code, demonstrated here for the third `pythia-70m-deduped` checkpoint: ```python from transformers import GPTNeoXForCausalLM, AutoTokenizer model = GPTNeoXForCausalLM.from_pretrained( "EleutherAI/pythia-70m-deduped", revision="step3000", cache_dir="./pythia-70m-deduped/step3000", ) tokenizer = AutoTokenizer.from_pretrained( "EleutherAI/pythia-70m-deduped", revision="step3000", cache_dir="./pythia-70m-deduped/step3000", ) inputs = tokenizer("Hello, I am", return_tensors="pt") tokens = model.generate(**inputs) tokenizer.decode(tokens[0]) ``` Revision/branch `step143000` corresponds exactly to the model checkpoint on the `main` branch of each model.<br> For more information on how to use all Pythia models, see [documentation on GitHub](https://github.com/EleutherAI/pythia). ## Training ### Training data [The Pile](https://pile.eleuther.ai/) is a 825GiB general-purpose dataset in English. It was created by EleutherAI specifically for training large language models. It contains texts from 22 diverse sources, roughly broken down into five categories: academic writing (e.g. arXiv), internet (e.g. CommonCrawl), prose (e.g. Project Gutenberg), dialogue (e.g. YouTube subtitles), and miscellaneous (e.g. GitHub, Enron Emails). See [the Pile paper](https://arxiv.org/abs/2101.00027) for a breakdown of all data sources, methodology, and a discussion of ethical implications. Consult [the datasheet](https://arxiv.org/abs/2201.07311) for more detailed documentation about the Pile and its component datasets. The Pile can be downloaded from the [official website](https://pile.eleuther.ai/), or from a [community mirror](https://the-eye.eu/public/AI/pile/).<br> The Pile was **not** deduplicated before being used to train Pythia-1.4B. ### Training procedure All models were trained on the exact same data, in the exact same order. Each model saw 299,892,736,000 tokens during training, and 143 checkpoints for each model are saved every 2,097,152,000 tokens, spaced evenly throughout training, from `step1000` to `step143000` (which is the same as `main`). In addition, we also provide frequent early checkpoints: `step0` and `step{1,2,4...512}`. This corresponds to training for just under 1 epoch on the Pile for non-deduplicated models, and about 1.5 epochs on the deduplicated Pile. All *Pythia* models trained for 143000 steps at a batch size of 2M (2,097,152 tokens).<br> See [GitHub](https://github.com/EleutherAI/pythia) for more details on training procedure, including [how to reproduce it](https://github.com/EleutherAI/pythia/blob/main/README.md#reproducing-training).<br> Pythia uses the same tokenizer as [GPT-NeoX- 20B](https://huggingface.co/EleutherAI/gpt-neox-20b). ## Evaluations All 16 *Pythia* models were evaluated using the [LM Evaluation Harness](https://github.com/EleutherAI/lm-evaluation-harness). 
You can access the results by model and step at `results/json/*` in the [GitHub repository](https://github.com/EleutherAI/pythia/tree/main/results/json/).<br> Expand the sections below to see plots of evaluation results for all Pythia and Pythia-deduped models compared with OPT and BLOOM. <details> <summary>LAMBADA – OpenAI</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/lambada_openai_v1.png" style="width:auto"/> </details> <details> <summary>Physical Interaction: Question Answering (PIQA)</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/piqa_v1.png" style="width:auto"/> </details> <details> <summary>WinoGrande</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/winogrande_v1.png" style="width:auto"/> </details> <details> <summary>AI2 Reasoning Challenge—Easy Set</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/arc_easy_v1.png" style="width:auto"/> </details> <details> <summary>SciQ</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/sciq_v1.png" style="width:auto"/> </details> ## Changelog This section compares differences between previously released [Pythia v0](https://huggingface.co/models?other=pythia_v0) and the current models. See Appendix B of the Pythia paper for further discussion of these changes and the motivation behind them. We found that retraining Pythia had no impact on benchmark performance. - All model sizes are now trained with uniform batch size of 2M tokens. Previously, the models of size 160M, 410M, and 1.4B parameters were trained with batch sizes of 4M tokens. - We added checkpoints at initialization (step 0) and steps {1,2,4,8,16,32,64, 128,256,512} in addition to every 1000 training steps. - Flash Attention was used in the new retrained suite. - We remedied a minor inconsistency that existed in the original suite: all models of size 2.8B parameters or smaller had a learning rate (LR) schedule which decayed to a minimum LR of 10% the starting LR rate, but the 6.9B and 12B models all used an LR schedule which decayed to a minimum LR of 0. In the redone training runs, we rectified this inconsistency: all models now were trained with LR decaying to a minimum of 0.1× their maximum LR. ### Naming convention and parameter count *Pythia* models were renamed in January 2023. It is possible that the old naming convention still persists in some documentation by accident. The current naming convention (70M, 160M, etc.) is based on total parameter count. <figure style="width:32em"> | current Pythia suffix | old suffix | total params | non-embedding params | | --------------------: | ---------: | -------------: | -------------------: | | 70M | 19M | 70,426,624 | 18,915,328 | | 160M | 125M | 162,322,944 | 85,056,000 | | 410M | 350M | 405,334,016 | 302,311,424 | | 1B | 800M | 1,011,781,632 | 805,736,448 | | 1.4B | 1.3B | 1,414,647,808 | 1,208,602,624 | | 2.8B | 2.7B | 2,775,208,960 | 2,517,652,480 | | 6.9B | 6.7B | 6,857,302,016 | 6,444,163,072 | | 12B | 13B | 11,846,072,320 | 11,327,027,200 | </figure>
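As a small usage sketch building on the Quickstart above: because the checkpoints are published as branches with the `step…` naming described in this card, you can load several revisions in a loop to watch a quantity of interest evolve over training. The example below is an illustration (not part of the official Pythia tooling) that compares the causal-LM loss on a fixed prompt across a few Pythia-1.4B checkpoints; the chosen revisions and prompt are arbitrary.

```python
import torch
from transformers import AutoTokenizer, GPTNeoXForCausalLM

# Branch names follow the card's checkpoint naming scheme (step1000 ... step143000).
revisions = ["step1000", "step36000", "step72000", "step143000"]
prompt = "Hello, I am"

tokenizer = AutoTokenizer.from_pretrained("EleutherAI/pythia-1.4b")
inputs = tokenizer(prompt, return_tensors="pt")

for rev in revisions:
    # Each revision is a full ~1.4B-parameter checkpoint; the model is reloaded per step.
    model = GPTNeoXForCausalLM.from_pretrained("EleutherAI/pythia-1.4b", revision=rev)
    with torch.no_grad():
        loss = model(**inputs, labels=inputs["input_ids"]).loss
    print(f"{rev}: loss = {loss.item():.3f}")
    del model
```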
[ "QUESTION_ANSWERING", "TRANSLATION" ]
[ "SCIQ" ]
mirekphd/gte-Qwen2-7B-instruct-Q2_K-GGUF
mirekphd
sentence-similarity
[ "sentence-transformers", "gguf", "mteb", "transformers", "Qwen2", "sentence-similarity", "llama-cpp", "gguf-my-repo", "base_model:Alibaba-NLP/gte-Qwen2-7B-instruct", "base_model:quantized:Alibaba-NLP/gte-Qwen2-7B-instruct", "license:apache-2.0", "model-index", "autotrain_compatible", "endpoints_compatible", "region:us", "conversational" ]
2024-12-02T18:03:18
2024-12-02T18:03:36
109
0
--- base_model: Alibaba-NLP/gte-Qwen2-7B-instruct license: apache-2.0 tags: - mteb - sentence-transformers - transformers - Qwen2 - sentence-similarity - llama-cpp - gguf-my-repo model-index: - name: gte-qwen2-7B-instruct results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 91.31343283582089 - type: ap value: 67.64251402604096 - type: f1 value: 87.53372530755692 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 97.497825 - type: ap value: 96.30329547047529 - type: f1 value: 97.49769793778039 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 62.564 - type: f1 value: 60.975777935041066 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: map_at_1 value: 36.486000000000004 - type: map_at_10 value: 54.842 - type: map_at_100 value: 55.206999999999994 - type: map_at_1000 value: 55.206999999999994 - type: map_at_3 value: 49.893 - type: map_at_5 value: 53.105000000000004 - type: mrr_at_1 value: 37.34 - type: mrr_at_10 value: 55.143 - type: mrr_at_100 value: 55.509 - type: mrr_at_1000 value: 55.509 - type: mrr_at_3 value: 50.212999999999994 - type: mrr_at_5 value: 53.432 - type: ndcg_at_1 value: 36.486000000000004 - type: ndcg_at_10 value: 64.273 - type: ndcg_at_100 value: 65.66199999999999 - type: ndcg_at_1000 value: 65.66199999999999 - type: ndcg_at_3 value: 54.352999999999994 - type: ndcg_at_5 value: 60.131 - type: precision_at_1 value: 36.486000000000004 - type: precision_at_10 value: 9.395000000000001 - type: precision_at_100 value: 0.996 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 22.428 - type: precision_at_5 value: 16.259 - type: recall_at_1 value: 36.486000000000004 - type: recall_at_10 value: 93.95400000000001 - type: recall_at_100 value: 99.644 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 67.283 - type: recall_at_5 value: 81.294 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 56.461169803700564 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 51.73600434466286 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 67.57827065898053 - type: mrr value: 79.08136569493911 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 83.53324575999243 - type: cos_sim_spearman value: 81.37173362822374 - type: euclidean_pearson value: 82.19243335103444 - type: euclidean_spearman value: 81.33679307304334 - type: manhattan_pearson 
value: 82.38752665975699 - type: manhattan_spearman value: 81.31510583189689 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 87.56818181818181 - type: f1 value: 87.25826722019875 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 50.09239610327673 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 46.64733054606282 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: map_at_1 value: 33.997 - type: map_at_10 value: 48.176 - type: map_at_100 value: 49.82 - type: map_at_1000 value: 49.924 - type: map_at_3 value: 43.626 - type: map_at_5 value: 46.275 - type: mrr_at_1 value: 42.059999999999995 - type: mrr_at_10 value: 53.726 - type: mrr_at_100 value: 54.398 - type: mrr_at_1000 value: 54.416 - type: mrr_at_3 value: 50.714999999999996 - type: mrr_at_5 value: 52.639 - type: ndcg_at_1 value: 42.059999999999995 - type: ndcg_at_10 value: 55.574999999999996 - type: ndcg_at_100 value: 60.744 - type: ndcg_at_1000 value: 61.85699999999999 - type: ndcg_at_3 value: 49.363 - type: ndcg_at_5 value: 52.44 - type: precision_at_1 value: 42.059999999999995 - type: precision_at_10 value: 11.101999999999999 - type: precision_at_100 value: 1.73 - type: precision_at_1000 value: 0.218 - type: precision_at_3 value: 24.464 - type: precision_at_5 value: 18.026 - type: recall_at_1 value: 33.997 - type: recall_at_10 value: 70.35900000000001 - type: recall_at_100 value: 91.642 - type: recall_at_1000 value: 97.977 - type: recall_at_3 value: 52.76 - type: recall_at_5 value: 61.148 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval type: BeIR/cqadupstack config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: map_at_1 value: 35.884 - type: map_at_10 value: 48.14 - type: map_at_100 value: 49.5 - type: map_at_1000 value: 49.63 - type: map_at_3 value: 44.646 - type: map_at_5 value: 46.617999999999995 - type: mrr_at_1 value: 44.458999999999996 - type: mrr_at_10 value: 53.751000000000005 - type: mrr_at_100 value: 54.37800000000001 - type: mrr_at_1000 value: 54.415 - type: mrr_at_3 value: 51.815 - type: mrr_at_5 value: 52.882 - type: ndcg_at_1 value: 44.458999999999996 - type: ndcg_at_10 value: 54.157 - type: ndcg_at_100 value: 58.362 - type: ndcg_at_1000 value: 60.178 - type: ndcg_at_3 value: 49.661 - type: ndcg_at_5 value: 51.74999999999999 - type: precision_at_1 value: 44.458999999999996 - type: precision_at_10 value: 10.248 - type: precision_at_100 value: 1.5890000000000002 - type: precision_at_1000 value: 0.207 - type: precision_at_3 value: 23.928 - type: precision_at_5 value: 16.878999999999998 - type: recall_at_1 value: 35.884 - type: recall_at_10 value: 64.798 - type: recall_at_100 value: 82.345 - type: recall_at_1000 value: 93.267 - type: recall_at_3 value: 51.847 - type: recall_at_5 value: 57.601 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval type: BeIR/cqadupstack config: default split: test revision: 
4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: map_at_1 value: 39.383 - type: map_at_10 value: 53.714 - type: map_at_100 value: 54.838 - type: map_at_1000 value: 54.87800000000001 - type: map_at_3 value: 50.114999999999995 - type: map_at_5 value: 52.153000000000006 - type: mrr_at_1 value: 45.016 - type: mrr_at_10 value: 56.732000000000006 - type: mrr_at_100 value: 57.411 - type: mrr_at_1000 value: 57.431 - type: mrr_at_3 value: 54.044000000000004 - type: mrr_at_5 value: 55.639 - type: ndcg_at_1 value: 45.016 - type: ndcg_at_10 value: 60.228 - type: ndcg_at_100 value: 64.277 - type: ndcg_at_1000 value: 65.07 - type: ndcg_at_3 value: 54.124 - type: ndcg_at_5 value: 57.147000000000006 - type: precision_at_1 value: 45.016 - type: precision_at_10 value: 9.937 - type: precision_at_100 value: 1.288 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 24.471999999999998 - type: precision_at_5 value: 16.991 - type: recall_at_1 value: 39.383 - type: recall_at_10 value: 76.175 - type: recall_at_100 value: 93.02 - type: recall_at_1000 value: 98.60900000000001 - type: recall_at_3 value: 60.265 - type: recall_at_5 value: 67.46600000000001 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval type: BeIR/cqadupstack config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: map_at_1 value: 27.426000000000002 - type: map_at_10 value: 37.397000000000006 - type: map_at_100 value: 38.61 - type: map_at_1000 value: 38.678000000000004 - type: map_at_3 value: 34.150999999999996 - type: map_at_5 value: 36.137 - type: mrr_at_1 value: 29.944 - type: mrr_at_10 value: 39.654 - type: mrr_at_100 value: 40.638000000000005 - type: mrr_at_1000 value: 40.691 - type: mrr_at_3 value: 36.817 - type: mrr_at_5 value: 38.524 - type: ndcg_at_1 value: 29.944 - type: ndcg_at_10 value: 43.094 - type: ndcg_at_100 value: 48.789 - type: ndcg_at_1000 value: 50.339999999999996 - type: ndcg_at_3 value: 36.984 - type: ndcg_at_5 value: 40.248 - type: precision_at_1 value: 29.944 - type: precision_at_10 value: 6.78 - type: precision_at_100 value: 1.024 - type: precision_at_1000 value: 0.11800000000000001 - type: precision_at_3 value: 15.895000000000001 - type: precision_at_5 value: 11.39 - type: recall_at_1 value: 27.426000000000002 - type: recall_at_10 value: 58.464000000000006 - type: recall_at_100 value: 84.193 - type: recall_at_1000 value: 95.52000000000001 - type: recall_at_3 value: 42.172 - type: recall_at_5 value: 50.101 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval type: BeIR/cqadupstack config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: map_at_1 value: 19.721 - type: map_at_10 value: 31.604 - type: map_at_100 value: 32.972 - type: map_at_1000 value: 33.077 - type: map_at_3 value: 27.218999999999998 - type: map_at_5 value: 29.53 - type: mrr_at_1 value: 25.0 - type: mrr_at_10 value: 35.843 - type: mrr_at_100 value: 36.785000000000004 - type: mrr_at_1000 value: 36.842000000000006 - type: mrr_at_3 value: 32.193 - type: mrr_at_5 value: 34.264 - type: ndcg_at_1 value: 25.0 - type: ndcg_at_10 value: 38.606 - type: ndcg_at_100 value: 44.272 - type: ndcg_at_1000 value: 46.527 - type: ndcg_at_3 value: 30.985000000000003 - type: ndcg_at_5 value: 34.43 - type: precision_at_1 value: 25.0 - type: precision_at_10 value: 7.811 - type: precision_at_100 value: 1.203 - type: precision_at_1000 value: 0.15 - type: precision_at_3 value: 15.423 - type: precision_at_5 value: 11.791 - type: 
recall_at_1 value: 19.721 - type: recall_at_10 value: 55.625 - type: recall_at_100 value: 79.34400000000001 - type: recall_at_1000 value: 95.208 - type: recall_at_3 value: 35.19 - type: recall_at_5 value: 43.626 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval type: BeIR/cqadupstack config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: map_at_1 value: 33.784 - type: map_at_10 value: 47.522 - type: map_at_100 value: 48.949999999999996 - type: map_at_1000 value: 49.038 - type: map_at_3 value: 43.284 - type: map_at_5 value: 45.629 - type: mrr_at_1 value: 41.482 - type: mrr_at_10 value: 52.830999999999996 - type: mrr_at_100 value: 53.559999999999995 - type: mrr_at_1000 value: 53.588 - type: mrr_at_3 value: 50.016000000000005 - type: mrr_at_5 value: 51.614000000000004 - type: ndcg_at_1 value: 41.482 - type: ndcg_at_10 value: 54.569 - type: ndcg_at_100 value: 59.675999999999995 - type: ndcg_at_1000 value: 60.989000000000004 - type: ndcg_at_3 value: 48.187000000000005 - type: ndcg_at_5 value: 51.183 - type: precision_at_1 value: 41.482 - type: precision_at_10 value: 10.221 - type: precision_at_100 value: 1.486 - type: precision_at_1000 value: 0.17500000000000002 - type: precision_at_3 value: 23.548 - type: precision_at_5 value: 16.805 - type: recall_at_1 value: 33.784 - type: recall_at_10 value: 69.798 - type: recall_at_100 value: 90.098 - type: recall_at_1000 value: 98.176 - type: recall_at_3 value: 52.127 - type: recall_at_5 value: 59.861 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval type: BeIR/cqadupstack config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: map_at_1 value: 28.038999999999998 - type: map_at_10 value: 41.904 - type: map_at_100 value: 43.36 - type: map_at_1000 value: 43.453 - type: map_at_3 value: 37.785999999999994 - type: map_at_5 value: 40.105000000000004 - type: mrr_at_1 value: 35.046 - type: mrr_at_10 value: 46.926 - type: mrr_at_100 value: 47.815000000000005 - type: mrr_at_1000 value: 47.849000000000004 - type: mrr_at_3 value: 44.273 - type: mrr_at_5 value: 45.774 - type: ndcg_at_1 value: 35.046 - type: ndcg_at_10 value: 48.937000000000005 - type: ndcg_at_100 value: 54.544000000000004 - type: ndcg_at_1000 value: 56.069 - type: ndcg_at_3 value: 42.858000000000004 - type: ndcg_at_5 value: 45.644 - type: precision_at_1 value: 35.046 - type: precision_at_10 value: 9.452 - type: precision_at_100 value: 1.429 - type: precision_at_1000 value: 0.173 - type: precision_at_3 value: 21.346999999999998 - type: precision_at_5 value: 15.342 - type: recall_at_1 value: 28.038999999999998 - type: recall_at_10 value: 64.59700000000001 - type: recall_at_100 value: 87.735 - type: recall_at_1000 value: 97.41300000000001 - type: recall_at_3 value: 47.368 - type: recall_at_5 value: 54.93900000000001 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: BeIR/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 28.17291666666667 - type: map_at_10 value: 40.025749999999995 - type: map_at_100 value: 41.39208333333333 - type: map_at_1000 value: 41.499249999999996 - type: map_at_3 value: 36.347 - type: map_at_5 value: 38.41391666666667 - type: mrr_at_1 value: 33.65925 - type: mrr_at_10 value: 44.085499999999996 - type: mrr_at_100 value: 44.94116666666667 - type: mrr_at_1000 value: 44.9855 - type: mrr_at_3 value: 41.2815 - type: mrr_at_5 value: 42.91491666666666 - type: ndcg_at_1 
value: 33.65925 - type: ndcg_at_10 value: 46.430833333333325 - type: ndcg_at_100 value: 51.761 - type: ndcg_at_1000 value: 53.50899999999999 - type: ndcg_at_3 value: 40.45133333333333 - type: ndcg_at_5 value: 43.31483333333334 - type: precision_at_1 value: 33.65925 - type: precision_at_10 value: 8.4995 - type: precision_at_100 value: 1.3210000000000004 - type: precision_at_1000 value: 0.16591666666666666 - type: precision_at_3 value: 19.165083333333335 - type: precision_at_5 value: 13.81816666666667 - type: recall_at_1 value: 28.17291666666667 - type: recall_at_10 value: 61.12624999999999 - type: recall_at_100 value: 83.97266666666667 - type: recall_at_1000 value: 95.66550000000001 - type: recall_at_3 value: 44.661249999999995 - type: recall_at_5 value: 51.983333333333334 - type: map_at_1 value: 17.936 - type: map_at_10 value: 27.399 - type: map_at_100 value: 28.632 - type: map_at_1000 value: 28.738000000000003 - type: map_at_3 value: 24.456 - type: map_at_5 value: 26.06 - type: mrr_at_1 value: 19.224 - type: mrr_at_10 value: 28.998 - type: mrr_at_100 value: 30.11 - type: mrr_at_1000 value: 30.177 - type: mrr_at_3 value: 26.247999999999998 - type: mrr_at_5 value: 27.708 - type: ndcg_at_1 value: 19.224 - type: ndcg_at_10 value: 32.911 - type: ndcg_at_100 value: 38.873999999999995 - type: ndcg_at_1000 value: 41.277 - type: ndcg_at_3 value: 27.142 - type: ndcg_at_5 value: 29.755 - type: precision_at_1 value: 19.224 - type: precision_at_10 value: 5.6930000000000005 - type: precision_at_100 value: 0.9259999999999999 - type: precision_at_1000 value: 0.126 - type: precision_at_3 value: 12.138 - type: precision_at_5 value: 8.909 - type: recall_at_1 value: 17.936 - type: recall_at_10 value: 48.096 - type: recall_at_100 value: 75.389 - type: recall_at_1000 value: 92.803 - type: recall_at_3 value: 32.812999999999995 - type: recall_at_5 value: 38.851 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval type: BeIR/cqadupstack config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: map_at_1 value: 24.681 - type: map_at_10 value: 34.892 - type: map_at_100 value: 35.996 - type: map_at_1000 value: 36.083 - type: map_at_3 value: 31.491999999999997 - type: map_at_5 value: 33.632 - type: mrr_at_1 value: 28.528 - type: mrr_at_10 value: 37.694 - type: mrr_at_100 value: 38.613 - type: mrr_at_1000 value: 38.668 - type: mrr_at_3 value: 34.714 - type: mrr_at_5 value: 36.616 - type: ndcg_at_1 value: 28.528 - type: ndcg_at_10 value: 40.703 - type: ndcg_at_100 value: 45.993 - type: ndcg_at_1000 value: 47.847 - type: ndcg_at_3 value: 34.622 - type: ndcg_at_5 value: 38.035999999999994 - type: precision_at_1 value: 28.528 - type: precision_at_10 value: 6.902 - type: precision_at_100 value: 1.0370000000000001 - type: precision_at_1000 value: 0.126 - type: precision_at_3 value: 15.798000000000002 - type: precision_at_5 value: 11.655999999999999 - type: recall_at_1 value: 24.681 - type: recall_at_10 value: 55.81 - type: recall_at_100 value: 79.785 - type: recall_at_1000 value: 92.959 - type: recall_at_3 value: 39.074 - type: recall_at_5 value: 47.568 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval type: BeIR/cqadupstack config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: map_at_1 value: 18.627 - type: map_at_10 value: 27.872000000000003 - type: map_at_100 value: 29.237999999999996 - type: map_at_1000 value: 29.363 - type: map_at_3 value: 24.751 - type: map_at_5 value: 26.521 - type: mrr_at_1 value: 23.021 
- type: mrr_at_10 value: 31.924000000000003 - type: mrr_at_100 value: 32.922000000000004 - type: mrr_at_1000 value: 32.988 - type: mrr_at_3 value: 29.192 - type: mrr_at_5 value: 30.798 - type: ndcg_at_1 value: 23.021 - type: ndcg_at_10 value: 33.535 - type: ndcg_at_100 value: 39.732 - type: ndcg_at_1000 value: 42.201 - type: ndcg_at_3 value: 28.153 - type: ndcg_at_5 value: 30.746000000000002 - type: precision_at_1 value: 23.021 - type: precision_at_10 value: 6.459 - type: precision_at_100 value: 1.1320000000000001 - type: precision_at_1000 value: 0.153 - type: precision_at_3 value: 13.719000000000001 - type: precision_at_5 value: 10.193000000000001 - type: recall_at_1 value: 18.627 - type: recall_at_10 value: 46.463 - type: recall_at_100 value: 74.226 - type: recall_at_1000 value: 91.28500000000001 - type: recall_at_3 value: 31.357000000000003 - type: recall_at_5 value: 38.067 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval type: BeIR/cqadupstack config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: map_at_1 value: 31.457 - type: map_at_10 value: 42.888 - type: map_at_100 value: 44.24 - type: map_at_1000 value: 44.327 - type: map_at_3 value: 39.588 - type: map_at_5 value: 41.423 - type: mrr_at_1 value: 37.126999999999995 - type: mrr_at_10 value: 47.083000000000006 - type: mrr_at_100 value: 47.997 - type: mrr_at_1000 value: 48.044 - type: mrr_at_3 value: 44.574000000000005 - type: mrr_at_5 value: 46.202 - type: ndcg_at_1 value: 37.126999999999995 - type: ndcg_at_10 value: 48.833 - type: ndcg_at_100 value: 54.327000000000005 - type: ndcg_at_1000 value: 56.011 - type: ndcg_at_3 value: 43.541999999999994 - type: ndcg_at_5 value: 46.127 - type: precision_at_1 value: 37.126999999999995 - type: precision_at_10 value: 8.376999999999999 - type: precision_at_100 value: 1.2309999999999999 - type: precision_at_1000 value: 0.146 - type: precision_at_3 value: 20.211000000000002 - type: precision_at_5 value: 14.16 - type: recall_at_1 value: 31.457 - type: recall_at_10 value: 62.369 - type: recall_at_100 value: 85.444 - type: recall_at_1000 value: 96.65599999999999 - type: recall_at_3 value: 47.961 - type: recall_at_5 value: 54.676 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval type: BeIR/cqadupstack config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: map_at_1 value: 27.139999999999997 - type: map_at_10 value: 38.801 - type: map_at_100 value: 40.549 - type: map_at_1000 value: 40.802 - type: map_at_3 value: 35.05 - type: map_at_5 value: 36.884 - type: mrr_at_1 value: 33.004 - type: mrr_at_10 value: 43.864 - type: mrr_at_100 value: 44.667 - type: mrr_at_1000 value: 44.717 - type: mrr_at_3 value: 40.777 - type: mrr_at_5 value: 42.319 - type: ndcg_at_1 value: 33.004 - type: ndcg_at_10 value: 46.022 - type: ndcg_at_100 value: 51.542 - type: ndcg_at_1000 value: 53.742000000000004 - type: ndcg_at_3 value: 39.795 - type: ndcg_at_5 value: 42.272 - type: precision_at_1 value: 33.004 - type: precision_at_10 value: 9.012 - type: precision_at_100 value: 1.7770000000000001 - type: precision_at_1000 value: 0.26 - type: precision_at_3 value: 19.038 - type: precision_at_5 value: 13.675999999999998 - type: recall_at_1 value: 27.139999999999997 - type: recall_at_10 value: 60.961 - type: recall_at_100 value: 84.451 - type: recall_at_1000 value: 98.113 - type: recall_at_3 value: 43.001 - type: recall_at_5 value: 49.896 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: 
mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: map_at_1 value: 22.076999999999998 - type: map_at_10 value: 35.44 - type: map_at_100 value: 37.651 - type: map_at_1000 value: 37.824999999999996 - type: map_at_3 value: 30.764999999999997 - type: map_at_5 value: 33.26 - type: mrr_at_1 value: 50.163000000000004 - type: mrr_at_10 value: 61.207 - type: mrr_at_100 value: 61.675000000000004 - type: mrr_at_1000 value: 61.692 - type: mrr_at_3 value: 58.60999999999999 - type: mrr_at_5 value: 60.307 - type: ndcg_at_1 value: 50.163000000000004 - type: ndcg_at_10 value: 45.882 - type: ndcg_at_100 value: 53.239999999999995 - type: ndcg_at_1000 value: 55.852000000000004 - type: ndcg_at_3 value: 40.514 - type: ndcg_at_5 value: 42.038 - type: precision_at_1 value: 50.163000000000004 - type: precision_at_10 value: 13.466000000000001 - type: precision_at_100 value: 2.164 - type: precision_at_1000 value: 0.266 - type: precision_at_3 value: 29.707 - type: precision_at_5 value: 21.694 - type: recall_at_1 value: 22.076999999999998 - type: recall_at_10 value: 50.193 - type: recall_at_100 value: 74.993 - type: recall_at_1000 value: 89.131 - type: recall_at_3 value: 35.472 - type: recall_at_5 value: 41.814 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: map_at_1 value: 9.953 - type: map_at_10 value: 24.515 - type: map_at_100 value: 36.173 - type: map_at_1000 value: 38.351 - type: map_at_3 value: 16.592000000000002 - type: map_at_5 value: 20.036 - type: mrr_at_1 value: 74.25 - type: mrr_at_10 value: 81.813 - type: mrr_at_100 value: 82.006 - type: mrr_at_1000 value: 82.011 - type: mrr_at_3 value: 80.875 - type: mrr_at_5 value: 81.362 - type: ndcg_at_1 value: 62.5 - type: ndcg_at_10 value: 52.42 - type: ndcg_at_100 value: 56.808 - type: ndcg_at_1000 value: 63.532999999999994 - type: ndcg_at_3 value: 56.654 - type: ndcg_at_5 value: 54.18300000000001 - type: precision_at_1 value: 74.25 - type: precision_at_10 value: 42.699999999999996 - type: precision_at_100 value: 13.675 - type: precision_at_1000 value: 2.664 - type: precision_at_3 value: 60.5 - type: precision_at_5 value: 52.800000000000004 - type: recall_at_1 value: 9.953 - type: recall_at_10 value: 30.253999999999998 - type: recall_at_100 value: 62.516000000000005 - type: recall_at_1000 value: 84.163 - type: recall_at_3 value: 18.13 - type: recall_at_5 value: 22.771 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 79.455 - type: f1 value: 74.16798697647569 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: map_at_1 value: 87.531 - type: map_at_10 value: 93.16799999999999 - type: map_at_100 value: 93.341 - type: map_at_1000 value: 93.349 - type: map_at_3 value: 92.444 - type: map_at_5 value: 92.865 - type: mrr_at_1 value: 94.014 - type: mrr_at_10 value: 96.761 - type: mrr_at_100 value: 96.762 - type: mrr_at_1000 value: 96.762 - type: mrr_at_3 value: 96.672 - type: mrr_at_5 value: 96.736 - type: ndcg_at_1 value: 94.014 - type: ndcg_at_10 value: 95.112 - type: ndcg_at_100 value: 95.578 - type: ndcg_at_1000 value: 95.68900000000001 - type: ndcg_at_3 value: 94.392 - type: ndcg_at_5 value: 94.72500000000001 - type: precision_at_1 
value: 94.014 - type: precision_at_10 value: 11.065 - type: precision_at_100 value: 1.157 - type: precision_at_1000 value: 0.11800000000000001 - type: precision_at_3 value: 35.259 - type: precision_at_5 value: 21.599 - type: recall_at_1 value: 87.531 - type: recall_at_10 value: 97.356 - type: recall_at_100 value: 98.965 - type: recall_at_1000 value: 99.607 - type: recall_at_3 value: 95.312 - type: recall_at_5 value: 96.295 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: map_at_1 value: 32.055 - type: map_at_10 value: 53.114 - type: map_at_100 value: 55.235 - type: map_at_1000 value: 55.345 - type: map_at_3 value: 45.854 - type: map_at_5 value: 50.025 - type: mrr_at_1 value: 60.34 - type: mrr_at_10 value: 68.804 - type: mrr_at_100 value: 69.309 - type: mrr_at_1000 value: 69.32199999999999 - type: mrr_at_3 value: 66.40899999999999 - type: mrr_at_5 value: 67.976 - type: ndcg_at_1 value: 60.34 - type: ndcg_at_10 value: 62.031000000000006 - type: ndcg_at_100 value: 68.00500000000001 - type: ndcg_at_1000 value: 69.286 - type: ndcg_at_3 value: 56.355999999999995 - type: ndcg_at_5 value: 58.687 - type: precision_at_1 value: 60.34 - type: precision_at_10 value: 17.176 - type: precision_at_100 value: 2.36 - type: precision_at_1000 value: 0.259 - type: precision_at_3 value: 37.14 - type: precision_at_5 value: 27.809 - type: recall_at_1 value: 32.055 - type: recall_at_10 value: 70.91 - type: recall_at_100 value: 91.83 - type: recall_at_1000 value: 98.871 - type: recall_at_3 value: 51.202999999999996 - type: recall_at_5 value: 60.563 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: map_at_1 value: 43.68 - type: map_at_10 value: 64.389 - type: map_at_100 value: 65.24 - type: map_at_1000 value: 65.303 - type: map_at_3 value: 61.309000000000005 - type: map_at_5 value: 63.275999999999996 - type: mrr_at_1 value: 87.36 - type: mrr_at_10 value: 91.12 - type: mrr_at_100 value: 91.227 - type: mrr_at_1000 value: 91.229 - type: mrr_at_3 value: 90.57600000000001 - type: mrr_at_5 value: 90.912 - type: ndcg_at_1 value: 87.36 - type: ndcg_at_10 value: 73.076 - type: ndcg_at_100 value: 75.895 - type: ndcg_at_1000 value: 77.049 - type: ndcg_at_3 value: 68.929 - type: ndcg_at_5 value: 71.28 - type: precision_at_1 value: 87.36 - type: precision_at_10 value: 14.741000000000001 - type: precision_at_100 value: 1.694 - type: precision_at_1000 value: 0.185 - type: precision_at_3 value: 43.043 - type: precision_at_5 value: 27.681 - type: recall_at_1 value: 43.68 - type: recall_at_10 value: 73.707 - type: recall_at_100 value: 84.7 - type: recall_at_1000 value: 92.309 - type: recall_at_3 value: 64.564 - type: recall_at_5 value: 69.203 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 96.75399999999999 - type: ap value: 95.29389839242187 - type: f1 value: 96.75348377433475 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: map_at_1 value: 25.176 - type: map_at_10 value: 38.598 - type: map_at_100 value: 39.707 - type: map_at_1000 value: 39.744 - type: map_at_3 value: 34.566 - type: map_at_5 value: 36.863 - type: mrr_at_1 value: 
25.874000000000002 - type: mrr_at_10 value: 39.214 - type: mrr_at_100 value: 40.251 - type: mrr_at_1000 value: 40.281 - type: mrr_at_3 value: 35.291 - type: mrr_at_5 value: 37.545 - type: ndcg_at_1 value: 25.874000000000002 - type: ndcg_at_10 value: 45.98 - type: ndcg_at_100 value: 51.197 - type: ndcg_at_1000 value: 52.073 - type: ndcg_at_3 value: 37.785999999999994 - type: ndcg_at_5 value: 41.870000000000005 - type: precision_at_1 value: 25.874000000000002 - type: precision_at_10 value: 7.181 - type: precision_at_100 value: 0.979 - type: precision_at_1000 value: 0.106 - type: precision_at_3 value: 16.051000000000002 - type: precision_at_5 value: 11.713 - type: recall_at_1 value: 25.176 - type: recall_at_10 value: 68.67699999999999 - type: recall_at_100 value: 92.55 - type: recall_at_1000 value: 99.164 - type: recall_at_3 value: 46.372 - type: recall_at_5 value: 56.16 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 99.03784769721841 - type: f1 value: 98.97791641821495 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 91.88326493388054 - type: f1 value: 73.74809928034335 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 85.41358439811701 - type: f1 value: 83.503679460639 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 89.77135171486215 - type: f1 value: 88.89843747468366 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 46.22695362087359 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 44.132372165849425 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 33.35680810650402 - type: mrr value: 34.72625715637218 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: map_at_1 value: 7.165000000000001 - type: map_at_10 value: 15.424 - type: map_at_100 value: 20.28 - type: map_at_1000 value: 22.065 - type: map_at_3 value: 11.236 - type: map_at_5 value: 13.025999999999998 - type: mrr_at_1 value: 51.702999999999996 - type: mrr_at_10 value: 59.965 - type: mrr_at_100 value: 60.667 - type: mrr_at_1000 value: 60.702999999999996 - type: mrr_at_3 value: 58.772000000000006 - type: mrr_at_5 value: 59.267 - type: ndcg_at_1 value: 49.536 - type: ndcg_at_10 value: 40.6 - type: ndcg_at_100 value: 37.848 - type: ndcg_at_1000 value: 46.657 - type: ndcg_at_3 value: 46.117999999999995 - type: ndcg_at_5 value: 43.619 - type: precision_at_1 value: 51.393 - type: precision_at_10 value: 
30.31 - type: precision_at_100 value: 9.972 - type: precision_at_1000 value: 2.329 - type: precision_at_3 value: 43.137 - type: precision_at_5 value: 37.585 - type: recall_at_1 value: 7.165000000000001 - type: recall_at_10 value: 19.689999999999998 - type: recall_at_100 value: 39.237 - type: recall_at_1000 value: 71.417 - type: recall_at_3 value: 12.247 - type: recall_at_5 value: 14.902999999999999 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: map_at_1 value: 42.653999999999996 - type: map_at_10 value: 59.611999999999995 - type: map_at_100 value: 60.32300000000001 - type: map_at_1000 value: 60.336 - type: map_at_3 value: 55.584999999999994 - type: map_at_5 value: 58.19 - type: mrr_at_1 value: 47.683 - type: mrr_at_10 value: 62.06700000000001 - type: mrr_at_100 value: 62.537 - type: mrr_at_1000 value: 62.544999999999995 - type: mrr_at_3 value: 59.178 - type: mrr_at_5 value: 61.034 - type: ndcg_at_1 value: 47.654 - type: ndcg_at_10 value: 67.001 - type: ndcg_at_100 value: 69.73899999999999 - type: ndcg_at_1000 value: 69.986 - type: ndcg_at_3 value: 59.95700000000001 - type: ndcg_at_5 value: 64.025 - type: precision_at_1 value: 47.654 - type: precision_at_10 value: 10.367999999999999 - type: precision_at_100 value: 1.192 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 26.651000000000003 - type: precision_at_5 value: 18.459 - type: recall_at_1 value: 42.653999999999996 - type: recall_at_10 value: 86.619 - type: recall_at_100 value: 98.04899999999999 - type: recall_at_1000 value: 99.812 - type: recall_at_3 value: 68.987 - type: recall_at_5 value: 78.158 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: None metrics: - type: map_at_1 value: 72.538 - type: map_at_10 value: 86.702 - type: map_at_100 value: 87.31 - type: map_at_1000 value: 87.323 - type: map_at_3 value: 83.87 - type: map_at_5 value: 85.682 - type: mrr_at_1 value: 83.31 - type: mrr_at_10 value: 89.225 - type: mrr_at_100 value: 89.30399999999999 - type: mrr_at_1000 value: 89.30399999999999 - type: mrr_at_3 value: 88.44300000000001 - type: mrr_at_5 value: 89.005 - type: ndcg_at_1 value: 83.32000000000001 - type: ndcg_at_10 value: 90.095 - type: ndcg_at_100 value: 91.12 - type: ndcg_at_1000 value: 91.179 - type: ndcg_at_3 value: 87.606 - type: ndcg_at_5 value: 89.031 - type: precision_at_1 value: 83.32000000000001 - type: precision_at_10 value: 13.641 - type: precision_at_100 value: 1.541 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 38.377 - type: precision_at_5 value: 25.162000000000003 - type: recall_at_1 value: 72.538 - type: recall_at_10 value: 96.47200000000001 - type: recall_at_100 value: 99.785 - type: recall_at_1000 value: 99.99900000000001 - type: recall_at_3 value: 89.278 - type: recall_at_5 value: 93.367 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 73.55219145406065 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 74.13437105242755 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 6.873 - type: 
map_at_10 value: 17.944 - type: map_at_100 value: 21.171 - type: map_at_1000 value: 21.528 - type: map_at_3 value: 12.415 - type: map_at_5 value: 15.187999999999999 - type: mrr_at_1 value: 33.800000000000004 - type: mrr_at_10 value: 46.455 - type: mrr_at_100 value: 47.378 - type: mrr_at_1000 value: 47.394999999999996 - type: mrr_at_3 value: 42.367 - type: mrr_at_5 value: 44.972 - type: ndcg_at_1 value: 33.800000000000004 - type: ndcg_at_10 value: 28.907 - type: ndcg_at_100 value: 39.695 - type: ndcg_at_1000 value: 44.582 - type: ndcg_at_3 value: 26.949 - type: ndcg_at_5 value: 23.988 - type: precision_at_1 value: 33.800000000000004 - type: precision_at_10 value: 15.079999999999998 - type: precision_at_100 value: 3.056 - type: precision_at_1000 value: 0.42100000000000004 - type: precision_at_3 value: 25.167 - type: precision_at_5 value: 21.26 - type: recall_at_1 value: 6.873 - type: recall_at_10 value: 30.568 - type: recall_at_100 value: 62.062 - type: recall_at_1000 value: 85.37700000000001 - type: recall_at_3 value: 15.312999999999999 - type: recall_at_5 value: 21.575 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 82.37009118256057 - type: cos_sim_spearman value: 79.27986395671529 - type: euclidean_pearson value: 79.18037715442115 - type: euclidean_spearman value: 79.28004791561621 - type: manhattan_pearson value: 79.34062972800541 - type: manhattan_spearman value: 79.43106695543402 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 87.48474767383833 - type: cos_sim_spearman value: 79.54505388752513 - type: euclidean_pearson value: 83.43282704179565 - type: euclidean_spearman value: 79.54579919925405 - type: manhattan_pearson value: 83.77564492427952 - type: manhattan_spearman value: 79.84558396989286 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 88.803698035802 - type: cos_sim_spearman value: 88.83451367754881 - type: euclidean_pearson value: 88.28939285711628 - type: euclidean_spearman value: 88.83528996073112 - type: manhattan_pearson value: 88.28017412671795 - type: manhattan_spearman value: 88.9228828016344 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 85.27469288153428 - type: cos_sim_spearman value: 83.87477064876288 - type: euclidean_pearson value: 84.2601737035379 - type: euclidean_spearman value: 83.87431082479074 - type: manhattan_pearson value: 84.3621547772745 - type: manhattan_spearman value: 84.12094375000423 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 88.12749863201587 - type: cos_sim_spearman value: 88.54287568368565 - type: euclidean_pearson value: 87.90429700607999 - type: euclidean_spearman value: 88.5437689576261 - type: manhattan_pearson value: 88.19276653356833 - type: manhattan_spearman value: 88.99995393814679 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 
85.68398747560902 - type: cos_sim_spearman value: 86.48815303460574 - type: euclidean_pearson value: 85.52356631237954 - type: euclidean_spearman value: 86.486391949551 - type: manhattan_pearson value: 85.67267981761788 - type: manhattan_spearman value: 86.7073696332485 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 88.9057107443124 - type: cos_sim_spearman value: 88.7312168757697 - type: euclidean_pearson value: 88.72810439714794 - type: euclidean_spearman value: 88.71976185854771 - type: manhattan_pearson value: 88.50433745949111 - type: manhattan_spearman value: 88.51726175544195 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 67.59391795109886 - type: cos_sim_spearman value: 66.87613008631367 - type: euclidean_pearson value: 69.23198488262217 - type: euclidean_spearman value: 66.85427723013692 - type: manhattan_pearson value: 69.50730124841084 - type: manhattan_spearman value: 67.10404669820792 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 87.0820605344619 - type: cos_sim_spearman value: 86.8518089863434 - type: euclidean_pearson value: 86.31087134689284 - type: euclidean_spearman value: 86.8518520517941 - type: manhattan_pearson value: 86.47203796160612 - type: manhattan_spearman value: 87.1080149734421 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 89.09255369305481 - type: mrr value: 97.10323445617563 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: map_at_1 value: 61.260999999999996 - type: map_at_10 value: 74.043 - type: map_at_100 value: 74.37700000000001 - type: map_at_1000 value: 74.384 - type: map_at_3 value: 71.222 - type: map_at_5 value: 72.875 - type: mrr_at_1 value: 64.333 - type: mrr_at_10 value: 74.984 - type: mrr_at_100 value: 75.247 - type: mrr_at_1000 value: 75.25500000000001 - type: mrr_at_3 value: 73.167 - type: mrr_at_5 value: 74.35000000000001 - type: ndcg_at_1 value: 64.333 - type: ndcg_at_10 value: 79.06 - type: ndcg_at_100 value: 80.416 - type: ndcg_at_1000 value: 80.55600000000001 - type: ndcg_at_3 value: 74.753 - type: ndcg_at_5 value: 76.97500000000001 - type: precision_at_1 value: 64.333 - type: precision_at_10 value: 10.567 - type: precision_at_100 value: 1.1199999999999999 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 29.889 - type: precision_at_5 value: 19.533 - type: recall_at_1 value: 61.260999999999996 - type: recall_at_10 value: 93.167 - type: recall_at_100 value: 99.0 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 81.667 - type: recall_at_5 value: 87.394 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.71980198019801 - type: cos_sim_ap value: 92.81616007802704 - type: cos_sim_f1 value: 
85.17548454688318 - type: cos_sim_precision value: 89.43894389438944 - type: cos_sim_recall value: 81.3 - type: dot_accuracy value: 99.71980198019801 - type: dot_ap value: 92.81398760591358 - type: dot_f1 value: 85.17548454688318 - type: dot_precision value: 89.43894389438944 - type: dot_recall value: 81.3 - type: euclidean_accuracy value: 99.71980198019801 - type: euclidean_ap value: 92.81560637245072 - type: euclidean_f1 value: 85.17548454688318 - type: euclidean_precision value: 89.43894389438944 - type: euclidean_recall value: 81.3 - type: manhattan_accuracy value: 99.73069306930694 - type: manhattan_ap value: 93.14005487480794 - type: manhattan_f1 value: 85.56263269639068 - type: manhattan_precision value: 91.17647058823529 - type: manhattan_recall value: 80.60000000000001 - type: max_accuracy value: 99.73069306930694 - type: max_ap value: 93.14005487480794 - type: max_f1 value: 85.56263269639068 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 79.86443362395185 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 49.40897096662564 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 55.66040806627947 - type: mrr value: 56.58670475766064 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 31.51015090598575 - type: cos_sim_spearman value: 31.35016454939226 - type: dot_pearson value: 31.5150068731 - type: dot_spearman value: 31.34790869023487 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.254 - type: map_at_10 value: 2.064 - type: map_at_100 value: 12.909 - type: map_at_1000 value: 31.761 - type: map_at_3 value: 0.738 - type: map_at_5 value: 1.155 - type: mrr_at_1 value: 96.0 - type: mrr_at_10 value: 98.0 - type: mrr_at_100 value: 98.0 - type: mrr_at_1000 value: 98.0 - type: mrr_at_3 value: 98.0 - type: mrr_at_5 value: 98.0 - type: ndcg_at_1 value: 93.0 - type: ndcg_at_10 value: 82.258 - type: ndcg_at_100 value: 64.34 - type: ndcg_at_1000 value: 57.912 - type: ndcg_at_3 value: 90.827 - type: ndcg_at_5 value: 86.79 - type: precision_at_1 value: 96.0 - type: precision_at_10 value: 84.8 - type: precision_at_100 value: 66.0 - type: precision_at_1000 value: 25.356 - type: precision_at_3 value: 94.667 - type: precision_at_5 value: 90.4 - type: recall_at_1 value: 0.254 - type: recall_at_10 value: 2.1950000000000003 - type: recall_at_100 value: 16.088 - type: recall_at_1000 value: 54.559000000000005 - type: recall_at_3 value: 0.75 - type: recall_at_5 value: 1.191 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: map_at_1 value: 2.976 - type: map_at_10 value: 11.389000000000001 - type: map_at_100 value: 18.429000000000002 - type: map_at_1000 value: 20.113 - type: map_at_3 value: 6.483 - type: map_at_5 value: 8.770999999999999 
- type: mrr_at_1 value: 40.816 - type: mrr_at_10 value: 58.118 - type: mrr_at_100 value: 58.489999999999995 - type: mrr_at_1000 value: 58.489999999999995 - type: mrr_at_3 value: 53.061 - type: mrr_at_5 value: 57.041 - type: ndcg_at_1 value: 40.816 - type: ndcg_at_10 value: 30.567 - type: ndcg_at_100 value: 42.44 - type: ndcg_at_1000 value: 53.480000000000004 - type: ndcg_at_3 value: 36.016 - type: ndcg_at_5 value: 34.257 - type: precision_at_1 value: 42.857 - type: precision_at_10 value: 25.714 - type: precision_at_100 value: 8.429 - type: precision_at_1000 value: 1.5939999999999999 - type: precision_at_3 value: 36.735 - type: precision_at_5 value: 33.878 - type: recall_at_1 value: 2.976 - type: recall_at_10 value: 17.854999999999997 - type: recall_at_100 value: 51.833 - type: recall_at_1000 value: 86.223 - type: recall_at_3 value: 7.887 - type: recall_at_5 value: 12.026 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 85.1174 - type: ap value: 30.169441069345748 - type: f1 value: 69.79254701873245 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 72.58347481607245 - type: f1 value: 72.74877295564937 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 53.90586138221305 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 87.35769207844072 - type: cos_sim_ap value: 77.9645072410354 - type: cos_sim_f1 value: 71.32352941176471 - type: cos_sim_precision value: 66.5903890160183 - type: cos_sim_recall value: 76.78100263852242 - type: dot_accuracy value: 87.37557370209214 - type: dot_ap value: 77.96250046429908 - type: dot_f1 value: 71.28932757557064 - type: dot_precision value: 66.95249130938586 - type: dot_recall value: 76.22691292875989 - type: euclidean_accuracy value: 87.35173153722357 - type: euclidean_ap value: 77.96520460741593 - type: euclidean_f1 value: 71.32470733210104 - type: euclidean_precision value: 66.91329479768785 - type: euclidean_recall value: 76.35883905013192 - type: manhattan_accuracy value: 87.25636287774931 - type: manhattan_ap value: 77.77752485611796 - type: manhattan_f1 value: 71.18148599269183 - type: manhattan_precision value: 66.10859728506787 - type: manhattan_recall value: 77.0976253298153 - type: max_accuracy value: 87.37557370209214 - type: max_ap value: 77.96520460741593 - type: max_f1 value: 71.32470733210104 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.38176737687739 - type: cos_sim_ap value: 86.58811861657401 - type: cos_sim_f1 value: 79.09430644097604 - type: cos_sim_precision value: 75.45085977911366 - type: cos_sim_recall value: 83.10748383122882 - type: dot_accuracy value: 89.38370784336554 - type: dot_ap value: 86.58840606004333 - type: dot_f1 
value: 79.10179860068133 - type: dot_precision value: 75.44546153308643 - type: dot_recall value: 83.13058207576223 - type: euclidean_accuracy value: 89.38564830985369 - type: euclidean_ap value: 86.58820721061164 - type: euclidean_f1 value: 79.09070942235888 - type: euclidean_precision value: 75.38729937194697 - type: euclidean_recall value: 83.17677856482906 - type: manhattan_accuracy value: 89.40699344122326 - type: manhattan_ap value: 86.60631843011362 - type: manhattan_f1 value: 79.14949970570925 - type: manhattan_precision value: 75.78191039729502 - type: manhattan_recall value: 82.83030489682784 - type: max_accuracy value: 89.40699344122326 - type: max_ap value: 86.60631843011362 - type: max_f1 value: 79.14949970570925 - task: type: STS dataset: name: MTEB AFQMC type: C-MTEB/AFQMC config: default split: validation revision: b44c3b011063adb25877c13823db83bb193913c4 metrics: - type: cos_sim_pearson value: 65.58442135663871 - type: cos_sim_spearman value: 72.2538631361313 - type: euclidean_pearson value: 70.97255486607429 - type: euclidean_spearman value: 72.25374250228647 - type: manhattan_pearson value: 70.83250199989911 - type: manhattan_spearman value: 72.14819496536272 - task: type: STS dataset: name: MTEB ATEC type: C-MTEB/ATEC config: default split: test revision: 0f319b1142f28d00e055a6770f3f726ae9b7d865 metrics: - type: cos_sim_pearson value: 59.99478404929932 - type: cos_sim_spearman value: 62.61836216999812 - type: euclidean_pearson value: 66.86429811933593 - type: euclidean_spearman value: 62.6183520374191 - type: manhattan_pearson value: 66.8063778911633 - type: manhattan_spearman value: 62.569607573241115 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 53.98400000000001 - type: f1 value: 51.21447361350723 - task: type: STS dataset: name: MTEB BQ type: C-MTEB/BQ config: default split: test revision: e3dda5e115e487b39ec7e618c0c6a29137052a55 metrics: - type: cos_sim_pearson value: 79.11941660686553 - type: cos_sim_spearman value: 81.25029594540435 - type: euclidean_pearson value: 82.06973504238826 - type: euclidean_spearman value: 81.2501989488524 - type: manhattan_pearson value: 82.10094630392753 - type: manhattan_spearman value: 81.27987244392389 - task: type: Clustering dataset: name: MTEB CLSClusteringP2P type: C-MTEB/CLSClusteringP2P config: default split: test revision: 4b6227591c6c1a73bc76b1055f3b7f3588e72476 metrics: - type: v_measure value: 47.07270168705156 - task: type: Clustering dataset: name: MTEB CLSClusteringS2S type: C-MTEB/CLSClusteringS2S config: default split: test revision: e458b3f5414b62b7f9f83499ac1f5497ae2e869f metrics: - type: v_measure value: 45.98511703185043 - task: type: Reranking dataset: name: MTEB CMedQAv1 type: C-MTEB/CMedQAv1-reranking config: default split: test revision: 8d7f1e942507dac42dc58017c1a001c3717da7df metrics: - type: map value: 88.19895157194931 - type: mrr value: 90.21424603174603 - task: type: Reranking dataset: name: MTEB CMedQAv2 type: C-MTEB/CMedQAv2-reranking config: default split: test revision: 23d186750531a14a0357ca22cd92d712fd512ea0 metrics: - type: map value: 88.03317320980119 - type: mrr value: 89.9461507936508 - task: type: Retrieval dataset: name: MTEB CmedqaRetrieval type: C-MTEB/CmedqaRetrieval config: default split: dev revision: cd540c506dae1cf9e9a59c3e06f42030d54e7301 metrics: - type: map_at_1 value: 29.037000000000003 - type: map_at_10 
value: 42.001 - type: map_at_100 value: 43.773 - type: map_at_1000 value: 43.878 - type: map_at_3 value: 37.637 - type: map_at_5 value: 40.034 - type: mrr_at_1 value: 43.136 - type: mrr_at_10 value: 51.158 - type: mrr_at_100 value: 52.083 - type: mrr_at_1000 value: 52.12 - type: mrr_at_3 value: 48.733 - type: mrr_at_5 value: 50.025 - type: ndcg_at_1 value: 43.136 - type: ndcg_at_10 value: 48.685 - type: ndcg_at_100 value: 55.513 - type: ndcg_at_1000 value: 57.242000000000004 - type: ndcg_at_3 value: 43.329 - type: ndcg_at_5 value: 45.438 - type: precision_at_1 value: 43.136 - type: precision_at_10 value: 10.56 - type: precision_at_100 value: 1.6129999999999998 - type: precision_at_1000 value: 0.184 - type: precision_at_3 value: 24.064 - type: precision_at_5 value: 17.269000000000002 - type: recall_at_1 value: 29.037000000000003 - type: recall_at_10 value: 59.245000000000005 - type: recall_at_100 value: 87.355 - type: recall_at_1000 value: 98.74000000000001 - type: recall_at_3 value: 42.99 - type: recall_at_5 value: 49.681999999999995 - task: type: PairClassification dataset: name: MTEB Cmnli type: C-MTEB/CMNLI config: default split: validation revision: 41bc36f332156f7adc9e38f53777c959b2ae9766 metrics: - type: cos_sim_accuracy value: 82.68190018039687 - type: cos_sim_ap value: 90.18017125327886 - type: cos_sim_f1 value: 83.64080906868193 - type: cos_sim_precision value: 79.7076890489303 - type: cos_sim_recall value: 87.98223053542202 - type: dot_accuracy value: 82.68190018039687 - type: dot_ap value: 90.18782350103646 - type: dot_f1 value: 83.64242087729039 - type: dot_precision value: 79.65313028764805 - type: dot_recall value: 88.05237315875614 - type: euclidean_accuracy value: 82.68190018039687 - type: euclidean_ap value: 90.1801957900632 - type: euclidean_f1 value: 83.63636363636364 - type: euclidean_precision value: 79.52772506852203 - type: euclidean_recall value: 88.19265840542437 - type: manhattan_accuracy value: 82.14070956103427 - type: manhattan_ap value: 89.96178420101427 - type: manhattan_f1 value: 83.21087838578791 - type: manhattan_precision value: 78.35605121850475 - type: manhattan_recall value: 88.70703764320785 - type: max_accuracy value: 82.68190018039687 - type: max_ap value: 90.18782350103646 - type: max_f1 value: 83.64242087729039 - task: type: Retrieval dataset: name: MTEB CovidRetrieval type: C-MTEB/CovidRetrieval config: default split: dev revision: 1271c7809071a13532e05f25fb53511ffce77117 metrics: - type: map_at_1 value: 72.234 - type: map_at_10 value: 80.10000000000001 - type: map_at_100 value: 80.36 - type: map_at_1000 value: 80.363 - type: map_at_3 value: 78.315 - type: map_at_5 value: 79.607 - type: mrr_at_1 value: 72.392 - type: mrr_at_10 value: 80.117 - type: mrr_at_100 value: 80.36999999999999 - type: mrr_at_1000 value: 80.373 - type: mrr_at_3 value: 78.469 - type: mrr_at_5 value: 79.633 - type: ndcg_at_1 value: 72.392 - type: ndcg_at_10 value: 83.651 - type: ndcg_at_100 value: 84.749 - type: ndcg_at_1000 value: 84.83000000000001 - type: ndcg_at_3 value: 80.253 - type: ndcg_at_5 value: 82.485 - type: precision_at_1 value: 72.392 - type: precision_at_10 value: 9.557 - type: precision_at_100 value: 1.004 - type: precision_at_1000 value: 0.101 - type: precision_at_3 value: 28.732000000000003 - type: precision_at_5 value: 18.377 - type: recall_at_1 value: 72.234 - type: recall_at_10 value: 94.573 - type: recall_at_100 value: 99.368 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 85.669 - type: recall_at_5 value: 91.01700000000001 - task: type: 
Retrieval dataset: name: MTEB DuRetrieval type: C-MTEB/DuRetrieval config: default split: dev revision: a1a333e290fe30b10f3f56498e3a0d911a693ced metrics: - type: map_at_1 value: 26.173999999999996 - type: map_at_10 value: 80.04 - type: map_at_100 value: 82.94500000000001 - type: map_at_1000 value: 82.98100000000001 - type: map_at_3 value: 55.562999999999995 - type: map_at_5 value: 69.89800000000001 - type: mrr_at_1 value: 89.5 - type: mrr_at_10 value: 92.996 - type: mrr_at_100 value: 93.06400000000001 - type: mrr_at_1000 value: 93.065 - type: mrr_at_3 value: 92.658 - type: mrr_at_5 value: 92.84599999999999 - type: ndcg_at_1 value: 89.5 - type: ndcg_at_10 value: 87.443 - type: ndcg_at_100 value: 90.253 - type: ndcg_at_1000 value: 90.549 - type: ndcg_at_3 value: 85.874 - type: ndcg_at_5 value: 84.842 - type: precision_at_1 value: 89.5 - type: precision_at_10 value: 41.805 - type: precision_at_100 value: 4.827 - type: precision_at_1000 value: 0.49 - type: precision_at_3 value: 76.85 - type: precision_at_5 value: 64.8 - type: recall_at_1 value: 26.173999999999996 - type: recall_at_10 value: 89.101 - type: recall_at_100 value: 98.08099999999999 - type: recall_at_1000 value: 99.529 - type: recall_at_3 value: 57.902 - type: recall_at_5 value: 74.602 - task: type: Retrieval dataset: name: MTEB EcomRetrieval type: C-MTEB/EcomRetrieval config: default split: dev revision: 687de13dc7294d6fd9be10c6945f9e8fec8166b9 metrics: - type: map_at_1 value: 56.10000000000001 - type: map_at_10 value: 66.15299999999999 - type: map_at_100 value: 66.625 - type: map_at_1000 value: 66.636 - type: map_at_3 value: 63.632999999999996 - type: map_at_5 value: 65.293 - type: mrr_at_1 value: 56.10000000000001 - type: mrr_at_10 value: 66.15299999999999 - type: mrr_at_100 value: 66.625 - type: mrr_at_1000 value: 66.636 - type: mrr_at_3 value: 63.632999999999996 - type: mrr_at_5 value: 65.293 - type: ndcg_at_1 value: 56.10000000000001 - type: ndcg_at_10 value: 71.146 - type: ndcg_at_100 value: 73.27799999999999 - type: ndcg_at_1000 value: 73.529 - type: ndcg_at_3 value: 66.09 - type: ndcg_at_5 value: 69.08999999999999 - type: precision_at_1 value: 56.10000000000001 - type: precision_at_10 value: 8.68 - type: precision_at_100 value: 0.964 - type: precision_at_1000 value: 0.098 - type: precision_at_3 value: 24.4 - type: precision_at_5 value: 16.1 - type: recall_at_1 value: 56.10000000000001 - type: recall_at_10 value: 86.8 - type: recall_at_100 value: 96.39999999999999 - type: recall_at_1000 value: 98.3 - type: recall_at_3 value: 73.2 - type: recall_at_5 value: 80.5 - task: type: Classification dataset: name: MTEB IFlyTek type: C-MTEB/IFlyTek-classification config: default split: validation revision: 421605374b29664c5fc098418fe20ada9bd55f8a metrics: - type: accuracy value: 54.52096960369373 - type: f1 value: 40.930845295808695 - task: type: Classification dataset: name: MTEB JDReview type: C-MTEB/JDReview-classification config: default split: test revision: b7c64bd89eb87f8ded463478346f76731f07bf8b metrics: - type: accuracy value: 86.51031894934334 - type: ap value: 55.9516014323483 - type: f1 value: 81.54813679326381 - task: type: STS dataset: name: MTEB LCQMC type: C-MTEB/LCQMC config: default split: test revision: 17f9b096f80380fce5ed12a9be8be7784b337daf metrics: - type: cos_sim_pearson value: 69.67437838574276 - type: cos_sim_spearman value: 73.81314174653045 - type: euclidean_pearson value: 72.63430276680275 - type: euclidean_spearman value: 73.81358736777001 - type: manhattan_pearson value: 72.58743833842829 - type: 
manhattan_spearman value: 73.7590419009179 - task: type: Reranking dataset: name: MTEB MMarcoReranking type: C-MTEB/Mmarco-reranking config: default split: dev revision: None metrics: - type: map value: 31.648613483640254 - type: mrr value: 30.37420634920635 - task: type: Retrieval dataset: name: MTEB MMarcoRetrieval type: C-MTEB/MMarcoRetrieval config: default split: dev revision: 539bbde593d947e2a124ba72651aafc09eb33fc2 metrics: - type: map_at_1 value: 73.28099999999999 - type: map_at_10 value: 81.977 - type: map_at_100 value: 82.222 - type: map_at_1000 value: 82.22699999999999 - type: map_at_3 value: 80.441 - type: map_at_5 value: 81.46600000000001 - type: mrr_at_1 value: 75.673 - type: mrr_at_10 value: 82.41000000000001 - type: mrr_at_100 value: 82.616 - type: mrr_at_1000 value: 82.621 - type: mrr_at_3 value: 81.094 - type: mrr_at_5 value: 81.962 - type: ndcg_at_1 value: 75.673 - type: ndcg_at_10 value: 85.15599999999999 - type: ndcg_at_100 value: 86.151 - type: ndcg_at_1000 value: 86.26899999999999 - type: ndcg_at_3 value: 82.304 - type: ndcg_at_5 value: 84.009 - type: precision_at_1 value: 75.673 - type: precision_at_10 value: 10.042 - type: precision_at_100 value: 1.052 - type: precision_at_1000 value: 0.106 - type: precision_at_3 value: 30.673000000000002 - type: precision_at_5 value: 19.326999999999998 - type: recall_at_1 value: 73.28099999999999 - type: recall_at_10 value: 94.446 - type: recall_at_100 value: 98.737 - type: recall_at_1000 value: 99.649 - type: recall_at_3 value: 86.984 - type: recall_at_5 value: 91.024 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 81.08607935440484 - type: f1 value: 78.24879986066307 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 86.05917955615332 - type: f1 value: 85.05279279434997 - task: type: Retrieval dataset: name: MTEB MedicalRetrieval type: C-MTEB/MedicalRetrieval config: default split: dev revision: 2039188fb5800a9803ba5048df7b76e6fb151fc6 metrics: - type: map_at_1 value: 56.2 - type: map_at_10 value: 62.57899999999999 - type: map_at_100 value: 63.154999999999994 - type: map_at_1000 value: 63.193 - type: map_at_3 value: 61.217 - type: map_at_5 value: 62.012 - type: mrr_at_1 value: 56.3 - type: mrr_at_10 value: 62.629000000000005 - type: mrr_at_100 value: 63.205999999999996 - type: mrr_at_1000 value: 63.244 - type: mrr_at_3 value: 61.267 - type: mrr_at_5 value: 62.062 - type: ndcg_at_1 value: 56.2 - type: ndcg_at_10 value: 65.592 - type: ndcg_at_100 value: 68.657 - type: ndcg_at_1000 value: 69.671 - type: ndcg_at_3 value: 62.808 - type: ndcg_at_5 value: 64.24499999999999 - type: precision_at_1 value: 56.2 - type: precision_at_10 value: 7.5 - type: precision_at_100 value: 0.899 - type: precision_at_1000 value: 0.098 - type: precision_at_3 value: 22.467000000000002 - type: precision_at_5 value: 14.180000000000001 - type: recall_at_1 value: 56.2 - type: recall_at_10 value: 75.0 - type: recall_at_100 value: 89.9 - type: recall_at_1000 value: 97.89999999999999 - type: recall_at_3 value: 67.4 - type: recall_at_5 value: 70.89999999999999 - task: type: Classification dataset: name: MTEB MultilingualSentiment type: C-MTEB/MultilingualSentiment-classification config: default split: 
validation revision: 46958b007a63fdbf239b7672c25d0bea67b5ea1a metrics: - type: accuracy value: 76.87666666666667 - type: f1 value: 76.7317686219665 - task: type: PairClassification dataset: name: MTEB Ocnli type: C-MTEB/OCNLI config: default split: validation revision: 66e76a618a34d6d565d5538088562851e6daa7ec metrics: - type: cos_sim_accuracy value: 79.64266377910124 - type: cos_sim_ap value: 84.78274442344829 - type: cos_sim_f1 value: 81.16947472745292 - type: cos_sim_precision value: 76.47058823529412 - type: cos_sim_recall value: 86.48363252375924 - type: dot_accuracy value: 79.64266377910124 - type: dot_ap value: 84.7851404063692 - type: dot_f1 value: 81.16947472745292 - type: dot_precision value: 76.47058823529412 - type: dot_recall value: 86.48363252375924 - type: euclidean_accuracy value: 79.64266377910124 - type: euclidean_ap value: 84.78068373762378 - type: euclidean_f1 value: 81.14794656110837 - type: euclidean_precision value: 76.35009310986965 - type: euclidean_recall value: 86.58922914466737 - type: manhattan_accuracy value: 79.48023822414727 - type: manhattan_ap value: 84.72928897427576 - type: manhattan_f1 value: 81.32084770823064 - type: manhattan_precision value: 76.24768946395564 - type: manhattan_recall value: 87.11721224920802 - type: max_accuracy value: 79.64266377910124 - type: max_ap value: 84.7851404063692 - type: max_f1 value: 81.32084770823064 - task: type: Classification dataset: name: MTEB OnlineShopping type: C-MTEB/OnlineShopping-classification config: default split: test revision: e610f2ebd179a8fda30ae534c3878750a96db120 metrics: - type: accuracy value: 94.3 - type: ap value: 92.8664032274438 - type: f1 value: 94.29311102997727 - task: type: STS dataset: name: MTEB PAWSX type: C-MTEB/PAWSX config: default split: test revision: 9c6a90e430ac22b5779fb019a23e820b11a8b5e1 metrics: - type: cos_sim_pearson value: 48.51392279882909 - type: cos_sim_spearman value: 54.06338895994974 - type: euclidean_pearson value: 52.58480559573412 - type: euclidean_spearman value: 54.06417276612201 - type: manhattan_pearson value: 52.69525121721343 - type: manhattan_spearman value: 54.048147455389675 - task: type: STS dataset: name: MTEB QBQTC type: C-MTEB/QBQTC config: default split: test revision: 790b0510dc52b1553e8c49f3d2afb48c0e5c48b7 metrics: - type: cos_sim_pearson value: 29.728387290757325 - type: cos_sim_spearman value: 31.366121633635284 - type: euclidean_pearson value: 29.14588368552961 - type: euclidean_spearman value: 31.36764411112844 - type: manhattan_pearson value: 29.63517350523121 - type: manhattan_spearman value: 31.94157020583762 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 63.64868296271406 - type: cos_sim_spearman value: 66.12800618164744 - type: euclidean_pearson value: 63.21405767340238 - type: euclidean_spearman value: 66.12786567790748 - type: manhattan_pearson value: 64.04300276525848 - type: manhattan_spearman value: 66.5066857145652 - task: type: STS dataset: name: MTEB STSB type: C-MTEB/STSB config: default split: test revision: 0cde68302b3541bb8b3c340dc0644b0b745b3dc0 metrics: - type: cos_sim_pearson value: 81.2302623912794 - type: cos_sim_spearman value: 81.16833673266562 - type: euclidean_pearson value: 79.47647843876024 - type: euclidean_spearman value: 81.16944349524972 - type: manhattan_pearson value: 79.84947238492208 - type: manhattan_spearman value: 81.64626599410026 - task: type: Reranking 
dataset: name: MTEB T2Reranking type: C-MTEB/T2Reranking config: default split: dev revision: 76631901a18387f85eaa53e5450019b87ad58ef9 metrics: - type: map value: 67.80129586475687 - type: mrr value: 77.77402311635554 - task: type: Retrieval dataset: name: MTEB T2Retrieval type: C-MTEB/T2Retrieval config: default split: dev revision: 8731a845f1bf500a4f111cf1070785c793d10e64 metrics: - type: map_at_1 value: 28.666999999999998 - type: map_at_10 value: 81.063 - type: map_at_100 value: 84.504 - type: map_at_1000 value: 84.552 - type: map_at_3 value: 56.897 - type: map_at_5 value: 70.073 - type: mrr_at_1 value: 92.087 - type: mrr_at_10 value: 94.132 - type: mrr_at_100 value: 94.19800000000001 - type: mrr_at_1000 value: 94.19999999999999 - type: mrr_at_3 value: 93.78999999999999 - type: mrr_at_5 value: 94.002 - type: ndcg_at_1 value: 92.087 - type: ndcg_at_10 value: 87.734 - type: ndcg_at_100 value: 90.736 - type: ndcg_at_1000 value: 91.184 - type: ndcg_at_3 value: 88.78 - type: ndcg_at_5 value: 87.676 - type: precision_at_1 value: 92.087 - type: precision_at_10 value: 43.46 - type: precision_at_100 value: 5.07 - type: precision_at_1000 value: 0.518 - type: precision_at_3 value: 77.49000000000001 - type: precision_at_5 value: 65.194 - type: recall_at_1 value: 28.666999999999998 - type: recall_at_10 value: 86.632 - type: recall_at_100 value: 96.646 - type: recall_at_1000 value: 98.917 - type: recall_at_3 value: 58.333999999999996 - type: recall_at_5 value: 72.974 - task: type: Classification dataset: name: MTEB TNews type: C-MTEB/TNews-classification config: default split: validation revision: 317f262bf1e6126357bbe89e875451e4b0938fe4 metrics: - type: accuracy value: 52.971999999999994 - type: f1 value: 50.2898280984929 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringP2P type: C-MTEB/ThuNewsClusteringP2P config: default split: test revision: 5798586b105c0434e4f0fe5e767abe619442cf93 metrics: - type: v_measure value: 86.0797948663824 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringS2S type: C-MTEB/ThuNewsClusteringS2S config: default split: test revision: 8a8b2caeda43f39e13c4bc5bea0f8a667896e10d metrics: - type: v_measure value: 85.10759092255017 - task: type: Retrieval dataset: name: MTEB VideoRetrieval type: C-MTEB/VideoRetrieval config: default split: dev revision: 58c2597a5943a2ba48f4668c3b90d796283c5639 metrics: - type: map_at_1 value: 65.60000000000001 - type: map_at_10 value: 74.773 - type: map_at_100 value: 75.128 - type: map_at_1000 value: 75.136 - type: map_at_3 value: 73.05 - type: map_at_5 value: 74.13499999999999 - type: mrr_at_1 value: 65.60000000000001 - type: mrr_at_10 value: 74.773 - type: mrr_at_100 value: 75.128 - type: mrr_at_1000 value: 75.136 - type: mrr_at_3 value: 73.05 - type: mrr_at_5 value: 74.13499999999999 - type: ndcg_at_1 value: 65.60000000000001 - type: ndcg_at_10 value: 78.84299999999999 - type: ndcg_at_100 value: 80.40899999999999 - type: ndcg_at_1000 value: 80.57 - type: ndcg_at_3 value: 75.40599999999999 - type: ndcg_at_5 value: 77.351 - type: precision_at_1 value: 65.60000000000001 - type: precision_at_10 value: 9.139999999999999 - type: precision_at_100 value: 0.984 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 27.400000000000002 - type: precision_at_5 value: 17.380000000000003 - type: recall_at_1 value: 65.60000000000001 - type: recall_at_10 value: 91.4 - type: recall_at_100 value: 98.4 - type: recall_at_1000 value: 99.6 - type: recall_at_3 value: 82.19999999999999 - type: recall_at_5 value: 86.9 - task: type: 
Classification dataset: name: MTEB Waimai type: C-MTEB/waimai-classification config: default split: test revision: 339287def212450dcaa9df8c22bf93e9980c7023 metrics: - type: accuracy value: 89.47 - type: ap value: 75.59561751845389 - type: f1 value: 87.95207751382563 - task: type: Clustering dataset: name: MTEB AlloProfClusteringP2P type: lyon-nlp/alloprof config: default split: test revision: 392ba3f5bcc8c51f578786c1fc3dae648662cb9b metrics: - type: v_measure value: 76.05592323841036 - type: v_measure value: 64.51718058866508 - task: type: Reranking dataset: name: MTEB AlloprofReranking type: lyon-nlp/mteb-fr-reranking-alloprof-s2p config: default split: test revision: 666fdacebe0291776e86f29345663dfaf80a0db9 metrics: - type: map value: 73.08278490943373 - type: mrr value: 74.66561454570449 - task: type: Retrieval dataset: name: MTEB AlloprofRetrieval type: lyon-nlp/alloprof config: default split: test revision: 392ba3f5bcc8c51f578786c1fc3dae648662cb9b metrics: - type: map_at_1 value: 38.912 - type: map_at_10 value: 52.437999999999995 - type: map_at_100 value: 53.38 - type: map_at_1000 value: 53.427 - type: map_at_3 value: 48.879 - type: map_at_5 value: 50.934000000000005 - type: mrr_at_1 value: 44.085 - type: mrr_at_10 value: 55.337 - type: mrr_at_100 value: 56.016999999999996 - type: mrr_at_1000 value: 56.043 - type: mrr_at_3 value: 52.55499999999999 - type: mrr_at_5 value: 54.20399999999999 - type: ndcg_at_1 value: 44.085 - type: ndcg_at_10 value: 58.876 - type: ndcg_at_100 value: 62.714000000000006 - type: ndcg_at_1000 value: 63.721000000000004 - type: ndcg_at_3 value: 52.444 - type: ndcg_at_5 value: 55.692 - type: precision_at_1 value: 44.085 - type: precision_at_10 value: 9.21 - type: precision_at_100 value: 1.164 - type: precision_at_1000 value: 0.128 - type: precision_at_3 value: 23.043 - type: precision_at_5 value: 15.898000000000001 - type: recall_at_1 value: 38.912 - type: recall_at_10 value: 75.577 - type: recall_at_100 value: 92.038 - type: recall_at_1000 value: 99.325 - type: recall_at_3 value: 58.592 - type: recall_at_5 value: 66.235 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 55.532000000000004 - type: f1 value: 52.5783943471605 - task: type: Retrieval dataset: name: MTEB BSARDRetrieval type: maastrichtlawtech/bsard config: default split: test revision: 5effa1b9b5fa3b0f9e12523e6e43e5f86a6e6d59 metrics: - type: map_at_1 value: 8.108 - type: map_at_10 value: 14.710999999999999 - type: map_at_100 value: 15.891 - type: map_at_1000 value: 15.983 - type: map_at_3 value: 12.237 - type: map_at_5 value: 13.679 - type: mrr_at_1 value: 8.108 - type: mrr_at_10 value: 14.710999999999999 - type: mrr_at_100 value: 15.891 - type: mrr_at_1000 value: 15.983 - type: mrr_at_3 value: 12.237 - type: mrr_at_5 value: 13.679 - type: ndcg_at_1 value: 8.108 - type: ndcg_at_10 value: 18.796 - type: ndcg_at_100 value: 25.098 - type: ndcg_at_1000 value: 27.951999999999998 - type: ndcg_at_3 value: 13.712 - type: ndcg_at_5 value: 16.309 - type: precision_at_1 value: 8.108 - type: precision_at_10 value: 3.198 - type: precision_at_100 value: 0.626 - type: precision_at_1000 value: 0.086 - type: precision_at_3 value: 6.006 - type: precision_at_5 value: 4.865 - type: recall_at_1 value: 8.108 - type: recall_at_10 value: 31.982 - type: recall_at_100 value: 62.613 - type: recall_at_1000 value: 86.036 - type: recall_at_3 value: 18.018 - 
type: recall_at_5 value: 24.324 - task: type: Clustering dataset: name: MTEB HALClusteringS2S type: lyon-nlp/clustering-hal-s2s config: default split: test revision: e06ebbbb123f8144bef1a5d18796f3dec9ae2915 metrics: - type: v_measure value: 30.833269778867116 - task: type: Clustering dataset: name: MTEB MLSUMClusteringP2P type: mlsum config: default split: test revision: b5d54f8f3b61ae17845046286940f03c6bc79bc7 metrics: - type: v_measure value: 50.0281928004713 - type: v_measure value: 43.699961510636534 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 96.68963357344191 - type: f1 value: 96.45175170820961 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 87.46946445349202 - type: f1 value: 65.79860440988624 - task: type: Classification dataset: name: MTEB MasakhaNEWSClassification (fra) type: masakhane/masakhanews config: fra split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: accuracy value: 82.60663507109005 - type: f1 value: 77.20462646604777 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (fra) type: masakhane/masakhanews config: fra split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: v_measure value: 60.19311264967803 - type: v_measure value: 63.6235764409785 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 81.65097511768661 - type: f1 value: 78.77796091490924 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 86.64425016812373 - type: f1 value: 85.4912728670017 - task: type: Retrieval dataset: name: MTEB MintakaRetrieval (fr) type: jinaai/mintakaqa config: fr split: test revision: efa78cc2f74bbcd21eff2261f9e13aebe40b814e metrics: - type: map_at_1 value: 35.913000000000004 - type: map_at_10 value: 48.147 - type: map_at_100 value: 48.91 - type: map_at_1000 value: 48.949 - type: map_at_3 value: 45.269999999999996 - type: map_at_5 value: 47.115 - type: mrr_at_1 value: 35.913000000000004 - type: mrr_at_10 value: 48.147 - type: mrr_at_100 value: 48.91 - type: mrr_at_1000 value: 48.949 - type: mrr_at_3 value: 45.269999999999996 - type: mrr_at_5 value: 47.115 - type: ndcg_at_1 value: 35.913000000000004 - type: ndcg_at_10 value: 54.03 - type: ndcg_at_100 value: 57.839 - type: ndcg_at_1000 value: 58.925000000000004 - type: ndcg_at_3 value: 48.217999999999996 - type: ndcg_at_5 value: 51.56699999999999 - type: precision_at_1 value: 35.913000000000004 - type: precision_at_10 value: 7.244000000000001 - type: precision_at_100 value: 0.9039999999999999 - type: precision_at_1000 value: 0.099 - type: precision_at_3 value: 18.905 - type: precision_at_5 value: 12.981000000000002 - type: recall_at_1 value: 35.913000000000004 - type: recall_at_10 value: 72.441 - type: recall_at_100 value: 90.41799999999999 - type: recall_at_1000 value: 99.099 - type: recall_at_3 value: 56.716 - type: recall_at_5 value: 64.90599999999999 - task: type: PairClassification dataset: name: MTEB 
OpusparcusPC (fr) type: GEM/opusparcus config: fr split: test revision: 9e9b1f8ef51616073f47f306f7f47dd91663f86a metrics: - type: cos_sim_accuracy value: 99.90069513406156 - type: cos_sim_ap value: 100.0 - type: cos_sim_f1 value: 99.95032290114257 - type: cos_sim_precision value: 100.0 - type: cos_sim_recall value: 99.90069513406156 - type: dot_accuracy value: 99.90069513406156 - type: dot_ap value: 100.0 - type: dot_f1 value: 99.95032290114257 - type: dot_precision value: 100.0 - type: dot_recall value: 99.90069513406156 - type: euclidean_accuracy value: 99.90069513406156 - type: euclidean_ap value: 100.0 - type: euclidean_f1 value: 99.95032290114257 - type: euclidean_precision value: 100.0 - type: euclidean_recall value: 99.90069513406156 - type: manhattan_accuracy value: 99.90069513406156 - type: manhattan_ap value: 100.0 - type: manhattan_f1 value: 99.95032290114257 - type: manhattan_precision value: 100.0 - type: manhattan_recall value: 99.90069513406156 - type: max_accuracy value: 99.90069513406156 - type: max_ap value: 100.0 - type: max_f1 value: 99.95032290114257 - task: type: PairClassification dataset: name: MTEB PawsX (fr) type: paws-x config: fr split: test revision: 8a04d940a42cd40658986fdd8e3da561533a3646 metrics: - type: cos_sim_accuracy value: 75.25 - type: cos_sim_ap value: 80.86376001270014 - type: cos_sim_f1 value: 73.65945437441204 - type: cos_sim_precision value: 64.02289452166802 - type: cos_sim_recall value: 86.71096345514951 - type: dot_accuracy value: 75.25 - type: dot_ap value: 80.93686107633002 - type: dot_f1 value: 73.65945437441204 - type: dot_precision value: 64.02289452166802 - type: dot_recall value: 86.71096345514951 - type: euclidean_accuracy value: 75.25 - type: euclidean_ap value: 80.86379136218862 - type: euclidean_f1 value: 73.65945437441204 - type: euclidean_precision value: 64.02289452166802 - type: euclidean_recall value: 86.71096345514951 - type: manhattan_accuracy value: 75.3 - type: manhattan_ap value: 80.87826606097734 - type: manhattan_f1 value: 73.68421052631581 - type: manhattan_precision value: 64.0 - type: manhattan_recall value: 86.82170542635659 - type: max_accuracy value: 75.3 - type: max_ap value: 80.93686107633002 - type: max_f1 value: 73.68421052631581 - task: type: STS dataset: name: MTEB SICKFr type: Lajavaness/SICK-fr config: default split: test revision: e077ab4cf4774a1e36d86d593b150422fafd8e8a metrics: - type: cos_sim_pearson value: 81.42349425981143 - type: cos_sim_spearman value: 78.90454327031226 - type: euclidean_pearson value: 78.39086497435166 - type: euclidean_spearman value: 78.9046133980509 - type: manhattan_pearson value: 78.63743094286502 - type: manhattan_spearman value: 79.12136348449269 - task: type: STS dataset: name: MTEB STS22 (fr) type: mteb/sts22-crosslingual-sts config: fr split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 81.452697919749 - type: cos_sim_spearman value: 82.58116836039301 - type: euclidean_pearson value: 81.04038478932786 - type: euclidean_spearman value: 82.58116836039301 - type: manhattan_pearson value: 81.37075396187771 - type: manhattan_spearman value: 82.73678231355368 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (fr) type: stsb_multi_mt config: fr split: test revision: 93d57ef91790589e3ce9c365164337a8a78b7632 metrics: - type: cos_sim_pearson value: 85.7419764013806 - type: cos_sim_spearman value: 85.46085808849622 - type: euclidean_pearson value: 83.70449639870063 - type: euclidean_spearman value: 85.46159013076233 - 
type: manhattan_pearson value: 83.95259510313929 - type: manhattan_spearman value: 85.8029724659458 - task: type: Summarization dataset: name: MTEB SummEvalFr type: lyon-nlp/summarization-summeval-fr-p2p config: default split: test revision: b385812de6a9577b6f4d0f88c6a6e35395a94054 metrics: - type: cos_sim_pearson value: 32.61063271753325 - type: cos_sim_spearman value: 31.454589417353603 - type: dot_pearson value: 32.6106288643431 - type: dot_spearman value: 31.454589417353603 - task: type: Reranking dataset: name: MTEB SyntecReranking type: lyon-nlp/mteb-fr-reranking-syntec-s2p config: default split: test revision: b205c5084a0934ce8af14338bf03feb19499c84d metrics: - type: map value: 84.31666666666666 - type: mrr value: 84.31666666666666 - task: type: Retrieval dataset: name: MTEB SyntecRetrieval type: lyon-nlp/mteb-fr-retrieval-syntec-s2p config: default split: test revision: 77f7e271bf4a92b24fce5119f3486b583ca016ff metrics: - type: map_at_1 value: 63.0 - type: map_at_10 value: 73.471 - type: map_at_100 value: 73.87 - type: map_at_1000 value: 73.87 - type: map_at_3 value: 70.5 - type: map_at_5 value: 73.05 - type: mrr_at_1 value: 63.0 - type: mrr_at_10 value: 73.471 - type: mrr_at_100 value: 73.87 - type: mrr_at_1000 value: 73.87 - type: mrr_at_3 value: 70.5 - type: mrr_at_5 value: 73.05 - type: ndcg_at_1 value: 63.0 - type: ndcg_at_10 value: 78.255 - type: ndcg_at_100 value: 79.88 - type: ndcg_at_1000 value: 79.88 - type: ndcg_at_3 value: 72.702 - type: ndcg_at_5 value: 77.264 - type: precision_at_1 value: 63.0 - type: precision_at_10 value: 9.3 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 26.333000000000002 - type: precision_at_5 value: 18.0 - type: recall_at_1 value: 63.0 - type: recall_at_10 value: 93.0 - type: recall_at_100 value: 100.0 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 79.0 - type: recall_at_5 value: 90.0 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (fr) type: jinaai/xpqa config: fr split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: map_at_1 value: 40.338 - type: map_at_10 value: 61.927 - type: map_at_100 value: 63.361999999999995 - type: map_at_1000 value: 63.405 - type: map_at_3 value: 55.479 - type: map_at_5 value: 59.732 - type: mrr_at_1 value: 63.551 - type: mrr_at_10 value: 71.006 - type: mrr_at_100 value: 71.501 - type: mrr_at_1000 value: 71.509 - type: mrr_at_3 value: 69.07 - type: mrr_at_5 value: 70.165 - type: ndcg_at_1 value: 63.551 - type: ndcg_at_10 value: 68.297 - type: ndcg_at_100 value: 73.13199999999999 - type: ndcg_at_1000 value: 73.751 - type: ndcg_at_3 value: 62.999 - type: ndcg_at_5 value: 64.89 - type: precision_at_1 value: 63.551 - type: precision_at_10 value: 15.661 - type: precision_at_100 value: 1.9789999999999999 - type: precision_at_1000 value: 0.207 - type: precision_at_3 value: 38.273 - type: precision_at_5 value: 27.61 - type: recall_at_1 value: 40.338 - type: recall_at_10 value: 77.267 - type: recall_at_100 value: 95.892 - type: recall_at_1000 value: 99.75500000000001 - type: recall_at_3 value: 60.36 - type: recall_at_5 value: 68.825 - task: type: Clustering dataset: name: MTEB 8TagsClustering type: PL-MTEB/8tags-clustering config: default split: test revision: None metrics: - type: v_measure value: 51.36126303874126 - task: type: Classification dataset: name: MTEB AllegroReviews type: PL-MTEB/allegro-reviews config: default split: test revision: None metrics: - type: accuracy value: 67.13717693836979 - type: f1 value: 
57.27609848003782 - task: type: Retrieval dataset: name: MTEB ArguAna-PL type: clarin-knext/arguana-pl config: default split: test revision: 63fc86750af76253e8c760fc9e534bbf24d260a2 metrics: - type: map_at_1 value: 35.276999999999994 - type: map_at_10 value: 51.086 - type: map_at_100 value: 51.788000000000004 - type: map_at_1000 value: 51.791 - type: map_at_3 value: 46.147 - type: map_at_5 value: 49.078 - type: mrr_at_1 value: 35.917 - type: mrr_at_10 value: 51.315999999999995 - type: mrr_at_100 value: 52.018 - type: mrr_at_1000 value: 52.022 - type: mrr_at_3 value: 46.349000000000004 - type: mrr_at_5 value: 49.297000000000004 - type: ndcg_at_1 value: 35.276999999999994 - type: ndcg_at_10 value: 59.870999999999995 - type: ndcg_at_100 value: 62.590999999999994 - type: ndcg_at_1000 value: 62.661 - type: ndcg_at_3 value: 49.745 - type: ndcg_at_5 value: 55.067 - type: precision_at_1 value: 35.276999999999994 - type: precision_at_10 value: 8.791 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 20.057 - type: precision_at_5 value: 14.637 - type: recall_at_1 value: 35.276999999999994 - type: recall_at_10 value: 87.909 - type: recall_at_100 value: 99.14699999999999 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 60.171 - type: recall_at_5 value: 73.18599999999999 - task: type: Classification dataset: name: MTEB CBD type: PL-MTEB/cbd config: default split: test revision: None metrics: - type: accuracy value: 78.03000000000002 - type: ap value: 29.12548553897622 - type: f1 value: 66.54857118886073 - task: type: PairClassification dataset: name: MTEB CDSC-E type: PL-MTEB/cdsce-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 89.0 - type: cos_sim_ap value: 76.75437826834582 - type: cos_sim_f1 value: 66.4850136239782 - type: cos_sim_precision value: 68.92655367231639 - type: cos_sim_recall value: 64.21052631578948 - type: dot_accuracy value: 89.0 - type: dot_ap value: 76.75437826834582 - type: dot_f1 value: 66.4850136239782 - type: dot_precision value: 68.92655367231639 - type: dot_recall value: 64.21052631578948 - type: euclidean_accuracy value: 89.0 - type: euclidean_ap value: 76.75437826834582 - type: euclidean_f1 value: 66.4850136239782 - type: euclidean_precision value: 68.92655367231639 - type: euclidean_recall value: 64.21052631578948 - type: manhattan_accuracy value: 89.0 - type: manhattan_ap value: 76.66074220647083 - type: manhattan_f1 value: 66.47058823529412 - type: manhattan_precision value: 75.33333333333333 - type: manhattan_recall value: 59.473684210526315 - type: max_accuracy value: 89.0 - type: max_ap value: 76.75437826834582 - type: max_f1 value: 66.4850136239782 - task: type: STS dataset: name: MTEB CDSC-R type: PL-MTEB/cdscr-sts config: default split: test revision: None metrics: - type: cos_sim_pearson value: 93.12903172428328 - type: cos_sim_spearman value: 92.66381487060741 - type: euclidean_pearson value: 90.37278396708922 - type: euclidean_spearman value: 92.66381487060741 - type: manhattan_pearson value: 90.32503296540962 - type: manhattan_spearman value: 92.6902938354313 - task: type: Retrieval dataset: name: MTEB DBPedia-PL type: clarin-knext/dbpedia-pl config: default split: test revision: 76afe41d9af165cc40999fcaa92312b8b012064a metrics: - type: map_at_1 value: 8.83 - type: map_at_10 value: 18.326 - type: map_at_100 value: 26.496 - type: map_at_1000 value: 28.455000000000002 - type: map_at_3 value: 12.933 - type: map_at_5 value: 15.168000000000001 
- type: mrr_at_1 value: 66.0 - type: mrr_at_10 value: 72.76700000000001 - type: mrr_at_100 value: 73.203 - type: mrr_at_1000 value: 73.219 - type: mrr_at_3 value: 71.458 - type: mrr_at_5 value: 72.246 - type: ndcg_at_1 value: 55.375 - type: ndcg_at_10 value: 41.3 - type: ndcg_at_100 value: 45.891 - type: ndcg_at_1000 value: 52.905 - type: ndcg_at_3 value: 46.472 - type: ndcg_at_5 value: 43.734 - type: precision_at_1 value: 66.0 - type: precision_at_10 value: 33.074999999999996 - type: precision_at_100 value: 11.094999999999999 - type: precision_at_1000 value: 2.374 - type: precision_at_3 value: 48.583 - type: precision_at_5 value: 42.0 - type: recall_at_1 value: 8.83 - type: recall_at_10 value: 22.587 - type: recall_at_100 value: 50.61600000000001 - type: recall_at_1000 value: 73.559 - type: recall_at_3 value: 13.688 - type: recall_at_5 value: 16.855 - task: type: Retrieval dataset: name: MTEB FiQA-PL type: clarin-knext/fiqa-pl config: default split: test revision: 2e535829717f8bf9dc829b7f911cc5bbd4e6608e metrics: - type: map_at_1 value: 20.587 - type: map_at_10 value: 33.095 - type: map_at_100 value: 35.24 - type: map_at_1000 value: 35.429 - type: map_at_3 value: 28.626 - type: map_at_5 value: 31.136999999999997 - type: mrr_at_1 value: 40.586 - type: mrr_at_10 value: 49.033 - type: mrr_at_100 value: 49.952999999999996 - type: mrr_at_1000 value: 49.992 - type: mrr_at_3 value: 46.553 - type: mrr_at_5 value: 48.035 - type: ndcg_at_1 value: 40.586 - type: ndcg_at_10 value: 41.046 - type: ndcg_at_100 value: 48.586 - type: ndcg_at_1000 value: 51.634 - type: ndcg_at_3 value: 36.773 - type: ndcg_at_5 value: 38.389 - type: precision_at_1 value: 40.586 - type: precision_at_10 value: 11.466 - type: precision_at_100 value: 1.909 - type: precision_at_1000 value: 0.245 - type: precision_at_3 value: 24.434 - type: precision_at_5 value: 18.426000000000002 - type: recall_at_1 value: 20.587 - type: recall_at_10 value: 47.986000000000004 - type: recall_at_100 value: 75.761 - type: recall_at_1000 value: 94.065 - type: recall_at_3 value: 33.339 - type: recall_at_5 value: 39.765 - task: type: Retrieval dataset: name: MTEB HotpotQA-PL type: clarin-knext/hotpotqa-pl config: default split: test revision: a0bd479ac97b4ccb5bd6ce320c415d0bb4beb907 metrics: - type: map_at_1 value: 40.878 - type: map_at_10 value: 58.775999999999996 - type: map_at_100 value: 59.632 - type: map_at_1000 value: 59.707 - type: map_at_3 value: 56.074 - type: map_at_5 value: 57.629 - type: mrr_at_1 value: 81.756 - type: mrr_at_10 value: 86.117 - type: mrr_at_100 value: 86.299 - type: mrr_at_1000 value: 86.30600000000001 - type: mrr_at_3 value: 85.345 - type: mrr_at_5 value: 85.832 - type: ndcg_at_1 value: 81.756 - type: ndcg_at_10 value: 67.608 - type: ndcg_at_100 value: 70.575 - type: ndcg_at_1000 value: 71.99600000000001 - type: ndcg_at_3 value: 63.723 - type: ndcg_at_5 value: 65.70700000000001 - type: precision_at_1 value: 81.756 - type: precision_at_10 value: 13.619 - type: precision_at_100 value: 1.5939999999999999 - type: precision_at_1000 value: 0.178 - type: precision_at_3 value: 39.604 - type: precision_at_5 value: 25.332 - type: recall_at_1 value: 40.878 - type: recall_at_10 value: 68.096 - type: recall_at_100 value: 79.696 - type: recall_at_1000 value: 89.082 - type: recall_at_3 value: 59.406000000000006 - type: recall_at_5 value: 63.329 - task: type: Retrieval dataset: name: MTEB MSMARCO-PL type: clarin-knext/msmarco-pl config: default split: test revision: 8634c07806d5cce3a6138e260e59b81760a0a640 metrics: - type: map_at_1 value: 
2.1839999999999997 - type: map_at_10 value: 11.346 - type: map_at_100 value: 30.325000000000003 - type: map_at_1000 value: 37.806 - type: map_at_3 value: 4.842 - type: map_at_5 value: 6.891 - type: mrr_at_1 value: 86.047 - type: mrr_at_10 value: 89.14699999999999 - type: mrr_at_100 value: 89.46600000000001 - type: mrr_at_1000 value: 89.46600000000001 - type: mrr_at_3 value: 89.14699999999999 - type: mrr_at_5 value: 89.14699999999999 - type: ndcg_at_1 value: 67.829 - type: ndcg_at_10 value: 62.222 - type: ndcg_at_100 value: 55.337 - type: ndcg_at_1000 value: 64.076 - type: ndcg_at_3 value: 68.12700000000001 - type: ndcg_at_5 value: 64.987 - type: precision_at_1 value: 86.047 - type: precision_at_10 value: 69.535 - type: precision_at_100 value: 32.93 - type: precision_at_1000 value: 6.6049999999999995 - type: precision_at_3 value: 79.845 - type: precision_at_5 value: 75.349 - type: recall_at_1 value: 2.1839999999999997 - type: recall_at_10 value: 12.866 - type: recall_at_100 value: 43.505 - type: recall_at_1000 value: 72.366 - type: recall_at_3 value: 4.947 - type: recall_at_5 value: 7.192 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 80.75319435104238 - type: f1 value: 77.58961444860606 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 85.54472091459313 - type: f1 value: 84.29498563572106 - task: type: Retrieval dataset: name: MTEB NFCorpus-PL type: clarin-knext/nfcorpus-pl config: default split: test revision: 9a6f9567fda928260afed2de480d79c98bf0bec0 metrics: - type: map_at_1 value: 4.367 - type: map_at_10 value: 10.38 - type: map_at_100 value: 13.516 - type: map_at_1000 value: 14.982000000000001 - type: map_at_3 value: 7.367 - type: map_at_5 value: 8.59 - type: mrr_at_1 value: 41.486000000000004 - type: mrr_at_10 value: 48.886 - type: mrr_at_100 value: 49.657000000000004 - type: mrr_at_1000 value: 49.713 - type: mrr_at_3 value: 46.904 - type: mrr_at_5 value: 48.065000000000005 - type: ndcg_at_1 value: 40.402 - type: ndcg_at_10 value: 30.885 - type: ndcg_at_100 value: 28.393 - type: ndcg_at_1000 value: 37.428 - type: ndcg_at_3 value: 35.394999999999996 - type: ndcg_at_5 value: 33.391999999999996 - type: precision_at_1 value: 41.486000000000004 - type: precision_at_10 value: 23.437 - type: precision_at_100 value: 7.638 - type: precision_at_1000 value: 2.0389999999999997 - type: precision_at_3 value: 32.817 - type: precision_at_5 value: 28.915999999999997 - type: recall_at_1 value: 4.367 - type: recall_at_10 value: 14.655000000000001 - type: recall_at_100 value: 29.665999999999997 - type: recall_at_1000 value: 62.073 - type: recall_at_3 value: 8.51 - type: recall_at_5 value: 10.689 - task: type: Retrieval dataset: name: MTEB NQ-PL type: clarin-knext/nq-pl config: default split: test revision: f171245712cf85dd4700b06bef18001578d0ca8d metrics: - type: map_at_1 value: 28.616000000000003 - type: map_at_10 value: 41.626000000000005 - type: map_at_100 value: 42.689 - type: map_at_1000 value: 42.733 - type: map_at_3 value: 37.729 - type: map_at_5 value: 39.879999999999995 - type: mrr_at_1 value: 32.068000000000005 - type: mrr_at_10 value: 44.029 - type: mrr_at_100 value: 44.87 - type: mrr_at_1000 value: 44.901 - type: mrr_at_3 value: 40.687 - type: 
mrr_at_5 value: 42.625 - type: ndcg_at_1 value: 32.068000000000005 - type: ndcg_at_10 value: 48.449999999999996 - type: ndcg_at_100 value: 53.13 - type: ndcg_at_1000 value: 54.186 - type: ndcg_at_3 value: 40.983999999999995 - type: ndcg_at_5 value: 44.628 - type: precision_at_1 value: 32.068000000000005 - type: precision_at_10 value: 7.9750000000000005 - type: precision_at_100 value: 1.061 - type: precision_at_1000 value: 0.116 - type: precision_at_3 value: 18.404999999999998 - type: precision_at_5 value: 13.111 - type: recall_at_1 value: 28.616000000000003 - type: recall_at_10 value: 66.956 - type: recall_at_100 value: 87.657 - type: recall_at_1000 value: 95.548 - type: recall_at_3 value: 47.453 - type: recall_at_5 value: 55.87800000000001 - task: type: Classification dataset: name: MTEB PAC type: laugustyniak/abusive-clauses-pl config: default split: test revision: None metrics: - type: accuracy value: 69.04141326382856 - type: ap value: 77.47589122111044 - type: f1 value: 66.6332277374775 - task: type: PairClassification dataset: name: MTEB PPC type: PL-MTEB/ppc-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 86.4 - type: cos_sim_ap value: 94.1044939667201 - type: cos_sim_f1 value: 88.78048780487805 - type: cos_sim_precision value: 87.22044728434504 - type: cos_sim_recall value: 90.39735099337747 - type: dot_accuracy value: 86.4 - type: dot_ap value: 94.1044939667201 - type: dot_f1 value: 88.78048780487805 - type: dot_precision value: 87.22044728434504 - type: dot_recall value: 90.39735099337747 - type: euclidean_accuracy value: 86.4 - type: euclidean_ap value: 94.1044939667201 - type: euclidean_f1 value: 88.78048780487805 - type: euclidean_precision value: 87.22044728434504 - type: euclidean_recall value: 90.39735099337747 - type: manhattan_accuracy value: 86.4 - type: manhattan_ap value: 94.11438365697387 - type: manhattan_f1 value: 88.77968877968877 - type: manhattan_precision value: 87.84440842787681 - type: manhattan_recall value: 89.73509933774835 - type: max_accuracy value: 86.4 - type: max_ap value: 94.11438365697387 - type: max_f1 value: 88.78048780487805 - task: type: PairClassification dataset: name: MTEB PSC type: PL-MTEB/psc-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 97.86641929499072 - type: cos_sim_ap value: 99.36904211868182 - type: cos_sim_f1 value: 96.56203288490283 - type: cos_sim_precision value: 94.72140762463343 - type: cos_sim_recall value: 98.47560975609755 - type: dot_accuracy value: 97.86641929499072 - type: dot_ap value: 99.36904211868183 - type: dot_f1 value: 96.56203288490283 - type: dot_precision value: 94.72140762463343 - type: dot_recall value: 98.47560975609755 - type: euclidean_accuracy value: 97.86641929499072 - type: euclidean_ap value: 99.36904211868183 - type: euclidean_f1 value: 96.56203288490283 - type: euclidean_precision value: 94.72140762463343 - type: euclidean_recall value: 98.47560975609755 - type: manhattan_accuracy value: 98.14471243042672 - type: manhattan_ap value: 99.43359540492416 - type: manhattan_f1 value: 96.98795180722892 - type: manhattan_precision value: 95.83333333333334 - type: manhattan_recall value: 98.17073170731707 - type: max_accuracy value: 98.14471243042672 - type: max_ap value: 99.43359540492416 - type: max_f1 value: 96.98795180722892 - task: type: Classification dataset: name: MTEB PolEmo2.0-IN type: PL-MTEB/polemo2_in config: default split: test revision: None metrics: - type: accuracy value: 
89.39058171745152 - type: f1 value: 86.8552093529568 - task: type: Classification dataset: name: MTEB PolEmo2.0-OUT type: PL-MTEB/polemo2_out config: default split: test revision: None metrics: - type: accuracy value: 74.97975708502024 - type: f1 value: 58.73081628832407 - task: type: Retrieval dataset: name: MTEB Quora-PL type: clarin-knext/quora-pl config: default split: test revision: 0be27e93455051e531182b85e85e425aba12e9d4 metrics: - type: map_at_1 value: 64.917 - type: map_at_10 value: 78.74600000000001 - type: map_at_100 value: 79.501 - type: map_at_1000 value: 79.524 - type: map_at_3 value: 75.549 - type: map_at_5 value: 77.495 - type: mrr_at_1 value: 74.9 - type: mrr_at_10 value: 82.112 - type: mrr_at_100 value: 82.314 - type: mrr_at_1000 value: 82.317 - type: mrr_at_3 value: 80.745 - type: mrr_at_5 value: 81.607 - type: ndcg_at_1 value: 74.83999999999999 - type: ndcg_at_10 value: 83.214 - type: ndcg_at_100 value: 84.997 - type: ndcg_at_1000 value: 85.207 - type: ndcg_at_3 value: 79.547 - type: ndcg_at_5 value: 81.46600000000001 - type: precision_at_1 value: 74.83999999999999 - type: precision_at_10 value: 12.822 - type: precision_at_100 value: 1.506 - type: precision_at_1000 value: 0.156 - type: precision_at_3 value: 34.903 - type: precision_at_5 value: 23.16 - type: recall_at_1 value: 64.917 - type: recall_at_10 value: 92.27199999999999 - type: recall_at_100 value: 98.715 - type: recall_at_1000 value: 99.854 - type: recall_at_3 value: 82.04599999999999 - type: recall_at_5 value: 87.2 - task: type: Retrieval dataset: name: MTEB SCIDOCS-PL type: clarin-knext/scidocs-pl config: default split: test revision: 45452b03f05560207ef19149545f168e596c9337 metrics: - type: map_at_1 value: 3.51 - type: map_at_10 value: 9.046999999999999 - type: map_at_100 value: 10.823 - type: map_at_1000 value: 11.144 - type: map_at_3 value: 6.257 - type: map_at_5 value: 7.648000000000001 - type: mrr_at_1 value: 17.299999999999997 - type: mrr_at_10 value: 27.419 - type: mrr_at_100 value: 28.618 - type: mrr_at_1000 value: 28.685 - type: mrr_at_3 value: 23.817 - type: mrr_at_5 value: 25.927 - type: ndcg_at_1 value: 17.299999999999997 - type: ndcg_at_10 value: 16.084 - type: ndcg_at_100 value: 23.729 - type: ndcg_at_1000 value: 29.476999999999997 - type: ndcg_at_3 value: 14.327000000000002 - type: ndcg_at_5 value: 13.017999999999999 - type: precision_at_1 value: 17.299999999999997 - type: precision_at_10 value: 8.63 - type: precision_at_100 value: 1.981 - type: precision_at_1000 value: 0.336 - type: precision_at_3 value: 13.4 - type: precision_at_5 value: 11.700000000000001 - type: recall_at_1 value: 3.51 - type: recall_at_10 value: 17.518 - type: recall_at_100 value: 40.275 - type: recall_at_1000 value: 68.203 - type: recall_at_3 value: 8.155 - type: recall_at_5 value: 11.875 - task: type: PairClassification dataset: name: MTEB SICK-E-PL type: PL-MTEB/sicke-pl-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 86.30248675091724 - type: cos_sim_ap value: 83.6756734006714 - type: cos_sim_f1 value: 74.97367497367497 - type: cos_sim_precision value: 73.91003460207612 - type: cos_sim_recall value: 76.06837606837607 - type: dot_accuracy value: 86.30248675091724 - type: dot_ap value: 83.6756734006714 - type: dot_f1 value: 74.97367497367497 - type: dot_precision value: 73.91003460207612 - type: dot_recall value: 76.06837606837607 - type: euclidean_accuracy value: 86.30248675091724 - type: euclidean_ap value: 83.67566984333091 - type: euclidean_f1 value: 
74.97367497367497 - type: euclidean_precision value: 73.91003460207612 - type: euclidean_recall value: 76.06837606837607 - type: manhattan_accuracy value: 86.28210354667753 - type: manhattan_ap value: 83.64216119130171 - type: manhattan_f1 value: 74.92152075340078 - type: manhattan_precision value: 73.4107997265892 - type: manhattan_recall value: 76.49572649572649 - type: max_accuracy value: 86.30248675091724 - type: max_ap value: 83.6756734006714 - type: max_f1 value: 74.97367497367497 - task: type: STS dataset: name: MTEB SICK-R-PL type: PL-MTEB/sickr-pl-sts config: default split: test revision: None metrics: - type: cos_sim_pearson value: 82.23295940859121 - type: cos_sim_spearman value: 78.89329160768719 - type: euclidean_pearson value: 79.56019107076818 - type: euclidean_spearman value: 78.89330209904084 - type: manhattan_pearson value: 79.76098513973719 - type: manhattan_spearman value: 79.05490162570123 - task: type: STS dataset: name: MTEB STS22 (pl) type: mteb/sts22-crosslingual-sts config: pl split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 37.732606308062486 - type: cos_sim_spearman value: 41.01645667030284 - type: euclidean_pearson value: 26.61722556367085 - type: euclidean_spearman value: 41.01645667030284 - type: manhattan_pearson value: 26.60917378970807 - type: manhattan_spearman value: 41.51335727617614 - task: type: Retrieval dataset: name: MTEB SciFact-PL type: clarin-knext/scifact-pl config: default split: test revision: 47932a35f045ef8ed01ba82bf9ff67f6e109207e metrics: - type: map_at_1 value: 54.31700000000001 - type: map_at_10 value: 65.564 - type: map_at_100 value: 66.062 - type: map_at_1000 value: 66.08699999999999 - type: map_at_3 value: 62.592999999999996 - type: map_at_5 value: 63.888 - type: mrr_at_1 value: 56.99999999999999 - type: mrr_at_10 value: 66.412 - type: mrr_at_100 value: 66.85900000000001 - type: mrr_at_1000 value: 66.88 - type: mrr_at_3 value: 64.22200000000001 - type: mrr_at_5 value: 65.206 - type: ndcg_at_1 value: 56.99999999999999 - type: ndcg_at_10 value: 70.577 - type: ndcg_at_100 value: 72.879 - type: ndcg_at_1000 value: 73.45 - type: ndcg_at_3 value: 65.5 - type: ndcg_at_5 value: 67.278 - type: precision_at_1 value: 56.99999999999999 - type: precision_at_10 value: 9.667 - type: precision_at_100 value: 1.083 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 26.0 - type: precision_at_5 value: 16.933 - type: recall_at_1 value: 54.31700000000001 - type: recall_at_10 value: 85.056 - type: recall_at_100 value: 95.667 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 71.0 - type: recall_at_5 value: 75.672 - task: type: Retrieval dataset: name: MTEB TRECCOVID-PL type: clarin-knext/trec-covid-pl config: default split: test revision: 81bcb408f33366c2a20ac54adafad1ae7e877fdd metrics: - type: map_at_1 value: 0.245 - type: map_at_10 value: 2.051 - type: map_at_100 value: 12.009 - type: map_at_1000 value: 27.448 - type: map_at_3 value: 0.721 - type: map_at_5 value: 1.13 - type: mrr_at_1 value: 88.0 - type: mrr_at_10 value: 93.0 - type: mrr_at_100 value: 93.0 - type: mrr_at_1000 value: 93.0 - type: mrr_at_3 value: 93.0 - type: mrr_at_5 value: 93.0 - type: ndcg_at_1 value: 85.0 - type: ndcg_at_10 value: 80.303 - type: ndcg_at_100 value: 61.23499999999999 - type: ndcg_at_1000 value: 52.978 - type: ndcg_at_3 value: 84.419 - type: ndcg_at_5 value: 82.976 - type: precision_at_1 value: 88.0 - type: precision_at_10 value: 83.39999999999999 - type: precision_at_100 
value: 61.96 - type: precision_at_1000 value: 22.648 - type: precision_at_3 value: 89.333 - type: precision_at_5 value: 87.2 - type: recall_at_1 value: 0.245 - type: recall_at_10 value: 2.193 - type: recall_at_100 value: 14.938 - type: recall_at_1000 value: 48.563 - type: recall_at_3 value: 0.738 - type: recall_at_5 value: 1.173 --- # mirekphd/gte-Qwen2-7B-instruct-Q2_K-GGUF This model was converted to GGUF format from [`Alibaba-NLP/gte-Qwen2-7B-instruct`](https://huggingface.co/Alibaba-NLP/gte-Qwen2-7B-instruct) using llama.cpp via ggml.ai's [GGUF-my-repo](https://huggingface.co/spaces/ggml-org/gguf-my-repo) space. Refer to the [original model card](https://huggingface.co/Alibaba-NLP/gte-Qwen2-7B-instruct) for more details on the model. ## Use with llama.cpp Install llama.cpp through brew (works on Mac and Linux): ```bash brew install llama.cpp ``` Invoke the llama.cpp server or the CLI. ### CLI: ```bash llama-cli --hf-repo mirekphd/gte-Qwen2-7B-instruct-Q2_K-GGUF --hf-file gte-qwen2-7b-instruct-q2_k.gguf -p "The meaning to life and the universe is" ``` ### Server: ```bash llama-server --hf-repo mirekphd/gte-Qwen2-7B-instruct-Q2_K-GGUF --hf-file gte-qwen2-7b-instruct-q2_k.gguf -c 2048 ``` Note: You can also use this checkpoint directly through the [usage steps](https://github.com/ggerganov/llama.cpp?tab=readme-ov-file#usage) listed in the llama.cpp repo. Step 1: Clone llama.cpp from GitHub. ``` git clone https://github.com/ggerganov/llama.cpp ``` Step 2: Move into the llama.cpp folder and build it with the `LLAMA_CURL=1` flag along with any other hardware-specific flags (e.g. `LLAMA_CUDA=1` for Nvidia GPUs on Linux). ``` cd llama.cpp && LLAMA_CURL=1 make ``` Step 3: Run inference through the main binary. ``` ./llama-cli --hf-repo mirekphd/gte-Qwen2-7B-instruct-Q2_K-GGUF --hf-file gte-qwen2-7b-instruct-q2_k.gguf -p "The meaning to life and the universe is" ``` or ``` ./llama-server --hf-repo mirekphd/gte-Qwen2-7B-instruct-Q2_K-GGUF --hf-file gte-qwen2-7b-instruct-q2_k.gguf -c 2048 ```
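Because the upstream gte-Qwen2-7B-instruct checkpoint is an embedding model rather than a text-generation model, a minimal sketch of retrieving embeddings from a running llama.cpp server is added below. This is an assumption, not part of the original card: the `--embedding` server flag, the default port (8080), and the `/v1/embeddings` route may differ between llama.cpp versions.

```python
import json
import urllib.request

# Sketch (assumption, not from the original card): query the llama.cpp server's
# OpenAI-compatible embeddings endpoint. Start the server first with something like:
#   llama-server --hf-repo mirekphd/gte-Qwen2-7B-instruct-Q2_K-GGUF \
#                --hf-file gte-qwen2-7b-instruct-q2_k.gguf -c 2048 --embedding
req = urllib.request.Request(
    "http://localhost:8080/v1/embeddings",
    data=json.dumps({"input": "What is the capital of China?"}).encode("utf-8"),
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    # OpenAI-style response: {"data": [{"embedding": [...], ...}], ...}
    embedding = json.loads(resp.read())["data"][0]["embedding"]
print(len(embedding))  # dimensionality of the returned vector
```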
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
NoaiGPT/777
NoaiGPT
text2text-generation
[ "transformers", "pytorch", "t5", "text2text-generation", "license:openrail", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
2024-07-02T14:35:45
2024-07-02T15:25:48
108
0
--- license: openrail inference: parameters: num_beams: 3 num_beam_groups: 3 num_return_sequences: 1 repetition_penalty: 3 diversity_penalty: 3.01 no_repeat_ngram_size: 2 temperature: 0.8 max_length: 64 widget: - text: 'paraphraser: Software engineering is the discipline of designing, developing, testing, and maintaining software applications. It involves using programming languages, algorithms, and tools to create reliable and efficient software solutions. Key practices include requirements analysis, system architecture, code implementation, and quality assurance, ensuring software meets user needs and performs optimally.' example_title: AWS course - text: 'paraphraser: In healthcare, Generative AI can help generate synthetic medical data to train machine learning models, develop new drug candidates, and design clinical trials.' example_title: Generative AI - text: 'paraphraser: By leveraging prior model training through transfer learning, fine-tuning can reduce the amount of expensive computing power and labeled data needed to obtain large models tailored to niche use cases and business needs.' example_title: Fine Tuning --- # Text Rewriter Paraphraser This repository contains a fine-tuned text-rewriting model based on T5-Base (223M parameters). ## Key Features: * **Fine-tuned on t5-base:** Leverages the power of a pre-trained text-to-text transfer model for effective paraphrasing. * **Large Dataset (430k examples):** Trained on a comprehensive dataset combining three open-source sources and cleaned using various techniques for optimal performance. * **High-Quality Paraphrases:** Generates paraphrases that significantly alter sentence structure while maintaining accuracy and factual correctness. * **Non-AI Detectable:** Aims to produce paraphrases that appear natural and indistinguishable from human-written text. **Model Performance:** * Train Loss: 1.0645 * Validation Loss: 0.8761 ## Getting Started: T5 models expect a task-related prefix; since this is a paraphrasing task, we add the prefix "paraphraser: " ```python from transformers import AutoTokenizer, AutoModelForSeq2SeqLM device = "cuda" tokenizer = AutoTokenizer.from_pretrained("NoaiGPT/777", token='your_token') model = AutoModelForSeq2SeqLM.from_pretrained("NoaiGPT/777", token='your_token').to(device) def generate_title(text): input_ids = tokenizer(f'paraphraser: {text}', return_tensors="pt", padding="longest", truncation=True, max_length=64).input_ids.to(device) outputs = model.generate( input_ids, num_beams=4, num_beam_groups=4, num_return_sequences=4, repetition_penalty=10.0, diversity_penalty=3.0, no_repeat_ngram_size=2, temperature=0.8, max_length=64 ) return tokenizer.batch_decode(outputs, skip_special_tokens=True) text = 'By leveraging prior model training through transfer learning, fine-tuning can reduce the amount of expensive computing power and labeled data needed to obtain large models tailored to niche use cases and business needs.'
generate_title(text) ``` ### Output: ``` ['The fine-tuning can reduce the amount of expensive computing power and labeled data required to obtain large models adapted for niche use cases and business needs by using prior model training through transfer learning.', 'fine-tuning, by utilizing prior model training through transfer learning, can reduce the amount of expensive computing power and labeled data required to obtain large models tailored for niche use cases and business needs.', 'Fine-tunering by using prior model training through transfer learning can reduce the amount of expensive computing power and labeled data required to obtain large models adapted for niche use cases and business needs.', 'Using transfer learning to use prior model training, fine-tuning can reduce the amount of expensive computing power and labeled data required for large models that are suitable in niche usage cases or businesses.'] ```
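For quick experiments, a shorter variant using the `transformers` text2text-generation pipeline is sketched below. This is an assumption rather than part of the original card; the generation parameters mirror the widget configuration in the front matter, and the `token` argument from the example above may still be needed if the repository is gated.

```python
from transformers import pipeline

# Minimal sketch (assumed usage, not from the original card): the text2text-generation
# pipeline forwards generation kwargs straight to model.generate().
paraphraser = pipeline(
    "text2text-generation",
    model="NoaiGPT/777",
    # token="your_token",  # uncomment if the repository requires authentication
)

text = (
    "By leveraging prior model training through transfer learning, fine-tuning can reduce "
    "the amount of expensive computing power and labeled data needed to obtain large models "
    "tailored to niche use cases and business needs."
)

outputs = paraphraser(
    f"paraphraser: {text}",
    num_beams=3,
    num_beam_groups=3,        # diverse beam search, as in the widget config
    num_return_sequences=1,
    repetition_penalty=3.0,
    diversity_penalty=3.01,
    no_repeat_ngram_size=2,
    max_length=64,
)
print(outputs[0]["generated_text"])
```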
[ "PARAPHRASING" ]
[ "MEDICAL DATA" ]
smcleod/mxbai-embed-large-v1-Q8_0-GGUF
smcleod
feature-extraction
[ "sentence-transformers", "gguf", "mteb", "transformers.js", "transformers", "llama-cpp", "gguf-my-repo", "feature-extraction", "en", "base_model:mixedbread-ai/mxbai-embed-large-v1", "base_model:quantized:mixedbread-ai/mxbai-embed-large-v1", "license:apache-2.0", "model-index", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2024-11-19T22:28:44
2024-11-19T22:28:48
108
0
--- base_model: mixedbread-ai/mxbai-embed-large-v1 language: - en library_name: sentence-transformers license: apache-2.0 pipeline_tag: feature-extraction tags: - mteb - transformers.js - transformers - llama-cpp - gguf-my-repo model-index: - name: mxbai-angle-large-v1 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 75.044776119403 - type: ap value: 37.7362433623053 - type: f1 value: 68.92736573359774 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 93.84025000000001 - type: ap value: 90.93190875404055 - type: f1 value: 93.8297833897293 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 49.184 - type: f1 value: 48.74163227751588 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 41.252 - type: map_at_10 value: 57.778 - type: map_at_100 value: 58.233000000000004 - type: map_at_1000 value: 58.23700000000001 - type: map_at_3 value: 53.449999999999996 - type: map_at_5 value: 56.376000000000005 - type: mrr_at_1 value: 41.679 - type: mrr_at_10 value: 57.92699999999999 - type: mrr_at_100 value: 58.389 - type: mrr_at_1000 value: 58.391999999999996 - type: mrr_at_3 value: 53.651 - type: mrr_at_5 value: 56.521 - type: ndcg_at_1 value: 41.252 - type: ndcg_at_10 value: 66.018 - type: ndcg_at_100 value: 67.774 - type: ndcg_at_1000 value: 67.84400000000001 - type: ndcg_at_3 value: 57.372 - type: ndcg_at_5 value: 62.646 - type: precision_at_1 value: 41.252 - type: precision_at_10 value: 9.189 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 22.902 - type: precision_at_5 value: 16.302 - type: recall_at_1 value: 41.252 - type: recall_at_10 value: 91.892 - type: recall_at_100 value: 99.14699999999999 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 68.706 - type: recall_at_5 value: 81.50800000000001 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 48.97294504317859 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 42.98071077674629 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 65.16477858490782 - type: mrr value: 78.23583080508287 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 89.6277629421789 - type: cos_sim_spearman value: 88.4056288400568 - type: euclidean_pearson value: 87.94871847578163 - type: euclidean_spearman value: 88.4056288400568 - type: manhattan_pearson value: 87.73271254229648 - type: 
manhattan_spearman value: 87.91826833762677 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 87.81818181818181 - type: f1 value: 87.79879337316918 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 39.91773608582761 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 36.73059477462478 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 32.745999999999995 - type: map_at_10 value: 43.632 - type: map_at_100 value: 45.206 - type: map_at_1000 value: 45.341 - type: map_at_3 value: 39.956 - type: map_at_5 value: 42.031 - type: mrr_at_1 value: 39.485 - type: mrr_at_10 value: 49.537 - type: mrr_at_100 value: 50.249 - type: mrr_at_1000 value: 50.294000000000004 - type: mrr_at_3 value: 46.757 - type: mrr_at_5 value: 48.481 - type: ndcg_at_1 value: 39.485 - type: ndcg_at_10 value: 50.058 - type: ndcg_at_100 value: 55.586 - type: ndcg_at_1000 value: 57.511 - type: ndcg_at_3 value: 44.786 - type: ndcg_at_5 value: 47.339999999999996 - type: precision_at_1 value: 39.485 - type: precision_at_10 value: 9.557 - type: precision_at_100 value: 1.552 - type: precision_at_1000 value: 0.202 - type: precision_at_3 value: 21.412 - type: precision_at_5 value: 15.479000000000001 - type: recall_at_1 value: 32.745999999999995 - type: recall_at_10 value: 62.056 - type: recall_at_100 value: 85.088 - type: recall_at_1000 value: 96.952 - type: recall_at_3 value: 46.959 - type: recall_at_5 value: 54.06999999999999 - type: map_at_1 value: 31.898 - type: map_at_10 value: 42.142 - type: map_at_100 value: 43.349 - type: map_at_1000 value: 43.483 - type: map_at_3 value: 39.18 - type: map_at_5 value: 40.733000000000004 - type: mrr_at_1 value: 39.617999999999995 - type: mrr_at_10 value: 47.922 - type: mrr_at_100 value: 48.547000000000004 - type: mrr_at_1000 value: 48.597 - type: mrr_at_3 value: 45.86 - type: mrr_at_5 value: 46.949000000000005 - type: ndcg_at_1 value: 39.617999999999995 - type: ndcg_at_10 value: 47.739 - type: ndcg_at_100 value: 51.934999999999995 - type: ndcg_at_1000 value: 54.007000000000005 - type: ndcg_at_3 value: 43.748 - type: ndcg_at_5 value: 45.345 - type: precision_at_1 value: 39.617999999999995 - type: precision_at_10 value: 8.962 - type: precision_at_100 value: 1.436 - type: precision_at_1000 value: 0.192 - type: precision_at_3 value: 21.083 - type: precision_at_5 value: 14.752 - type: recall_at_1 value: 31.898 - type: recall_at_10 value: 57.587999999999994 - type: recall_at_100 value: 75.323 - type: recall_at_1000 value: 88.304 - type: recall_at_3 value: 45.275 - type: recall_at_5 value: 49.99 - type: map_at_1 value: 40.458 - type: map_at_10 value: 52.942 - type: map_at_100 value: 53.974 - type: map_at_1000 value: 54.031 - type: map_at_3 value: 49.559999999999995 - type: map_at_5 value: 51.408 - type: mrr_at_1 value: 46.27 - type: mrr_at_10 value: 56.31699999999999 - type: mrr_at_100 value: 56.95099999999999 - type: mrr_at_1000 value: 56.98 - type: mrr_at_3 value: 53.835 - type: mrr_at_5 value: 55.252 - type: 
ndcg_at_1 value: 46.27 - type: ndcg_at_10 value: 58.964000000000006 - type: ndcg_at_100 value: 62.875 - type: ndcg_at_1000 value: 63.969 - type: ndcg_at_3 value: 53.297000000000004 - type: ndcg_at_5 value: 55.938 - type: precision_at_1 value: 46.27 - type: precision_at_10 value: 9.549000000000001 - type: precision_at_100 value: 1.2409999999999999 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_3 value: 23.762 - type: precision_at_5 value: 16.262999999999998 - type: recall_at_1 value: 40.458 - type: recall_at_10 value: 73.446 - type: recall_at_100 value: 90.12400000000001 - type: recall_at_1000 value: 97.795 - type: recall_at_3 value: 58.123000000000005 - type: recall_at_5 value: 64.68 - type: map_at_1 value: 27.443 - type: map_at_10 value: 36.081 - type: map_at_100 value: 37.163000000000004 - type: map_at_1000 value: 37.232 - type: map_at_3 value: 33.308 - type: map_at_5 value: 34.724 - type: mrr_at_1 value: 29.492 - type: mrr_at_10 value: 38.138 - type: mrr_at_100 value: 39.065 - type: mrr_at_1000 value: 39.119 - type: mrr_at_3 value: 35.593 - type: mrr_at_5 value: 36.785000000000004 - type: ndcg_at_1 value: 29.492 - type: ndcg_at_10 value: 41.134 - type: ndcg_at_100 value: 46.300999999999995 - type: ndcg_at_1000 value: 48.106 - type: ndcg_at_3 value: 35.77 - type: ndcg_at_5 value: 38.032 - type: precision_at_1 value: 29.492 - type: precision_at_10 value: 6.249 - type: precision_at_100 value: 0.9299999999999999 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 15.065999999999999 - type: precision_at_5 value: 10.373000000000001 - type: recall_at_1 value: 27.443 - type: recall_at_10 value: 54.80199999999999 - type: recall_at_100 value: 78.21900000000001 - type: recall_at_1000 value: 91.751 - type: recall_at_3 value: 40.211000000000006 - type: recall_at_5 value: 45.599000000000004 - type: map_at_1 value: 18.731 - type: map_at_10 value: 26.717999999999996 - type: map_at_100 value: 27.897 - type: map_at_1000 value: 28.029 - type: map_at_3 value: 23.91 - type: map_at_5 value: 25.455 - type: mrr_at_1 value: 23.134 - type: mrr_at_10 value: 31.769 - type: mrr_at_100 value: 32.634 - type: mrr_at_1000 value: 32.707 - type: mrr_at_3 value: 28.938999999999997 - type: mrr_at_5 value: 30.531000000000002 - type: ndcg_at_1 value: 23.134 - type: ndcg_at_10 value: 32.249 - type: ndcg_at_100 value: 37.678 - type: ndcg_at_1000 value: 40.589999999999996 - type: ndcg_at_3 value: 26.985999999999997 - type: ndcg_at_5 value: 29.457 - type: precision_at_1 value: 23.134 - type: precision_at_10 value: 5.8709999999999996 - type: precision_at_100 value: 0.988 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_3 value: 12.852 - type: precision_at_5 value: 9.428 - type: recall_at_1 value: 18.731 - type: recall_at_10 value: 44.419 - type: recall_at_100 value: 67.851 - type: recall_at_1000 value: 88.103 - type: recall_at_3 value: 29.919 - type: recall_at_5 value: 36.230000000000004 - type: map_at_1 value: 30.324 - type: map_at_10 value: 41.265 - type: map_at_100 value: 42.559000000000005 - type: map_at_1000 value: 42.669000000000004 - type: map_at_3 value: 38.138 - type: map_at_5 value: 39.881 - type: mrr_at_1 value: 36.67 - type: mrr_at_10 value: 46.774 - type: mrr_at_100 value: 47.554 - type: mrr_at_1000 value: 47.593 - type: mrr_at_3 value: 44.338 - type: mrr_at_5 value: 45.723 - type: ndcg_at_1 value: 36.67 - type: ndcg_at_10 value: 47.367 - type: ndcg_at_100 value: 52.623 - type: ndcg_at_1000 value: 54.59 - type: ndcg_at_3 value: 42.323 
- type: ndcg_at_5 value: 44.727 - type: precision_at_1 value: 36.67 - type: precision_at_10 value: 8.518 - type: precision_at_100 value: 1.2890000000000001 - type: precision_at_1000 value: 0.163 - type: precision_at_3 value: 19.955000000000002 - type: precision_at_5 value: 14.11 - type: recall_at_1 value: 30.324 - type: recall_at_10 value: 59.845000000000006 - type: recall_at_100 value: 81.77499999999999 - type: recall_at_1000 value: 94.463 - type: recall_at_3 value: 46.019 - type: recall_at_5 value: 52.163000000000004 - type: map_at_1 value: 24.229 - type: map_at_10 value: 35.004000000000005 - type: map_at_100 value: 36.409000000000006 - type: map_at_1000 value: 36.521 - type: map_at_3 value: 31.793 - type: map_at_5 value: 33.432 - type: mrr_at_1 value: 30.365 - type: mrr_at_10 value: 40.502 - type: mrr_at_100 value: 41.372 - type: mrr_at_1000 value: 41.435 - type: mrr_at_3 value: 37.804 - type: mrr_at_5 value: 39.226 - type: ndcg_at_1 value: 30.365 - type: ndcg_at_10 value: 41.305 - type: ndcg_at_100 value: 47.028999999999996 - type: ndcg_at_1000 value: 49.375 - type: ndcg_at_3 value: 35.85 - type: ndcg_at_5 value: 38.12 - type: precision_at_1 value: 30.365 - type: precision_at_10 value: 7.808 - type: precision_at_100 value: 1.228 - type: precision_at_1000 value: 0.161 - type: precision_at_3 value: 17.352 - type: precision_at_5 value: 12.42 - type: recall_at_1 value: 24.229 - type: recall_at_10 value: 54.673 - type: recall_at_100 value: 78.766 - type: recall_at_1000 value: 94.625 - type: recall_at_3 value: 39.602 - type: recall_at_5 value: 45.558 - type: map_at_1 value: 26.695 - type: map_at_10 value: 36.0895 - type: map_at_100 value: 37.309416666666664 - type: map_at_1000 value: 37.42558333333334 - type: map_at_3 value: 33.19616666666666 - type: map_at_5 value: 34.78641666666667 - type: mrr_at_1 value: 31.486083333333337 - type: mrr_at_10 value: 40.34774999999999 - type: mrr_at_100 value: 41.17533333333333 - type: mrr_at_1000 value: 41.231583333333326 - type: mrr_at_3 value: 37.90075 - type: mrr_at_5 value: 39.266999999999996 - type: ndcg_at_1 value: 31.486083333333337 - type: ndcg_at_10 value: 41.60433333333334 - type: ndcg_at_100 value: 46.74525 - type: ndcg_at_1000 value: 48.96166666666667 - type: ndcg_at_3 value: 36.68825 - type: ndcg_at_5 value: 38.966499999999996 - type: precision_at_1 value: 31.486083333333337 - type: precision_at_10 value: 7.29675 - type: precision_at_100 value: 1.1621666666666666 - type: precision_at_1000 value: 0.1545 - type: precision_at_3 value: 16.8815 - type: precision_at_5 value: 11.974583333333333 - type: recall_at_1 value: 26.695 - type: recall_at_10 value: 53.651916666666665 - type: recall_at_100 value: 76.12083333333332 - type: recall_at_1000 value: 91.31191666666668 - type: recall_at_3 value: 40.03575 - type: recall_at_5 value: 45.876666666666665 - type: map_at_1 value: 25.668000000000003 - type: map_at_10 value: 32.486 - type: map_at_100 value: 33.371 - type: map_at_1000 value: 33.458 - type: map_at_3 value: 30.261 - type: map_at_5 value: 31.418000000000003 - type: mrr_at_1 value: 28.988000000000003 - type: mrr_at_10 value: 35.414 - type: mrr_at_100 value: 36.149 - type: mrr_at_1000 value: 36.215 - type: mrr_at_3 value: 33.333 - type: mrr_at_5 value: 34.43 - type: ndcg_at_1 value: 28.988000000000003 - type: ndcg_at_10 value: 36.732 - type: ndcg_at_100 value: 41.331 - type: ndcg_at_1000 value: 43.575 - type: ndcg_at_3 value: 32.413 - type: ndcg_at_5 value: 34.316 - type: precision_at_1 value: 28.988000000000003 - type: precision_at_10 value: 
5.7059999999999995 - type: precision_at_100 value: 0.882 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 13.65 - type: precision_at_5 value: 9.417 - type: recall_at_1 value: 25.668000000000003 - type: recall_at_10 value: 47.147 - type: recall_at_100 value: 68.504 - type: recall_at_1000 value: 85.272 - type: recall_at_3 value: 35.19 - type: recall_at_5 value: 39.925 - type: map_at_1 value: 17.256 - type: map_at_10 value: 24.58 - type: map_at_100 value: 25.773000000000003 - type: map_at_1000 value: 25.899 - type: map_at_3 value: 22.236 - type: map_at_5 value: 23.507 - type: mrr_at_1 value: 20.957 - type: mrr_at_10 value: 28.416000000000004 - type: mrr_at_100 value: 29.447000000000003 - type: mrr_at_1000 value: 29.524 - type: mrr_at_3 value: 26.245 - type: mrr_at_5 value: 27.451999999999998 - type: ndcg_at_1 value: 20.957 - type: ndcg_at_10 value: 29.285 - type: ndcg_at_100 value: 35.003 - type: ndcg_at_1000 value: 37.881 - type: ndcg_at_3 value: 25.063000000000002 - type: ndcg_at_5 value: 26.983 - type: precision_at_1 value: 20.957 - type: precision_at_10 value: 5.344 - type: precision_at_100 value: 0.958 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_3 value: 11.918 - type: precision_at_5 value: 8.596 - type: recall_at_1 value: 17.256 - type: recall_at_10 value: 39.644 - type: recall_at_100 value: 65.279 - type: recall_at_1000 value: 85.693 - type: recall_at_3 value: 27.825 - type: recall_at_5 value: 32.792 - type: map_at_1 value: 26.700000000000003 - type: map_at_10 value: 36.205999999999996 - type: map_at_100 value: 37.316 - type: map_at_1000 value: 37.425000000000004 - type: map_at_3 value: 33.166000000000004 - type: map_at_5 value: 35.032999999999994 - type: mrr_at_1 value: 31.436999999999998 - type: mrr_at_10 value: 40.61 - type: mrr_at_100 value: 41.415 - type: mrr_at_1000 value: 41.48 - type: mrr_at_3 value: 37.966 - type: mrr_at_5 value: 39.599000000000004 - type: ndcg_at_1 value: 31.436999999999998 - type: ndcg_at_10 value: 41.771 - type: ndcg_at_100 value: 46.784 - type: ndcg_at_1000 value: 49.183 - type: ndcg_at_3 value: 36.437000000000005 - type: ndcg_at_5 value: 39.291 - type: precision_at_1 value: 31.436999999999998 - type: precision_at_10 value: 6.987 - type: precision_at_100 value: 1.072 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 16.448999999999998 - type: precision_at_5 value: 11.866 - type: recall_at_1 value: 26.700000000000003 - type: recall_at_10 value: 54.301 - type: recall_at_100 value: 75.871 - type: recall_at_1000 value: 92.529 - type: recall_at_3 value: 40.201 - type: recall_at_5 value: 47.208 - type: map_at_1 value: 24.296 - type: map_at_10 value: 33.116 - type: map_at_100 value: 34.81 - type: map_at_1000 value: 35.032000000000004 - type: map_at_3 value: 30.105999999999998 - type: map_at_5 value: 31.839000000000002 - type: mrr_at_1 value: 29.051 - type: mrr_at_10 value: 37.803 - type: mrr_at_100 value: 38.856 - type: mrr_at_1000 value: 38.903999999999996 - type: mrr_at_3 value: 35.211 - type: mrr_at_5 value: 36.545 - type: ndcg_at_1 value: 29.051 - type: ndcg_at_10 value: 39.007 - type: ndcg_at_100 value: 45.321 - type: ndcg_at_1000 value: 47.665 - type: ndcg_at_3 value: 34.1 - type: ndcg_at_5 value: 36.437000000000005 - type: precision_at_1 value: 29.051 - type: precision_at_10 value: 7.668 - type: precision_at_100 value: 1.542 - type: precision_at_1000 value: 0.24 - type: precision_at_3 value: 16.14 - type: precision_at_5 value: 11.897 - type: recall_at_1 value: 
24.296 - type: recall_at_10 value: 49.85 - type: recall_at_100 value: 78.457 - type: recall_at_1000 value: 92.618 - type: recall_at_3 value: 36.138999999999996 - type: recall_at_5 value: 42.223 - type: map_at_1 value: 20.591 - type: map_at_10 value: 28.902 - type: map_at_100 value: 29.886000000000003 - type: map_at_1000 value: 29.987000000000002 - type: map_at_3 value: 26.740000000000002 - type: map_at_5 value: 27.976 - type: mrr_at_1 value: 22.366 - type: mrr_at_10 value: 30.971 - type: mrr_at_100 value: 31.865 - type: mrr_at_1000 value: 31.930999999999997 - type: mrr_at_3 value: 28.927999999999997 - type: mrr_at_5 value: 30.231 - type: ndcg_at_1 value: 22.366 - type: ndcg_at_10 value: 33.641 - type: ndcg_at_100 value: 38.477 - type: ndcg_at_1000 value: 41.088 - type: ndcg_at_3 value: 29.486 - type: ndcg_at_5 value: 31.612000000000002 - type: precision_at_1 value: 22.366 - type: precision_at_10 value: 5.3420000000000005 - type: precision_at_100 value: 0.828 - type: precision_at_1000 value: 0.11800000000000001 - type: precision_at_3 value: 12.939 - type: precision_at_5 value: 9.094 - type: recall_at_1 value: 20.591 - type: recall_at_10 value: 46.052 - type: recall_at_100 value: 68.193 - type: recall_at_1000 value: 87.638 - type: recall_at_3 value: 34.966 - type: recall_at_5 value: 40.082 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 15.091 - type: map_at_10 value: 26.38 - type: map_at_100 value: 28.421999999999997 - type: map_at_1000 value: 28.621999999999996 - type: map_at_3 value: 21.597 - type: map_at_5 value: 24.12 - type: mrr_at_1 value: 34.266999999999996 - type: mrr_at_10 value: 46.864 - type: mrr_at_100 value: 47.617 - type: mrr_at_1000 value: 47.644 - type: mrr_at_3 value: 43.312 - type: mrr_at_5 value: 45.501000000000005 - type: ndcg_at_1 value: 34.266999999999996 - type: ndcg_at_10 value: 36.095 - type: ndcg_at_100 value: 43.447 - type: ndcg_at_1000 value: 46.661 - type: ndcg_at_3 value: 29.337999999999997 - type: ndcg_at_5 value: 31.824 - type: precision_at_1 value: 34.266999999999996 - type: precision_at_10 value: 11.472 - type: precision_at_100 value: 1.944 - type: precision_at_1000 value: 0.255 - type: precision_at_3 value: 21.933 - type: precision_at_5 value: 17.224999999999998 - type: recall_at_1 value: 15.091 - type: recall_at_10 value: 43.022 - type: recall_at_100 value: 68.075 - type: recall_at_1000 value: 85.76 - type: recall_at_3 value: 26.564 - type: recall_at_5 value: 33.594 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 9.252 - type: map_at_10 value: 20.923 - type: map_at_100 value: 30.741000000000003 - type: map_at_1000 value: 32.542 - type: map_at_3 value: 14.442 - type: map_at_5 value: 17.399 - type: mrr_at_1 value: 70.25 - type: mrr_at_10 value: 78.17 - type: mrr_at_100 value: 78.444 - type: mrr_at_1000 value: 78.45100000000001 - type: mrr_at_3 value: 76.958 - type: mrr_at_5 value: 77.571 - type: ndcg_at_1 value: 58.375 - type: ndcg_at_10 value: 44.509 - type: ndcg_at_100 value: 49.897999999999996 - type: ndcg_at_1000 value: 57.269999999999996 - type: ndcg_at_3 value: 48.64 - type: ndcg_at_5 value: 46.697 - type: precision_at_1 value: 70.25 - type: precision_at_10 value: 36.05 - type: precision_at_100 value: 11.848 - type: precision_at_1000 value: 2.213 - type: precision_at_3 value: 52.917 - type: precision_at_5 value: 45.7 - type: recall_at_1 value: 9.252 
- type: recall_at_10 value: 27.006999999999998 - type: recall_at_100 value: 57.008 - type: recall_at_1000 value: 80.697 - type: recall_at_3 value: 15.798000000000002 - type: recall_at_5 value: 20.4 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 50.88 - type: f1 value: 45.545495028653384 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 75.424 - type: map_at_10 value: 83.435 - type: map_at_100 value: 83.66900000000001 - type: map_at_1000 value: 83.685 - type: map_at_3 value: 82.39800000000001 - type: map_at_5 value: 83.07 - type: mrr_at_1 value: 81.113 - type: mrr_at_10 value: 87.77199999999999 - type: mrr_at_100 value: 87.862 - type: mrr_at_1000 value: 87.86500000000001 - type: mrr_at_3 value: 87.17099999999999 - type: mrr_at_5 value: 87.616 - type: ndcg_at_1 value: 81.113 - type: ndcg_at_10 value: 86.909 - type: ndcg_at_100 value: 87.746 - type: ndcg_at_1000 value: 88.017 - type: ndcg_at_3 value: 85.368 - type: ndcg_at_5 value: 86.28099999999999 - type: precision_at_1 value: 81.113 - type: precision_at_10 value: 10.363 - type: precision_at_100 value: 1.102 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_3 value: 32.507999999999996 - type: precision_at_5 value: 20.138 - type: recall_at_1 value: 75.424 - type: recall_at_10 value: 93.258 - type: recall_at_100 value: 96.545 - type: recall_at_1000 value: 98.284 - type: recall_at_3 value: 89.083 - type: recall_at_5 value: 91.445 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 22.532 - type: map_at_10 value: 37.141999999999996 - type: map_at_100 value: 39.162 - type: map_at_1000 value: 39.322 - type: map_at_3 value: 32.885 - type: map_at_5 value: 35.093999999999994 - type: mrr_at_1 value: 44.29 - type: mrr_at_10 value: 53.516 - type: mrr_at_100 value: 54.24 - type: mrr_at_1000 value: 54.273 - type: mrr_at_3 value: 51.286 - type: mrr_at_5 value: 52.413 - type: ndcg_at_1 value: 44.29 - type: ndcg_at_10 value: 45.268 - type: ndcg_at_100 value: 52.125 - type: ndcg_at_1000 value: 54.778000000000006 - type: ndcg_at_3 value: 41.829 - type: ndcg_at_5 value: 42.525 - type: precision_at_1 value: 44.29 - type: precision_at_10 value: 12.5 - type: precision_at_100 value: 1.9720000000000002 - type: precision_at_1000 value: 0.245 - type: precision_at_3 value: 28.035 - type: precision_at_5 value: 20.093 - type: recall_at_1 value: 22.532 - type: recall_at_10 value: 52.419000000000004 - type: recall_at_100 value: 77.43299999999999 - type: recall_at_1000 value: 93.379 - type: recall_at_3 value: 38.629000000000005 - type: recall_at_5 value: 43.858000000000004 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 39.359 - type: map_at_10 value: 63.966 - type: map_at_100 value: 64.87 - type: map_at_1000 value: 64.92599999999999 - type: map_at_3 value: 60.409 - type: map_at_5 value: 62.627 - type: mrr_at_1 value: 78.717 - type: mrr_at_10 value: 84.468 - type: mrr_at_100 value: 84.655 - type: mrr_at_1000 value: 84.661 - type: mrr_at_3 value: 83.554 - type: mrr_at_5 value: 84.133 - type: ndcg_at_1 value: 78.717 - type: ndcg_at_10 value: 72.03399999999999 - type: ndcg_at_100 value: 75.158 - type: ndcg_at_1000 value: 76.197 - 
type: ndcg_at_3 value: 67.049 - type: ndcg_at_5 value: 69.808 - type: precision_at_1 value: 78.717 - type: precision_at_10 value: 15.201 - type: precision_at_100 value: 1.764 - type: precision_at_1000 value: 0.19 - type: precision_at_3 value: 43.313 - type: precision_at_5 value: 28.165000000000003 - type: recall_at_1 value: 39.359 - type: recall_at_10 value: 76.003 - type: recall_at_100 value: 88.197 - type: recall_at_1000 value: 95.003 - type: recall_at_3 value: 64.97 - type: recall_at_5 value: 70.41199999999999 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 92.83200000000001 - type: ap value: 89.33560571859861 - type: f1 value: 92.82322915005167 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 21.983 - type: map_at_10 value: 34.259 - type: map_at_100 value: 35.432 - type: map_at_1000 value: 35.482 - type: map_at_3 value: 30.275999999999996 - type: map_at_5 value: 32.566 - type: mrr_at_1 value: 22.579 - type: mrr_at_10 value: 34.882999999999996 - type: mrr_at_100 value: 35.984 - type: mrr_at_1000 value: 36.028 - type: mrr_at_3 value: 30.964999999999996 - type: mrr_at_5 value: 33.245000000000005 - type: ndcg_at_1 value: 22.564 - type: ndcg_at_10 value: 41.258 - type: ndcg_at_100 value: 46.824 - type: ndcg_at_1000 value: 48.037 - type: ndcg_at_3 value: 33.17 - type: ndcg_at_5 value: 37.263000000000005 - type: precision_at_1 value: 22.564 - type: precision_at_10 value: 6.572 - type: precision_at_100 value: 0.935 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.130999999999998 - type: precision_at_5 value: 10.544 - type: recall_at_1 value: 21.983 - type: recall_at_10 value: 62.775000000000006 - type: recall_at_100 value: 88.389 - type: recall_at_1000 value: 97.603 - type: recall_at_3 value: 40.878 - type: recall_at_5 value: 50.690000000000005 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.95120839033288 - type: f1 value: 93.73824125055208 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 76.78978568171455 - type: f1 value: 57.50180552858304 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 76.24411566913248 - type: f1 value: 74.37851403532832 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 79.94620040349699 - type: f1 value: 80.21293397970435 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 33.44403096245675 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure 
value: 31.659594631336812 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 32.53833075108798 - type: mrr value: 33.78840823218308 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 7.185999999999999 - type: map_at_10 value: 15.193999999999999 - type: map_at_100 value: 19.538 - type: map_at_1000 value: 21.178 - type: map_at_3 value: 11.208 - type: map_at_5 value: 12.745999999999999 - type: mrr_at_1 value: 48.916 - type: mrr_at_10 value: 58.141 - type: mrr_at_100 value: 58.656 - type: mrr_at_1000 value: 58.684999999999995 - type: mrr_at_3 value: 55.521 - type: mrr_at_5 value: 57.239 - type: ndcg_at_1 value: 47.059 - type: ndcg_at_10 value: 38.644 - type: ndcg_at_100 value: 36.272999999999996 - type: ndcg_at_1000 value: 44.996 - type: ndcg_at_3 value: 43.293 - type: ndcg_at_5 value: 40.819 - type: precision_at_1 value: 48.916 - type: precision_at_10 value: 28.607 - type: precision_at_100 value: 9.195 - type: precision_at_1000 value: 2.225 - type: precision_at_3 value: 40.454 - type: precision_at_5 value: 34.985 - type: recall_at_1 value: 7.185999999999999 - type: recall_at_10 value: 19.654 - type: recall_at_100 value: 37.224000000000004 - type: recall_at_1000 value: 68.663 - type: recall_at_3 value: 12.158 - type: recall_at_5 value: 14.674999999999999 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 31.552000000000003 - type: map_at_10 value: 47.75 - type: map_at_100 value: 48.728 - type: map_at_1000 value: 48.754 - type: map_at_3 value: 43.156 - type: map_at_5 value: 45.883 - type: mrr_at_1 value: 35.66 - type: mrr_at_10 value: 50.269 - type: mrr_at_100 value: 50.974 - type: mrr_at_1000 value: 50.991 - type: mrr_at_3 value: 46.519 - type: mrr_at_5 value: 48.764 - type: ndcg_at_1 value: 35.632000000000005 - type: ndcg_at_10 value: 55.786 - type: ndcg_at_100 value: 59.748999999999995 - type: ndcg_at_1000 value: 60.339 - type: ndcg_at_3 value: 47.292 - type: ndcg_at_5 value: 51.766999999999996 - type: precision_at_1 value: 35.632000000000005 - type: precision_at_10 value: 9.267 - type: precision_at_100 value: 1.149 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 21.601 - type: precision_at_5 value: 15.539 - type: recall_at_1 value: 31.552000000000003 - type: recall_at_10 value: 77.62400000000001 - type: recall_at_100 value: 94.527 - type: recall_at_1000 value: 98.919 - type: recall_at_3 value: 55.898 - type: recall_at_5 value: 66.121 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 71.414 - type: map_at_10 value: 85.37400000000001 - type: map_at_100 value: 86.01100000000001 - type: map_at_1000 value: 86.027 - type: map_at_3 value: 82.562 - type: map_at_5 value: 84.284 - type: mrr_at_1 value: 82.24000000000001 - type: mrr_at_10 value: 88.225 - type: mrr_at_100 value: 88.324 - type: mrr_at_1000 value: 88.325 - type: mrr_at_3 value: 87.348 - type: mrr_at_5 value: 87.938 - type: ndcg_at_1 value: 82.24000000000001 - type: ndcg_at_10 value: 88.97699999999999 - type: ndcg_at_100 value: 90.16 - type: ndcg_at_1000 value: 90.236 - type: ndcg_at_3 value: 86.371 - type: ndcg_at_5 value: 87.746 - type: precision_at_1 value: 82.24000000000001 - type: precision_at_10 value: 
13.481000000000002 - type: precision_at_100 value: 1.534 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.86 - type: precision_at_5 value: 24.738 - type: recall_at_1 value: 71.414 - type: recall_at_10 value: 95.735 - type: recall_at_100 value: 99.696 - type: recall_at_1000 value: 99.979 - type: recall_at_3 value: 88.105 - type: recall_at_5 value: 92.17999999999999 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 60.22146692057259 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 65.29273320614578 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 5.023 - type: map_at_10 value: 14.161000000000001 - type: map_at_100 value: 16.68 - type: map_at_1000 value: 17.072000000000003 - type: map_at_3 value: 9.763 - type: map_at_5 value: 11.977 - type: mrr_at_1 value: 24.8 - type: mrr_at_10 value: 37.602999999999994 - type: mrr_at_100 value: 38.618 - type: mrr_at_1000 value: 38.659 - type: mrr_at_3 value: 34.117 - type: mrr_at_5 value: 36.082 - type: ndcg_at_1 value: 24.8 - type: ndcg_at_10 value: 23.316 - type: ndcg_at_100 value: 32.613 - type: ndcg_at_1000 value: 38.609 - type: ndcg_at_3 value: 21.697 - type: ndcg_at_5 value: 19.241 - type: precision_at_1 value: 24.8 - type: precision_at_10 value: 12.36 - type: precision_at_100 value: 2.593 - type: precision_at_1000 value: 0.402 - type: precision_at_3 value: 20.767 - type: precision_at_5 value: 17.34 - type: recall_at_1 value: 5.023 - type: recall_at_10 value: 25.069999999999997 - type: recall_at_100 value: 52.563 - type: recall_at_1000 value: 81.525 - type: recall_at_3 value: 12.613 - type: recall_at_5 value: 17.583 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 87.71506247604255 - type: cos_sim_spearman value: 82.91813463738802 - type: euclidean_pearson value: 85.5154616194479 - type: euclidean_spearman value: 82.91815254466314 - type: manhattan_pearson value: 85.5280917850374 - type: manhattan_spearman value: 82.92276537286398 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 87.43772054228462 - type: cos_sim_spearman value: 78.75750601716682 - type: euclidean_pearson value: 85.76074482955764 - type: euclidean_spearman value: 78.75651057223058 - type: manhattan_pearson value: 85.73390291701668 - type: manhattan_spearman value: 78.72699385957797 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 89.58144067172472 - type: cos_sim_spearman value: 90.3524512966946 - type: euclidean_pearson value: 89.71365391594237 - type: euclidean_spearman value: 90.35239632843408 - type: manhattan_pearson value: 89.66905421746478 - type: manhattan_spearman value: 90.31508211683513 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: 
cos_sim_pearson value: 87.77692637102102 - type: cos_sim_spearman value: 85.45710562643485 - type: euclidean_pearson value: 87.42456979928723 - type: euclidean_spearman value: 85.45709386240908 - type: manhattan_pearson value: 87.40754529526272 - type: manhattan_spearman value: 85.44834854173303 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 88.28491331695997 - type: cos_sim_spearman value: 89.62037029566964 - type: euclidean_pearson value: 89.02479391362826 - type: euclidean_spearman value: 89.62036733618466 - type: manhattan_pearson value: 89.00394756040342 - type: manhattan_spearman value: 89.60867744215236 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 85.08911381280191 - type: cos_sim_spearman value: 86.5791780765767 - type: euclidean_pearson value: 86.16063473577861 - type: euclidean_spearman value: 86.57917745378766 - type: manhattan_pearson value: 86.13677924604175 - type: manhattan_spearman value: 86.56115615768685 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 89.58029496205235 - type: cos_sim_spearman value: 89.49551253826998 - type: euclidean_pearson value: 90.13714840963748 - type: euclidean_spearman value: 89.49551253826998 - type: manhattan_pearson value: 90.13039633601363 - type: manhattan_spearman value: 89.4513453745516 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 69.01546399666435 - type: cos_sim_spearman value: 69.33824484595624 - type: euclidean_pearson value: 70.76511642998874 - type: euclidean_spearman value: 69.33824484595624 - type: manhattan_pearson value: 70.84320785047453 - type: manhattan_spearman value: 69.54233632223537 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 87.26389196390119 - type: cos_sim_spearman value: 89.09721478341385 - type: euclidean_pearson value: 88.97208685922517 - type: euclidean_spearman value: 89.09720927308881 - type: manhattan_pearson value: 88.97513670502573 - type: manhattan_spearman value: 89.07647853984004 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 87.53075025771936 - type: mrr value: 96.24327651288436 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 60.428000000000004 - type: map_at_10 value: 70.088 - type: map_at_100 value: 70.589 - type: map_at_1000 value: 70.614 - type: map_at_3 value: 67.191 - type: map_at_5 value: 68.515 - type: mrr_at_1 value: 63.333 - type: mrr_at_10 value: 71.13000000000001 - type: mrr_at_100 value: 71.545 - type: mrr_at_1000 value: 71.569 - type: mrr_at_3 value: 68.944 - type: mrr_at_5 value: 70.078 - type: ndcg_at_1 value: 63.333 - type: ndcg_at_10 value: 74.72800000000001 - type: ndcg_at_100 value: 76.64999999999999 - type: ndcg_at_1000 value: 
77.176 - type: ndcg_at_3 value: 69.659 - type: ndcg_at_5 value: 71.626 - type: precision_at_1 value: 63.333 - type: precision_at_10 value: 10 - type: precision_at_100 value: 1.09 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 27.111 - type: precision_at_5 value: 17.666999999999998 - type: recall_at_1 value: 60.428000000000004 - type: recall_at_10 value: 87.98899999999999 - type: recall_at_100 value: 96.167 - type: recall_at_1000 value: 100 - type: recall_at_3 value: 74.006 - type: recall_at_5 value: 79.05 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.87326732673267 - type: cos_sim_ap value: 96.81770773701805 - type: cos_sim_f1 value: 93.6318407960199 - type: cos_sim_precision value: 93.16831683168317 - type: cos_sim_recall value: 94.1 - type: dot_accuracy value: 99.87326732673267 - type: dot_ap value: 96.8174218946665 - type: dot_f1 value: 93.6318407960199 - type: dot_precision value: 93.16831683168317 - type: dot_recall value: 94.1 - type: euclidean_accuracy value: 99.87326732673267 - type: euclidean_ap value: 96.81770773701807 - type: euclidean_f1 value: 93.6318407960199 - type: euclidean_precision value: 93.16831683168317 - type: euclidean_recall value: 94.1 - type: manhattan_accuracy value: 99.87227722772278 - type: manhattan_ap value: 96.83164126821747 - type: manhattan_f1 value: 93.54677338669335 - type: manhattan_precision value: 93.5935935935936 - type: manhattan_recall value: 93.5 - type: max_accuracy value: 99.87326732673267 - type: max_ap value: 96.83164126821747 - type: max_f1 value: 93.6318407960199 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 65.6212042420246 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 35.779230635982564 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 55.217701909036286 - type: mrr value: 56.17658995416349 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.954206018888453 - type: cos_sim_spearman value: 32.71062599450096 - type: dot_pearson value: 30.95420929056943 - type: dot_spearman value: 32.71062599450096 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.22699999999999998 - type: map_at_10 value: 1.924 - type: map_at_100 value: 10.525 - type: map_at_1000 value: 24.973 - type: map_at_3 value: 0.638 - type: map_at_5 value: 1.0659999999999998 - type: mrr_at_1 value: 84 - type: mrr_at_10 value: 91.067 - type: mrr_at_100 value: 91.067 - type: mrr_at_1000 value: 91.067 - type: mrr_at_3 value: 90.667 - type: mrr_at_5 value: 91.067 - type: ndcg_at_1 value: 81 - type: ndcg_at_10 value: 75.566 - type: ndcg_at_100 value: 56.387 - type: ndcg_at_1000 value: 49.834 
- type: ndcg_at_3 value: 80.899 - type: ndcg_at_5 value: 80.75099999999999 - type: precision_at_1 value: 84 - type: precision_at_10 value: 79 - type: precision_at_100 value: 57.56 - type: precision_at_1000 value: 21.8 - type: precision_at_3 value: 84.667 - type: precision_at_5 value: 85.2 - type: recall_at_1 value: 0.22699999999999998 - type: recall_at_10 value: 2.136 - type: recall_at_100 value: 13.861 - type: recall_at_1000 value: 46.299 - type: recall_at_3 value: 0.6649999999999999 - type: recall_at_5 value: 1.145 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.752 - type: map_at_10 value: 9.951 - type: map_at_100 value: 16.794999999999998 - type: map_at_1000 value: 18.251 - type: map_at_3 value: 5.288 - type: map_at_5 value: 6.954000000000001 - type: mrr_at_1 value: 38.775999999999996 - type: mrr_at_10 value: 50.458000000000006 - type: mrr_at_100 value: 51.324999999999996 - type: mrr_at_1000 value: 51.339999999999996 - type: mrr_at_3 value: 46.939 - type: mrr_at_5 value: 47.857 - type: ndcg_at_1 value: 36.735 - type: ndcg_at_10 value: 25.198999999999998 - type: ndcg_at_100 value: 37.938 - type: ndcg_at_1000 value: 49.145 - type: ndcg_at_3 value: 29.348000000000003 - type: ndcg_at_5 value: 25.804 - type: precision_at_1 value: 38.775999999999996 - type: precision_at_10 value: 22.041 - type: precision_at_100 value: 7.939 - type: precision_at_1000 value: 1.555 - type: precision_at_3 value: 29.932 - type: precision_at_5 value: 24.490000000000002 - type: recall_at_1 value: 2.752 - type: recall_at_10 value: 16.197 - type: recall_at_100 value: 49.166 - type: recall_at_1000 value: 84.18900000000001 - type: recall_at_3 value: 6.438000000000001 - type: recall_at_5 value: 9.093 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.47980000000001 - type: ap value: 14.605194452178754 - type: f1 value: 55.07362924988948 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 59.708545557441994 - type: f1 value: 60.04751270975683 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 53.21105960597211 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 87.58419264469214 - type: cos_sim_ap value: 78.55300004517404 - type: cos_sim_f1 value: 71.49673530889001 - type: cos_sim_precision value: 68.20795400095831 - type: cos_sim_recall value: 75.11873350923483 - type: dot_accuracy value: 87.58419264469214 - type: dot_ap value: 78.55297659559511 - type: dot_f1 value: 71.49673530889001 - type: dot_precision value: 68.20795400095831 - type: dot_recall value: 75.11873350923483 - type: euclidean_accuracy value: 87.58419264469214 - type: euclidean_ap value: 78.55300477331477 - type: euclidean_f1 value: 71.49673530889001 - type: euclidean_precision value: 68.20795400095831 - type: euclidean_recall value: 
75.11873350923483 - type: manhattan_accuracy value: 87.5663110210407 - type: manhattan_ap value: 78.49982050876562 - type: manhattan_f1 value: 71.35488740722104 - type: manhattan_precision value: 68.18946862226497 - type: manhattan_recall value: 74.82849604221636 - type: max_accuracy value: 87.58419264469214 - type: max_ap value: 78.55300477331477 - type: max_f1 value: 71.49673530889001 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.09069740365584 - type: cos_sim_ap value: 86.22749303724757 - type: cos_sim_f1 value: 78.36863452005407 - type: cos_sim_precision value: 76.49560117302053 - type: cos_sim_recall value: 80.33569448721897 - type: dot_accuracy value: 89.09069740365584 - type: dot_ap value: 86.22750233655673 - type: dot_f1 value: 78.36863452005407 - type: dot_precision value: 76.49560117302053 - type: dot_recall value: 80.33569448721897 - type: euclidean_accuracy value: 89.09069740365584 - type: euclidean_ap value: 86.22749355597347 - type: euclidean_f1 value: 78.36863452005407 - type: euclidean_precision value: 76.49560117302053 - type: euclidean_recall value: 80.33569448721897 - type: manhattan_accuracy value: 89.08293553770326 - type: manhattan_ap value: 86.21913616084771 - type: manhattan_f1 value: 78.3907031479847 - type: manhattan_precision value: 75.0352013517319 - type: manhattan_recall value: 82.06036341238065 - type: max_accuracy value: 89.09069740365584 - type: max_ap value: 86.22750233655673 - type: max_f1 value: 78.3907031479847 --- # smcleod/mxbai-embed-large-v1-Q8_0-GGUF This model was converted to GGUF format from [`mixedbread-ai/mxbai-embed-large-v1`](https://huggingface.co/mixedbread-ai/mxbai-embed-large-v1) using llama.cpp via ggml.ai's [GGUF-my-repo](https://huggingface.co/spaces/ggml-org/gguf-my-repo) space. Refer to the [original model card](https://huggingface.co/mixedbread-ai/mxbai-embed-large-v1) for more details on the model. ## Use with llama.cpp Install llama.cpp through brew (works on Mac and Linux): ```bash brew install llama.cpp ``` Invoke the llama.cpp server or the CLI. ### CLI: ```bash llama-cli --hf-repo smcleod/mxbai-embed-large-v1-Q8_0-GGUF --hf-file mxbai-embed-large-v1-q8_0.gguf -p "The meaning to life and the universe is" ``` ### Server: ```bash llama-server --hf-repo smcleod/mxbai-embed-large-v1-Q8_0-GGUF --hf-file mxbai-embed-large-v1-q8_0.gguf -c 2048 ``` Note: You can also use this checkpoint directly through the [usage steps](https://github.com/ggerganov/llama.cpp?tab=readme-ov-file#usage) listed in the Llama.cpp repo. Step 1: Clone llama.cpp from GitHub. ``` git clone https://github.com/ggerganov/llama.cpp ``` Step 2: Move into the llama.cpp folder and build it with the `LLAMA_CURL=1` flag along with any other hardware-specific flags (e.g. `LLAMA_CUDA=1` for NVIDIA GPUs on Linux). ``` cd llama.cpp && LLAMA_CURL=1 make ``` Step 3: Run inference through the main binary. ``` ./llama-cli --hf-repo smcleod/mxbai-embed-large-v1-Q8_0-GGUF --hf-file mxbai-embed-large-v1-q8_0.gguf -p "The meaning to life and the universe is" ``` or ``` ./llama-server --hf-repo smcleod/mxbai-embed-large-v1-Q8_0-GGUF --hf-file mxbai-embed-large-v1-q8_0.gguf -c 2048 ```
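Since mxbai-embed-large-v1 is an embedding model rather than a chat model, the typical workflow is to start `llama-server` with embeddings enabled and request vectors over HTTP. The following is a minimal sketch, assuming a recent llama.cpp build in which `llama-server --embeddings` exposes an OpenAI-compatible `/v1/embeddings` route on port 8080; the flag name, route, and response layout can differ between versions, so check `llama-server --help` for your build. The helper names (`embed`, `cosine`) and the example texts are illustrative only.

```python
# Minimal sketch: query a locally running llama-server for embeddings.
# Assumes the server was started with something like:
#   llama-server --hf-repo smcleod/mxbai-embed-large-v1-Q8_0-GGUF \
#       --hf-file mxbai-embed-large-v1-q8_0.gguf --embeddings -c 2048
# The --embeddings flag and the /v1/embeddings route are assumptions based on
# recent llama.cpp builds; verify them against `llama-server --help`.
import math
import requests

SERVER = "http://127.0.0.1:8080"

def embed(texts):
    # OpenAI-compatible embeddings request; "model" is typically ignored by
    # llama-server but kept for client compatibility.
    resp = requests.post(
        f"{SERVER}/v1/embeddings",
        json={"input": texts, "model": "mxbai-embed-large-v1-q8_0"},
        timeout=60,
    )
    resp.raise_for_status()
    # Sort by index so vectors come back in the same order as the inputs.
    data = sorted(resp.json()["data"], key=lambda d: d["index"])
    return [item["embedding"] for item in data]

def cosine(a, b):
    dot = sum(x * y for x, y in zip(a, b))
    norm = math.sqrt(sum(x * x for x in a)) * math.sqrt(sum(y * y for y in b))
    return dot / norm

query, doc = "What is the capital of France?", "Paris is the capital of France."
q_vec, d_vec = embed([query, doc])
print(f"cosine similarity: {cosine(q_vec, d_vec):.4f}")
```

Any OpenAI-compatible embeddings client can be pointed at the same endpoint instead of using raw `requests`.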
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
strongpear/M3-retriever-MEDICAL
strongpear
sentence-similarity
[ "sentence-transformers", "safetensors", "xlm-roberta", "sentence-similarity", "feature-extraction", "generated_from_trainer", "dataset_size:123750", "loss:MultipleNegativesRankingLoss", "arxiv:1908.10084", "arxiv:1705.00652", "base_model:BAAI/bge-m3", "base_model:finetune:BAAI/bge-m3", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
2024-12-12T17:05:00
2024-12-12T17:06:14
108
0
--- base_model: BAAI/bge-m3 library_name: sentence-transformers pipeline_tag: sentence-similarity tags: - sentence-transformers - sentence-similarity - feature-extraction - generated_from_trainer - dataset_size:123750 - loss:MultipleNegativesRankingLoss widget: - source_sentence: Kết quả nghiên cứu cho thấy điều gì về sự chuyên môn hóa chức năng ở bán cầu não của trẻ em suy giảm khả năng học tập và ngôn ngữ? sentences: - Cấu trúc não trên hình ảnh cộng hưởng từ ở trẻ suy giảm khả năng học tập và ngôn ngữ. Sử dụng hình ảnh cộng hưởng từ, 20 trẻ em bị suy giảm khả năng học tập và ngôn ngữ được so sánh với 12 đối tượng đối chứng bình thường. Cấu trúc tổng thể của não đặc biệt bình thường ở nhóm suy giảm khả năng học tập và ngôn ngữ. Hình thái bán tự động được sử dụng để đo thể tích bán cầu và sự bất đối xứng của não ở sáu vùng não. Thể tích của vùng perisylvian phía sau bên trái giảm đáng kể ở trẻ suy giảm khả năng học tập và ngôn ngữ. Sự bất đối xứng ở vùng não dưới trước và trên sau cũng khác biệt đáng kể ở nhóm này. Kết quả so sánh nhóm mô tả về thể tích ước tính của các cấu trúc chất xám khác của não làm tăng khả năng một số trẻ bị suy giảm khả năng ngôn ngữ và khả năng học tập có thể bị giảm thêm thể tích ở các cấu trúc vỏ não và dưới vỏ não. Kết quả cho thấy sự chuyên môn hóa chức năng ở bán cầu có thể là bất thường ở nhóm dân số này. - để đánh giá sự xâm nhập đường thở của bạch cầu ái toan trong phản ứng hen suyễn của chuột cống nâu Na Uy được mẫn cảm với ovalbumin, diễn biến thời gian của sự xâm nhập bạch cầu ái toan và sức đề kháng hô hấp rrs sau khi thử thách ovalbumin được đo lường, hiệu quả của việc điều trị bằng kháng thể đơn dòng chống lại icam và cd cuối cùng đã được nghiên cứu biểu hiện icam và cd trong đường thở đã được nghiên cứu tất cả chuột đều cho thấy tỷ lệ rrs tăng nhiều giờ sau khi thử thách với albumin ovalbumin cho thấy phản ứng hen muộn. Những con vật mắc bệnh lar có số lượng bạch cầu ái toan cao hơn những con có IAR iar và ở những con vật nhạy cảm nhưng không bị thách thức, chuột được điều trị bằng kháng thể cho thấy mức tăng rrs nhỏ hơn đáng kể và số lượng bạch cầu ái toan thấp hơn so với hóa mô miễn dịch ở động vật đối chứng IF trong đường thở được thực hiện. Phản ứng miễn dịch icam dương tính trên cả biểu mô và nội mô mạch máu của phần tr và trên biểu hiện icam nội mô mạch máu phổi được điều chỉnh tăng thách thức T3. Các tế bào dương tính cd trong các phần của tr và phổi tăng lên sau thử thách, kết quả của chúng tôi cho thấy sự thâm nhập của bạch cầu ái toan rất quan trọng trong sự phát triển của ấu trùng và việc điều trị bằng chất đối kháng của icam và cd có thể mang lại một phương pháp điều trị để giảm các triệu chứng hen suyễn - pH của chất trào ngược vào thực quản là một yếu tố quan trọng không chỉ đối với RE mà còn đối với barretts thực quản và OD của CA thực quản barretts. Mặt khác, nhiễm vi khuẩn H pylori được cho là ngăn ngừa viêm thực quản trào ngược và barretts thực quản bằng cách gây ra viêm teo dạ dày. lần lượt giảm GAS hơn nữa việc bảo tồn GAS có thể quan trọng đối với sự phát triển của CA nối dạ dày thực quản bao gồm cả barretts EC bất kể tình trạng nhiễm H pylori. Sự gia tăng tiết axit dạ dày ở dân số Nhật Bản đã được dự đoán dựa trên tỷ lệ nhiễm H pylori giảm và Việc phương Tây hóa thói quen ăn uống ở Nhật Bản điều này có thể dẫn đến sự gia tăng tỷ lệ mắc bệnh ung thư thực quản Barretts ở Nhật Bản trong tương lai - source_sentence: Tại sao việc điều trị viêm tiểu phế quản sai cách lại nguy hiểm cho trẻ? 
sentences: - 'Mẫu máu để xét nghiệm lipid máu thường được lấy ở tĩnh mạch hoặc mao mạch trong điều kiện đói, nghĩa là bệnh nhân cần được nhịn ăn khoảng 9-12 giờ trước khi lấy máu. 1. Sinh học của lipid và lipoprotein Lipid là một nhóm chất béo và các chất giống như chất béo, là thành phần quan trọng của tế bào và nguồn năng lượng. Hai lipid quan trọng là cholesterol và triglyceride, được vận chuyển trong máu bởi các hạt lipoprotein. Các hạt lipid được cấu tạo bới các phân tử protein, cholesterol, triglyceride và phospholipid và được phân loại theo tỷ trọng thành các lipoprotein tỷ trọng cao (HDL), lipoprotein tỷ trọng thấp (LDL) và lipoprotein tỷ trọng rất thấp (VLDL). Cholesterol có thể được tổng hợp trong cơ thể từ các mẩu 2C (nội sinh) hoặc được đưa từ ngoài và qua đường ăn uống. Việc ăn quá nhiều thực phẩm chứa nhiều chất béo hoặc yếu tố di truyền có thể làm tăng mức độ cholesterol trong máu. Lượng cholesterol dư thừa có thể được lắng đọng trong các mảng bám trên thành mạch máu, có thể gây hẹp lòng mạch, ngăn cản sự lưu thông của máu, dẫn đến xơ cứng động mạch (xơ vữa động mạch) và làm tăng nguy cơ mắc nhiều bệnh, gồm bệnh tim mạch và đột quỵ. Một mức độ triglyceride cao trong máu cũng có thể làm tăng nguy cơ phát triển bệnh tim mạch (CVD). 2. Sử dụng Các xét nghiệm lipid, lipoprotein được sử dụng để phát hiện, chẩn đoán, theo dõi điều trị và tiên lượng các rối loạn chuyển hóa lipid trong nhiều bệnh, chẳng hạn như bệnh tim mạch (CHD), đái tháo đường (DM), hội chứng chuyển hóa (MS), bệnh thận mạn (CKD), ... Các thông số lipid, lipoprotein và tỷ số của chúng được sử dụng trong lâm sàng gồm: 1) Triglycerides (TG); 2) Cholesterol toàn phần (total cholesterol: TC); 3) Cholesterol lipoprotein tỷ trọng cao (HDL-C); 4) Cholesterol lipoprotein tỷ trọng thấp (LDL-C); 5) Non-HDL-C; 6) Tỷ số TG/HDL-C; 7) Tỷ số TC/HDL-C; 8) Tỷ số LDL/HDL-C; 9) Tỷ số non-HDL/HDL-C. 3. Chỉ định xét nghiệm lipid máu Người trưởng thành khỏe mạnh không có các yếu tố nguy cơ mắc bệnh tim khác nên được chỉ định các xét nghiệm lipid lúc đói mỗi 4 đến 6 năm một lần. Nếu kết quả kiểm tra lipid máu bất thường, cần được quản lý, xét nghiệm thường xuyên hơn với bệnh án đầy đủ.' - Việc sử dụng ống dẫn thần kinh nhân tạo chứa các tế bào schwann khả thi là một trong những chiến lược hứa hẹn nhất để sửa chữa tổn thương PN nhằm chế tạo một ống dẫn thần kinh hiệu quả có cấu trúc vi mô và môi trường bên trong thuận lợi hơn trong việc tái tạo thần kinh so với các kỹ thuật hiện có bằng kỹ thuật d schwann CC ba chiều mới. 
MG và DRG drg được phát triển ống dẫn thần kinh của các tế bào schwann được sắp xếp d được chế tạo bằng cách gieo trực tiếp drg mới thu hoạch vào các ống silicon chứa đầy matrigel có đường kính trong mm mm và nuôi cấy trong ống nghiệm SC trong nhiều tuần quá trình tái tạo dây thần kinh của ống dẫn tế bào schwann nuôi cấy d nhóm n được đánh giá bằng cách sử dụng khuyết tật dây thần kinh tọa của chuột spraguedawley mm và được so sánh với khuyết tật của ống dẫn silicone chứa đầy matrigel và SC được điều chế bằng phương pháp SC đơn giản thông thường nhóm d ống dẫn hai chiều n sau nhiều tuần chức năng thần kinh tọa được đánh giá bằng phân tích dáng đi và SFI sfi và hình thái học của ống dẫn thần kinh và các mô bẩm sinh của dây thần kinh tọa đã được kiểm tra bằng cách sử dụng máy phân tích hình ảnh và phương pháp điện hiển vi góc nghiêng của sfi và mắt cá chân trong PET DUE là độ độ trong nhóm ống dẫn d n và độ trong nhóm ống dẫn d n nhóm có bao myelin sợi trục nằm trong nhóm ống dẫn d và trong nhóm ống dẫn d trong electron truyền TSM T0, nhóm ống dẫn d cho thấy SF thần kinh có myelin phong phú hơn với collagen ngoại bào dày và được tổ chức tốt hơn so với nhóm ống dẫn d và gân GM và BF trong ống dẫn d nhóm ít bị teo hơn và giảm tình trạng xơ hóa với ít thâm nhiễm mỡ hơn so với nhóm ống dẫn d. Một kỹ thuật nuôi cấy tế bào d schwann mới đã được thiết lập và ống dẫn thần kinh được chế tạo bằng kỹ thuật này cho thấy khả năng tái tạo thần kinh được cải thiện nhiều so với ống silicone chứa đầy matrigel và SC được điều chế từ phương pháp nuôi cấy đơn giản thông thường - "– Cho trẻ viêm tiểu phế quản tái khám theo đúng lịch hẹn của bác sĩ đã dặn.\n\ \ So với các bệnh về hô hấp thông thường, bệnh viêm tiểu phế quản ở trẻ em có\ \ triệu chứng đặc thù là biểu hiện khò khè, khó thở. Nếu điều trị sai cách khiến\ \ triệu chứng trở nặng, trẻ có nguy cơ bị suy hô hấp và kéo theo các biến chứng\ \ nguy hiểm khác như: tràn khí màng phổi, viêm phổi, xẹp phổi, hen phế quản… Do\ \ đó, phụ huynh chớ chủ quan mà nên cho bé đi khám kịp thời để được bác sĩ tư\ \ vấn phác đồ điều trị phù hợp, đảm bảo hiệu quả và an toàn nhé." - source_sentence: Nguy cơ bị ung thư phổi có liên quan như thế nào đến việc hút thuốc lá điện tử? sentences: - "– Kết hợp những loại đồ ăn chung không phù hợp ví dụ như ăn đồ quá nóng cùng\ \ lúc với đồ quá lạnh hoặc ăn đồ sống lẫn với đồ chín,…\n – Uống nhiều rượu bia.\n\ \ – Những thói quen ăn uống không tốt như bỏ bữa, ăn quá nhanh, ăn uống tùy ý\ \ không đúng giờ, ăn đêm, ăn quá no hoặc để bụng quá đói trong 1 bữa,…\n Ăn uống\ \ không khoa học là nguyên nhân phổ biến gây ra cơn đau dạ dày về đêm.\n 2.2.\ \ Do căng thẳng\n Hiện tượng đau dạ dày về ban đêm cũng có thể xảy ra nếu bạn\ \ thường xuyên rơi vào trạng thái căng thẳng, mệt mỏi trong thời gian dài. Khi\ \ căng thẳng, dạ dày phải hoạt động nhiều hơn bình thường và tiết ra nhiều acid\ \ hơn. Điều này dễ khiến niêm mạc của dạ dày chịu nhiều tổn thương. Đây cũng là\ \ lý do vì sao khi stress bạn thường gặp phải các vấn đề tiêu hóa như chán ăn,\ \ ăn không ngon và trong đó có những cơn đau bụng âm ỉ về buổi đêm.\n 2.3. Nguyên\ \ nhân bệnh lý ở dạ dày\n Đây là nguyên nhân trực tiếp dẫn đến những cơn đau bụng\ \ về đêm. 
Các bệnh dạ dày thường gặp dẫn đến cơn đau về đêm bao gồm:\n – Viêm\ \ dạ dày\n – Loét dạ dày\n – Trào ngược dạ dày thực quản\n – Viêm hành tá tràng\n\ \ – Hội chứng ruột kích thích (IBS)\n – Bệnh Crohn\n – Thậm chí là ung thư\n 2.4.\ \ Đau dạ dày đêm đến từ bệnh lý khác ngoài đường tiêu hóa\n Một số bệnh lý ngoài\ \ đường tiêu hóa sau đây có thể là nguyên nhân gây đau dạ dày lúc về đêm:\n –\ \ Sỏi mật: Đây cũng là nguyên nhân thường gặp khi bệnh nhân thăm khám với triệu\ \ chứng đau dạ dày ban đêm. Sỏi mật bị tắt tại ống mật gây ra những cơn đau quặn\ \ dữ dội liên tục ở phía dạ dày. Cơn đau có xu hướng gia tăng sau một bữa ăn với\ \ nhiều chất béo và “hoành hành” khi bạn đã chìm vào giấc ngủ. Cơn đau có thể\ \ kèm theo cảm giác buồn nôn, nôn, sốt cao, người bệnh bị vàng da vàng mắt và\ \ phân có màu trắng.\n – Sỏi thận: Khi viên sỏi di chuyển và đi vào niệu quản\ \ thì bệnh nhân sẽ gặp phải cơn đau nhói đột ngột ở vùng lưng. Sau đó, cơn đau\ \ nhanh chóng lan đến dạ dày và cả vùng bụng." - Nghiên cứu hiện tại đã so sánh hai hệ thống phân loại khác nhau của bệnh nhân mắc cvid suy giảm miễn dịch VL thông thường, một dựa trên sinh tổng hợp globulin miễn dịch trong ống nghiệm và một hệ thống khác dựa trên số lượng tế bào cdnaïve. Các tế bào đơn nhân máu ngoại vi (pbmcs) được phân lập từ các bệnh nhân cvid và đối chứng khỏe mạnh mà chúng được kích thích cho Sự tiết ra igm và igg T3 kích thích bằng túi SAC i khi bổ sung interleukin il hoặc với các tập hợp tế bào T mitogen t pokeweed được ước tính bằng phương pháp đo tế bào dòng chảy bởi hệ thống đầu tiên. igm nhưng o sự trao đổi gen nhẹ giữa các quần thể trên cơ sở T0 của cấu trúc di truyền quần thể và quần thể gongliu có độ đa dạng di truyền cao nhất cần được ưu tiên cao trong việc bảo tồn nguồn gen malus stirersii trong bảo tồn nguồn gen tại chỗ - "Nguy cơ bị ung thư phổi liên quan đến hệ thống phân phối điện tử (ví dụ: thuốc\ \ lá điện tử) vẫn còn được xác định, mặc dù người ta cho rằng các sản phẩm của\ \ quá trình đốt cháy thuốc lá là các chất gây ung thư chính.\nNguy cơ bị ung thư\ \ sẽ giảm sau khi ngừng hút thuốc, nhưng không bao giờ trở về mức nguy cơ ở lần\ \ khám ban đầu ở những người không bao giờ hút thuốc. Có khoảng 15 đến 20% những\ \ người bị ung thư phổi chưa bao giờ hút thuốc hoặc hút thuốc rất ít.\nLiệu có\ \ hay không và lượng phơi nhiễm với radon trong các hộ gia đình là bao nhiêu để\ \ dẫn đến tăng nguy cơ bị ung thư phổi thì vẫn còn nhiều tranh cãi.\nViêm mạn\ \ tính làm tăng nguy cơ mắc nhiều bệnh ung thư, bao gồm cả ung thư phổi. Ví dụ:\ \ COPD (bệnh phổi tắc nghẽn mạn tính), thiếu hụt alpha-1 antitrypsin và xơ phổi\ \ làm tăng độ nhạy cảm với ung thư phổi. Ở những bệnh nhân có sẹo ở phổi do các\ \ bệnh phổi khác (ví dụ: bệnh lao) có khả năng tăng nguy cơ bị ung thư phổi. Ngoài\ \ ra, những người hút thuốc có hoạt tính sử dụng chất bổ sung beta-carotene có\ \ thể có nguy cơ phát triển ung thư phổi. \n \nĐánh giá rủi ro ung thư phổi ở\ \ những người đang hút thuốc hiện tại và quá khứ (6 tuổi)\nCác tế bào biểu mô\ \ hô hấp đòi hỏi phải tiếp xúc kéo dài với tác nhân gây ung thư và tích tụ nhiều\ \ đột biến di truyền trước khi trở thành ung thư phổi (một hiệu ứng gọi là ung\ \ thư trường diễn). 
\nỞ một số bệnh nhân bị ung thư phổi, các đột biến thứ phát\ \ hoặc bổ sung ở các gen kích thích sự phát triển của tế bào (K-ras, MYC) gây\ \ ra những bất thường trong receptor nhận tín hiệu của yếu tố tăng trưởng (EGFR,\ \ HER2/neu) và ức chế quá trình chết của tế bào, góp phần làm tăng sự phát triển\ \ không kiểm soát các tế bào bất thường. Ngoài ra, các đột biến ức chế gen ức\ \ chế khối u (ví dụ: p53, APC) có thể dẫn đến ung thư. Các đột biến khác có thể\ \ là nguyên nhân bao gồm chuyển vị EML-4-ALK và các đột biến ở ROS-1, BRAF và\ \ PI3KCA. Các gen là nguyên nhân gây ung thư phổi được gọi là đột biến thúc đẩy\ \ gen sinh ung thư." - source_sentence: Tại sao người bệnh nên thăm khám sớm khi có triệu chứng thoái hóa cột sống? sentences: - "Thời điểm tiêm vắc xin phế cầu cho trẻ thường bắt đầu khi trẻ đủ từ 6 tuần tuổi\ \ trở lên. Trẻ sẽ được tiêm ở khu vực cơ delta của cánh tay hoặc mặt trước của\ \ đùi. Sau đây là những thời điểm cụ thể mà các cha mẹ cần nắm rõ để lưu ý đưa\ \ trẻ đi tiêm phòng phế cầu đúng lịch:\n 2.1. Tiêm phế cầu cho bé từ 6 tuần đến\ \ 6 tháng tuổi\n Trẻ sơ sinh từ 6 tuần đến 6 tháng tuổi được khuyến cáo áp dụng\ \ 1 trong 2 liệu trình tiêm vắc xin như sau:\n Tiêm 3 liều cơ bản: Mũi tiêm đầu\ \ tiên sẽ được thực hiện khi trẻ tròn mốc 2 tháng tuổi. Mũi thứ 2 sẽ cách mũi\ \ đầu 1 tháng và mũi cuối cùng cách mũi thứ 2 sau 1 tháng. Trẻ sẽ tiêm mũi nhắc\ \ lại sau 6 tháng kể từ ngày hoàn thành mũi thứ 3.\n Tiêm 2 liều cơ bản: Mũi tiêm\ \ đầu tiên được áp dụng khi trẻ đủ 2 tháng tuổi. Mũi tiêm tiếp theo cách mũi đầu\ \ sau 2 tháng.\n Liều tiêm nhắc lại sau ít nhất 6 tháng kể từ ngày tiêm mũi cơ\ \ bản cuối cùng.\n 2.2. Tiêm vắc xin phế cầu cho trẻ sinh non\n Đối với các bé\ \ sinh non khi tiêm vắc xin phế cầu cần phải tuân theo chỉ định của bác sĩ chuyên\ \ khoa. Thay vì áp dụng lịch tiêm cơ bản như trên, các bé sinh non cần được tiêm\ \ đủ 4 liều vắc xin phế cầu, mỗi liều 0,5ml.\n Theo đó, 3 mũi tiêm đầu tiên cũng\ \ sẽ được chỉ định tiêm khi trẻ đủ 2 tháng tuổi. Các mũi 2 và mũi 3 cách nhau\ \ 1 tháng. Riêng mũi 4 áp dụng tiêm sau 6 tháng tiếp theo.\n 2.3. Tiêm vắc xin\ \ cho bé từ 7 – 11 tháng, chưa từng tiêm vắc xin phòng phế cầu khuẩn\n Trẻ nhỏ\ \ từ 7 đến 11 tháng tuổi sẽ tiêm theo liệu trình 2 mũi cơ bản, khoảng cách giữa\ \ các mũi tiêm là 1 tháng. Mũi nhắc lại sẽ tiêm khi trẻ trên 1 tuổi hoặc có thể\ \ tiêm cách mũi thứ 2 khoảng 2 tháng.\n 2.4. Tiêm vắc xin cho bé đủ 12 đến 23\ \ tháng tuổi\n Liều tiêm cho bé từ 12 tháng đến dưới 2 tuổi là 2 liều, mỗi liều\ \ tiêm 0,5ml. Khoảng cách giữa 2 liều tiêm tối thiểu 2 tháng." - "Triệu chứng thoái hóa cột sống theo từng vị tríTriệu chứng thoái hóa cột sống\ \ thường gặp là đau nhức xương khớp. Tùy thuộc vào các vị trí nhất định, người\ \ bệnh sẽ có biểu hiện và cảm nhận không giống nhau. Khi xuất hiện các triệu chứng\ \ nghi ngờ thoái hóa, người bệnh nên chủ động thăm khám sớm để được điều trị kịp\ \ thời.\n Triệu chứng thoái hóa cột sống thường gặp là đau nhức xương khớp. Tùy\ \ thuộc vào các vị trí nhất định, người bệnh sẽ có biểu hiện và cảm nhận không\ \ giống nhau. Khi xuất hiện các triệu chứng nghi ngờ thoái hóa, người bệnh nên\ \ chủ động thăm khám sớm để được điều trị kịp thời.\n 1. Thoái hóa cột sống là\ \ gì?\n Thoái hóa đốt sống là tình trạng lớp sụn khớp bị hao mòn dần dần, trong\ \ quá trình hoạt động thể chất, các đầu đốt sống sẽ cọ xát trực tiếp vào nhau\ \ gây viêm nhiễm, dẫn đến màng hoạt dịch sưng tấy và dịch khớp bị khô do dịch\ \ khớp bị hạn chế. bài tiết. 
Ngoài ra, ma sát ở các đầu xương cũng góp phần hình\ \ thành các gai xương tại đây. Các gai xương phát triển quá mức cọ sát vào đốt\ \ sống, rễ thần kinh và các mô mềm xung quanh.\n Thoái hóa đốt sống là tình trạng\ \ lớp sụn khớp bị hao mòn dần dần, cọ xát vào nhau gây viêm nhiễm.\n 2. Triệu\ \ chứng thoái hóa cột sống theo vị trí khởi phát\n Hầu hết bệnh nhân đều cảm thấy\ \ đau và khó chịu. Cơn đau ở mỗi trường hợp sẽ khác nhau tùy theo vị trí đốt sống\ \ bị thoái hóa, cụ thể như:\n 2.1. Triệu chứng thoái hóa cột sống thắt lưng cần\ \ lưu ý\n Triệu chứng của bệnh ở giai đoạn này không rõ ràng, thường chỉ đau nhẹ\ \ hoặc không đau, chỉ có cảm giác đau nhẹ ở lưng, do sụn khớp mới bắt đầu có dấu\ \ hiệu bào mòn nên rất dễ nhầm lẫn với bệnh đau lưng đơn thuần. nỗi đau. Vì vậy,\ \ có xu hướng chủ quan hướng tới các giai đoạn bệnh nặng hơn.\n Tiếp theo là cơn\ \ đau xuất hiện dưới dạng đau âm ỉ, đau khi làm việc và biến mất khi nghỉ ngơi,\ \ đặc biệt là khi nằm trên sàn cứng (nệm cứng). Đồng thời có dấu hiệu giảm khả\ \ năng vận động, thường xuyên bị đau nhức, khó chịu khi mang vác vật nặng." - "– Chảy máu âm đạo bất thường\n Tình trạng thai ở ngoài tử cung có thể chẩn đoán\ \ thông qua siêu âm\n Tình trạng mang thai ngoài tử cung nếu có các dấu hiệu nghi\ \ ngờ bản thân mang thai ở ngoài tử cung, chị em cần đến các cơ sở y tế chuyên\ \ khoa để được các bác sĩ chẩn đoán và đưa ra các chỉ định kịp thời.\n Chẩn đoán\ \ thai ngoài tử cung thường sử dụng các phương pháp sau:\n – Thử thai: Việc kiểm\ \ tra nồng độ hormone hCG (human chorionic gonadotropin) trong máu có thể cung\ \ cấp thông tin quan trọng và bước đầu kiểm tra được chị em có đang mang thai\ \ hay không,\n – Siêu âm thai: Thông qua siêu âm để bác sĩ xác định vị trí, sự\ \ phát triển của thai ngoài tử cung và xác nhận chẩn đoán một cách chính xác.\n\ \ – Xét nghiệm máu có thể được sử dụng để kiểm tra mức độ hCG và các yếu tố khác\ \ trong máu, giúp chẩn đoán thai ngoài tử cung\n Ngoài ra, bác sĩ cũng sẽ đánh\ \ giá các triệu chứng lâm sàng như đau vùng bụng dưới, chảy máu âm đạo, và cảm\ \ giác đau khi quan hệ tình dục để đưa ra kết quả đầy đủ về tình trạng sức khỏe.\n\ \ 2. Thai ngoài tử cung được điều trị bằng cách nào?\n Khối thai nằm ngoài tử\ \ cung không thể di chuyển về lại tử cung và cũng không thể tiếp tục phát triển\ \ như thai bình thường. Chính vì thế cần phải loại bỏ sớm để tránh các biến chứng\ \ nguy hiểm có thể xảy ra. Tùy vào mỗi trường hợp cụ thể mà bác sĩ sẽ chỉ định\ \ phương pháp điều trị khối thai ngoài tử cung.\n – Điều trị nội khoa: Bác sĩ\ \ có thể chỉ định điều trị bằng thuốc Methotrexate để ngăn chặn sự phát triển\ \ của tế bào và kết thúc thai kỳ, thuốc thường được áp dụng cho các trường hợp\ \ có giai đoạn thai ngoài tử cung chưa phát triển lớn, kích thước nhỏ.\n – Phẫu\ \ thuật nội soi: Bác sĩ sẽ áp dụng phương pháp này trong trường hợp mang thai\ \ ở ngoài tử cung đã phát triển nhưng chưa bị vỡ. Tùy vào từng trường hợp sẽ phải\ \ loại bỏ cả khối thai ngoài tử cung lẫn vòi trứng, hoặc bảo tồn vòi trứng." - source_sentence: Mục đích chính của T0 là gì? sentences: - "Sâu răng càng nặng, răng sẽ càng chịu nhiều tổn thương. Thậm chí, nếu không được\ \ điều trị sớm, sâu răng sẽ tấn công và khiến cho rằng hàm vỡ hết chỉ còn chân\ \ răng.\n 2. Mức độ nguy hiểm của răng hàm vỡ chỉ còn chân răng\n Tình trạng răng\ \ cối bị sâu dẫn tới vỡ chỉ còn chân răng rất nguy hiểm. 
Nếu không được chăm sóc\ \ và điều trị kịp thời sẽ dẫn tới nhiều biến chứng gây ảnh hưởng nghiêm trọng\ \ tới sức khỏe người bệnh.\n 2.1 Ảnh hưởng chức năng ăn nhai của răng\n Khi tổ\ \ chức cứng của răng bị phá hủy càng nhiều, răng sẽ càng dễ vỡ. Thời điểm răng\ \ chỉ còn lại chân, chức năng ăn nhai của răng cũng sẽ mất.\n 2.2 Viêm lợi\n Sâu\ \ răng sẽ tạo ra những lỗ, hốc sâu. Đây chính là vị trí thuận lợi để răng sâu\ \ lưu giữ thức ăn. Lâu ngày, điều này sẽ dẫn tới hình thành ổ vi khuẩn, gây hôi\ \ miệng. Bên cạnh đó, khi răng xuất hiện hốc sâu, lợi sẽ dễ có xu hướng lấp kín\ \ hốc sâu đó. Phần lợi này sẽ dễ bị viêm, sưng, chảy máu cho phải chà sát nhiều\ \ trong quá trình ăn nhai.\n 2.3 Viêm tủy, nhiễm trùng chóp răng\n Khi tình trạng\ \ sâu răng hàm chuyển biến nặng sẽ dẫn tới đau nhức, viêm tủy, viêm chóp răng\ \ gây nguy hiểm\n Sâu răng khi chuyển nặng, sâu xuống dưới và vào tủy sẽ khiến\ \ răng bị đau nhức do viêm tủy. Khi tình trạng viêm tủy trở nên nghiêm trọng,\ \ lan sâu xuống chóp răng sẽ gây viêm nhiễm vùng chóp. Khi này, răng sẽ bị đau,\ \ lung lay, phần lợi xung quang sẽ sưng to. Lâu ngày, ổ abscess chóp răng sẽ hình\ \ thành. Vấn đề này không được xử lý sớm sẽ chính là nguy cơ mất răng và lây lan\ \ sang cả những răng lân cận.\n Ổ nhiễm trùng chóp răng lây lan sẽ gây viêm xương\ \ hàm. Sau đó, nếu không được khắc phục, phần mềm và các tổ chức lân cận cũng\ \ sẽ bị ảnh hưởng tạo nên ổ nhiễm trùng lớn rất khó để kiểm soát.\n Khi ổ nhiễm\ \ trùng lan rộng sẽ tạo ra nang to. Chúng phá hủy xương hàm và khiến xương hàm\ \ bị gãy, gây tổn thương thần kinh, đường mạch máu, …\n 3." - "Có nhiều hình thái bệnh biểu thị mức độ nặng nhẹ khác nhau. Thông thường có 3\ \ hình thái biểu hiện bệnh nhẹ tới nặng như sau:\n– Mi mắt dưới sưng đỏ, nổi cục,\ \ có vảy, cảm giác vướng mí nhẹ. Triệu chứng này có thể xuất hiện ở bên trong\ \ hoặc bên ngoài mí mắt dưới\n– Các nốt nổi gây ngứa, sưng đỏ, tiết nhiều tiết\ \ tố khiến mi bị bám dính. Lúc này bờ mi chưa bị loét\n– Mí mắt tấy đỏ, ngứa,\ \ đau rát, bắt đầu có các vết loét\nBệnh không gây giảm, mất thị lực hay các hậu\ \ quả nghiêm trọng nhưng lại dễ tái phát gây khó chịu. Cần phát hiện kịp thời\ \ các dấu hiệu bất thường để có phương án điều trị phù hợp và nhanh chóng. Nếu\ \ để bệnh dai dẳng dễ trở thành tiền đề, tạo điều kiện cho các bệnh lý khác tấn\ \ công đôi mắt bạn.\n 3. Điều trị viêm mí mắt khoa học\n 3.1. Biện pháp vệ sinh,\ \ hạn chế tình trạng viêm mí mắt dưới nặng hơn\n Như đã đề cập, cần phát hiện\ \ sớm các dấu hiệu để có các biện pháp điều trị kịp thời, hạn chế các tác động\ \ tiêu cực lên mắt. Tùy vào chuyển biến của bệnh mà bạn có thể lựa chọn các phương\ \ pháp điều trị phù hợp, đồng thời thực hiện các biện pháp dưới đây:\n– Dùng kính\ \ bảo hộ tránh tác động của khói bụi. Dừng sử dụng kính áp tròng với mục đích\ \ làm đẹp và sử dụng kính gọng thông thường thay thế trong trường hợp mắc cận\ \ thị, viễn thị, loạn thị,…\n– Vệ sinh mi mắt bằng các vật dụng y tế như gạc,\ \ bông,… tránh sử dụng các loại khăn chà sát. 
Chú ý rửa bằng nước muối sinh lý,\ \ nước ấm giúp bong lớp vảy và giảm tiết nhờn\n– Dùng khăn ấm chườm lên vùng mí\ \ mắt bị sưng viêm\nTuy nhiên, nếu bạn đã cố gắng đảm bảo giữ mi mắt sạch sẽ mà\ \ tình trạng viêm mí mắt dưới vẫn có chiều hướng nặng hơn thì hãy gặp các bác\ \ sĩ chuyên khoa\n Bệnh nhân nên lựa chọn cơ sở y tế uy tín điều trị bệnh.\n 3.2.\ \ Điều trị bệnh viêm mí mắt dưới an toàn hơn khi gặp bác sĩ\n Các bác sĩ sẽ tư\ \ vấn và có hướng điều trị tốt hơn như:\n– Sử dụng các loại kháng sinh dạng thuốc\ \ nhỏ mắt hoặc thuốc mỡ bôi." - Mục đích của T0 này là so sánh hiệu quả của bài tập chuyển động thụ động và bài tập chuyển động AS đối với sức khỏe chức năng ở các đối tượng và phương pháp của cư dân viện dưỡng lão cao tuổi. Hai mươi ba cư dân tại viện dưỡng lão nam và nữ tình nguyện tham gia T0 này, họ được chia thành một nhóm thụ động nhóm tập thể dục chuyển động n và một nhóm tập thể dục chuyển động AS n và thực hiện các buổi tập tối thiểu hai lần một tuần trong nhiều tuần ghế tập thể hình PET đứng lên ngồi vươn tay ra sau Các bài kiểm tra FR và đi bộ tối thiểu đã được đánh giá trước đó và T3 kết quả can thiệp không có đường cơ sở đáng kể sự khác biệt được ghi nhận giữa các nhóm về các biến đo được sau can thiệp trong tuần không có nhóm tương tác đáng kể nào à thời gian được ghi nhận trong các biến thể dục chức năng giữa CG ngoại trừ điểm FR Bài tập chuyển động AS Bài tập chuyển động thụ động cải thiện đáng kể theo thời gian đã được ghi nhận trong bài tập chuyển động thụ động nhóm ngồi trên ghế cong đứng lên điểm đi bộ và đi bộ tối thiểu và trong nhóm tập thể dục chuyển động tích cực ngồi trên ghế cong đứng lên điểm đi bộ FR và đi bộ tối thiểu tỷ lệ tuân thủ trong các nhóm tập thể dục thụ động và chuyển động AS lần lượt là kết luận bài tập chuyển động thụ động và bài tập chuyển động tích cực được cho là có hiệu quả tương tự trong việc cải thiện thể lực PET của người già ở viện dưỡng lão --- # SentenceTransformer based on BAAI/bge-m3 This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [BAAI/bge-m3](https://huggingface.co/BAAI/bge-m3). It maps sentences & paragraphs to a 1024-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more. 
## Model Details ### Model Description - **Model Type:** Sentence Transformer - **Base model:** [BAAI/bge-m3](https://huggingface.co/BAAI/bge-m3) <!-- at revision 5617a9f61b028005a4858fdac845db406aefb181 --> - **Maximum Sequence Length:** 8192 tokens - **Output Dimensionality:** 1024 dimensions - **Similarity Function:** Cosine Similarity <!-- - **Training Dataset:** Unknown --> <!-- - **Language:** Unknown --> <!-- - **License:** Unknown --> ### Model Sources - **Documentation:** [Sentence Transformers Documentation](https://sbert.net) - **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers) - **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers) ### Full Model Architecture ``` SentenceTransformer( (0): Transformer({'max_seq_length': 8192, 'do_lower_case': False}) with Transformer model: XLMRobertaModel (1): Pooling({'word_embedding_dimension': 1024, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True}) (2): Normalize() ) ``` ## Usage ### Direct Usage (Sentence Transformers) First install the Sentence Transformers library: ```bash pip install -U sentence-transformers ``` Then you can load this model and run inference. ```python from sentence_transformers import SentenceTransformer # Download from the 🤗 Hub model = SentenceTransformer("strongpear/M3-retriever-Medical") # Run inference sentences = [ 'Mục đích chính của T0 là gì?', 'Mục đích của T0 này là so sánh hiệu quả của bài tập chuyển động thụ động và bài tập chuyển động AS đối với sức khỏe chức năng ở các đối tượng và phương pháp của cư dân viện dưỡng lão cao tuổi. Hai mươi ba cư dân tại viện dưỡng lão nam và nữ tình nguyện tham gia T0 này, họ được chia thành một nhóm thụ động nhóm tập thể dục chuyển động n và một nhóm tập thể dục chuyển động AS n và thực hiện các buổi tập tối thiểu hai lần một tuần trong nhiều tuần ghế tập thể hình PET đứng lên ngồi vươn tay ra sau Các bài kiểm tra FR và đi bộ tối thiểu đã được đánh giá trước đó và T3 kết quả can thiệp không có đường cơ sở đáng kể sự khác biệt được ghi nhận giữa các nhóm về các biến đo được sau can thiệp trong tuần không có nhóm tương tác đáng kể nào à thời gian được ghi nhận trong các biến thể dục chức năng giữa CG ngoại trừ điểm FR Bài tập chuyển động AS Bài tập chuyển động thụ động cải thiện đáng kể theo thời gian đã được ghi nhận trong bài tập chuyển động thụ động nhóm ngồi trên ghế cong đứng lên điểm đi bộ và đi bộ tối thiểu và trong nhóm tập thể dục chuyển động tích cực ngồi trên ghế cong đứng lên điểm đi bộ FR và đi bộ tối thiểu tỷ lệ tuân thủ trong các nhóm tập thể dục thụ động và chuyển động AS lần lượt là kết luận bài tập chuyển động thụ động và bài tập chuyển động tích cực được cho là có hiệu quả tương tự trong việc cải thiện thể lực PET của người già ở viện dưỡng lão', 'Sâu răng càng nặng, răng sẽ càng chịu nhiều tổn thương. Thậm chí, nếu không được điều trị sớm, sâu răng sẽ tấn công và khiến cho rằng hàm vỡ hết chỉ còn chân răng.\n 2. Mức độ nguy hiểm của răng hàm vỡ chỉ còn chân răng\n Tình trạng răng cối bị sâu dẫn tới vỡ chỉ còn chân răng rất nguy hiểm. 
Nếu không được chăm sóc và điều trị kịp thời sẽ dẫn tới nhiều biến chứng gây ảnh hưởng nghiêm trọng tới sức khỏe người bệnh.\n 2.1 Ảnh hưởng chức năng ăn nhai của răng\n Khi tổ chức cứng của răng bị phá hủy càng nhiều, răng sẽ càng dễ vỡ. Thời điểm răng chỉ còn lại chân, chức năng ăn nhai của răng cũng sẽ mất.\n 2.2 Viêm lợi\n Sâu răng sẽ tạo ra những lỗ, hốc sâu. Đây chính là vị trí thuận lợi để răng sâu lưu giữ thức ăn. Lâu ngày, điều này sẽ dẫn tới hình thành ổ vi khuẩn, gây hôi miệng. Bên cạnh đó, khi răng xuất hiện hốc sâu, lợi sẽ dễ có xu hướng lấp kín hốc sâu đó. Phần lợi này sẽ dễ bị viêm, sưng, chảy máu cho phải chà sát nhiều trong quá trình ăn nhai.\n 2.3 Viêm tủy, nhiễm trùng chóp răng\n Khi tình trạng sâu răng hàm chuyển biến nặng sẽ dẫn tới đau nhức, viêm tủy, viêm chóp răng gây nguy hiểm\n Sâu răng khi chuyển nặng, sâu xuống dưới và vào tủy sẽ khiến răng bị đau nhức do viêm tủy. Khi tình trạng viêm tủy trở nên nghiêm trọng, lan sâu xuống chóp răng sẽ gây viêm nhiễm vùng chóp. Khi này, răng sẽ bị đau, lung lay, phần lợi xung quang sẽ sưng to. Lâu ngày, ổ abscess chóp răng sẽ hình thành. Vấn đề này không được xử lý sớm sẽ chính là nguy cơ mất răng và lây lan sang cả những răng lân cận.\n Ổ nhiễm trùng chóp răng lây lan sẽ gây viêm xương hàm. Sau đó, nếu không được khắc phục, phần mềm và các tổ chức lân cận cũng sẽ bị ảnh hưởng tạo nên ổ nhiễm trùng lớn rất khó để kiểm soát.\n Khi ổ nhiễm trùng lan rộng sẽ tạo ra nang to. Chúng phá hủy xương hàm và khiến xương hàm bị gãy, gây tổn thương thần kinh, đường mạch máu, …\n 3.', ] embeddings = model.encode(sentences) print(embeddings.shape) # [3, 1024] # Get the similarity scores for the embeddings similarities = model.similarity(embeddings, embeddings) print(similarities.shape) # [3, 3] ``` <!-- ### Direct Usage (Transformers) <details><summary>Click to see the direct usage in Transformers</summary> </details> --> <!-- ### Downstream Usage (Sentence Transformers) You can finetune this model on your own dataset. <details><summary>Click to expand</summary> </details> --> <!-- ### Out-of-Scope Use *List how the model may foreseeably be misused and address what users ought not to do with the model.* --> <!-- ## Bias, Risks and Limitations *What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.* --> <!-- ### Recommendations *What are recommendations with respect to the foreseeable issues? 
For example, filtering explicit content.* --> ## Training Details ### Training Dataset #### Unnamed Dataset * Size: 123,750 training samples * Columns: <code>anchor</code> and <code>positive</code> * Approximate statistics based on the first 1000 samples: | | anchor | positive | |:--------|:-----------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------| | type | string | string | | details | <ul><li>min: 10 tokens</li><li>mean: 19.64 tokens</li><li>max: 48 tokens</li></ul> | <ul><li>min: 19 tokens</li><li>mean: 396.68 tokens</li><li>max: 735 tokens</li></ul> | * Samples: | anchor | positive | |:----------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | <code>Manh tràng có hình dạng và vị trí như thế nào trong đại tràng?</code> | <code>Giải phẫu học về túi thừa đại tràng<br>Bài viết được viết bởi Bác sĩ Mai Viễn Phương - Khoa Khám bệnh & Nội Khoa - Bệnh viện Đa khoa Quốc tế Vinmec Central Park<br>Để hiểu rõ về bệnh túi thừa đại tràng và viêm túi thừa đại tràng, cần nắm vững về giải phẫu học và chức năng của ruột. Ruột non là đoạn ruột mảnh và dài bắt đầu từ dạ dày và kết thúc ở ruột già hoặc đại tràng. Đại tràng bắt đầu từ vùng hố chậu phải, có hình dạng một dấu hỏi lớn bắc ngang qua khắp ổ bụng và kết thúc ở trực tràng<br>1. Cấu tạo của đại tràng<br>Đại tràng chia làm 3 phần chính: manh tràng, kết tràng và trực tràng. Ruột non thông với ruột già tại ranh giới giữa manh tràng và kết tràng. Giữa ruột non và ruột già có van hồi manh tràng giữ cho các chất trong ruột già không chảy ngược lại lên ruột non.<br>Cấu tạo của đại trực tràng<br>Manh tràng. Hình dạng giống một chiếc túi hình tròn, vị trí của nó nằm ở ngay phía dưới của hỗng tràng được đổ vào bên trong ruột già. Manh tràng được liên kết với ruột thừa có hình dạng gần giống với ng...</code> | | <code>Chấn thương tụy có thể gây ra những vấn đề gì?</code> | <code>Chấn thương<br>Những sang chấn bên ngoài hay tổn thương trực tiếp vào tuyến tụy làm phá vỡ cấu trúc của tế bào, giải thoát dịch tụy ra ngoài, không chỉ gây viêm tụy cấp mà còn tổn thương các tạng xung quanh, viêm phúc mạc.Do tụy nằm kín đáo phía sau phúc mạc, mọi sang chấn có ảnh hưởng cả tụy thì mức độ thường nặng nề, điều trị khó khăn. Yêu cầu phẫu thuật khẩn cấp luôn được đặt ra để cầm máu, bảo tồn các tạng, làm sạch ổ bụng cũng như cứu giữ tính mạng.<br>11. 
Tụy chia đôi<br>Đây là một bất thường giải phẫu trong thời kỳ bào thai khiến tụy không sáp nhập mà thành hai tuyến tụy riêng biệt với ống bài tiết độc lập.Tuy nhiên, phát hiện này chỉ biết được khi khám nghiệm tử thi và cơ chế gây viêm tụy cũng chưa được biết rõ ràng.<br>12. Bệnh lý mạch máu<br>Tương tự như các cơ quan khác, tuyến tụy cũng được nuôi dưỡng bởi hệ thống mạch máu nuôi. Vì vậy, mọi bệnh lý trên mạch máu đều có thể ảnh hưởng đến chức năng tụy.Trong đó, thiếu máu cục bộ tụy là một nguyên nhân hiếm gặp của viêm tụy cấp nhưng lại rất ...</code> | | <code>Hình ảnh nội soi NBI có điểm gì nổi bật so với nội soi thông thường?</code> | <code>Hình ảnh nội soi NBI có độ phân giải và độ phân giải cao nên giúp bác sỹ dễ dàng phát hiện những thay đổi nhỏ về màu sắc, hình thái của tổn thương ung thư và tiền ung thư mà nội soi thông thường khó phát hiện được.Ngoài ra, nội soi đại trực tràng và mọi quy trình kỹ thuật khác thực hiện tại Vinmec đều được kiểm soát nghiêm ngặt, đạt các tiêu chuẩn chất lượng quốc tế, đảm bảo an toàn và chất lượng cho người bệnh.Trước mỗi ca thực hiện, hệ thống máy nội soi đều được đảm bảo vô trùng, đem đến tính an toàn tuyệt đối đến người bệnh. Sau khi thủ thuật kết thúc, ống nội soi được vệ sinh bằng máy rửa dây soi tự động của chính hãng Olympus dưới hệ thống lọc nước RO.<br>Để đặt lịch khám tại viện, Quý khách vui lòng bấm số<br>HOTLINE<br>hoặc đặt lịch trực tiếp TẠI ĐÂY.<br>Tải và đặt lịch khám tự động trên ứng dụng My. Vinmec để quản lý, theo dõi lịch và đặt hẹn mọi lúc mọi nơi ngay trên ứng dụng.</code> | * Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters: ```json { "scale": 20.0, "similarity_fct": "cos_sim" } ``` ### Evaluation Dataset #### Unnamed Dataset * Size: 1,250 evaluation samples * Columns: <code>anchor</code> and <code>positive</code> * Approximate statistics based on the first 1000 samples: | | anchor | positive | |:--------|:-----------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------| | type | string | string | | details | <ul><li>min: 10 tokens</li><li>mean: 19.71 tokens</li><li>max: 58 tokens</li></ul> | <ul><li>min: 17 tokens</li><li>mean: 396.06 tokens</li><li>max: 632 tokens</li></ul> | * Samples: | anchor | positive | 
|:-------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | <code>Moxonidine có tác dụng gì đối với bệnh tăng huyết áp?</code> | <code>sử dụng trong thận nó là AS chống lại rối loạn nhịp tim VVI trong nhiều môi trường kinh nghiệm khác nhau nó không có tác dụng ức chế hô hấp do kích hoạt CE alpha nó có tác dụng có lợi đối với chuyển hóa thủy tinh và lipid máu ở chuột béo phì bị tăng huyết áp di truyền nó thể hiện hoạt động chống loét và cuối cùng là moxonidine làm giảm áp lực nội nhãn gợi ý một lợi ích có thể có trong bệnh tăng nhãn áp do đó MOX bằng phương thức hoạt động mới của nó thể hiện một nguyên tắc điều trị mới trong điều trị tăng huyết áp vì đặc tính độc đáo của nó. MOX có thể chứng minh là có hiệu quả trong việc làm chậm sự tiến triển của bệnh bằng cách mang lại tác dụng bảo vệ ngoài việc chỉ giảm huyết áp cần nghiên cứu thêm để xác minh tiềm năng này</code> | | <code>Tại sao bác sĩ Haydar Cemal lại đưa ra tuyên bố về việc tiêm chủng cho những người bị kết án tử hình?</code> | <code>vắc-xin để tiêm chủng tuyệt đối cho một số đối tượng bị kết án tử hình, bác sĩ haydar cemal tuyên bố trên một tờ báo ngày tháng 12 rằng những người được báo cáo là đối tượng bị kết án tử hình thực sự là người Armenia và những người Armenia vô tội bị đánh dấu trục xuất đã được tiêm máu của bệnh nhân sốt phát ban và rằng anh ta đã chứng kiến ​​​​tất cả những sự kiện này do lời khai của anh ta, Bộ Nội vụ đã yêu cầu một cuộc điều tra ngay lập tức và khi kết thúc cuộc điều tra đó, người ta hiểu rằng bác sĩ haydar cemal và bác sĩ hamdi suat chưa bao giờ làm việc cùng nhau ở erzincan vào thời điểm bác sĩ haydar cemal tuyên bố tất cả các tuyên bố đều bị EC điều tra bác bỏ và không ai bị buộc tội trong một trận dịch sốt phát ban nghiêm trọng. Các bác sĩ Thổ Nhĩ Kỳ đã tiêm vắc xin sốt phát ban nhằm mục đích cứu một mạng sống khỏi đám cháy. 
Theo các nguồn tin, sự phân biệt đối xử tôn giáo đối với những người được tiêm chủng đã được chứng minh rằng tuyên bố rằng một số bác sĩ Thổ Nhĩ Kỳ đã sử dụng...</code> | | <code>Sinh thiết ảnh hưởng đến quyết định lâm sàng như thế nào trong giai đoạn đầu sau ghép gan?</code> | <code>PLB thông thường trong giai đoạn hậu phẫu sớm trong vòng vài ngày sau khi ghép CL có thể không thực hiện được do bệnh lý đông máu và/hoặc cổ trướng việc sử dụng sinh thiết VG gan xuyên tĩnh mạch cảnh tjlb trong trường hợp này là một phương pháp thay thế hấp dẫn vì có thể thu được chẩn đoán mô mặc dù có chống chỉ định tương đối đối với sinh thiết qua da trong giai đoạn này trong giai đoạn đầu sau ghép tjlb đã được thực hiện ở những bệnh nhân gan phần lớn trong số họ đã ghép gan thông thường mà không bảo tồn VPI VC bản địa những người khác đã có IVC bản địa trong tỷ lệ thành công về mặt kỹ thuật là với mẫu bệnh phẩm đủ để chẩn đoán xác định trong hầu hết các trường hợp bao gồm cả chẩn đoán đào thải và không đào thải trên mô bệnh học cuối cùng chẩn đoán sinh thiết ảnh hưởng đến việc xử trí lâm sàng trong phần lớn các trường hợp với các quyết định được đưa ra để thực hiện ghép lại để ảnh hưởng đến việc bắt đầu liệu pháp chống đào thải và bắt đầu liệu pháp kháng vi-rút không có bệnh tật hoặ...</code> | * Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters: ```json { "scale": 20.0, "similarity_fct": "cos_sim" } ``` ### Training Hyperparameters #### Non-Default Hyperparameters - `eval_strategy`: steps - `per_device_train_batch_size`: 32 - `per_device_eval_batch_size`: 32 - `warmup_ratio`: 0.1 - `bf16`: True - `batch_sampler`: no_duplicates #### All Hyperparameters <details><summary>Click to expand</summary> - `overwrite_output_dir`: False - `do_predict`: False - `eval_strategy`: steps - `prediction_loss_only`: True - `per_device_train_batch_size`: 32 - `per_device_eval_batch_size`: 32 - `per_gpu_train_batch_size`: None - `per_gpu_eval_batch_size`: None - `gradient_accumulation_steps`: 1 - `eval_accumulation_steps`: None - `torch_empty_cache_steps`: None - `learning_rate`: 5e-05 - `weight_decay`: 0.0 - `adam_beta1`: 0.9 - `adam_beta2`: 0.999 - `adam_epsilon`: 1e-08 - `max_grad_norm`: 1.0 - `num_train_epochs`: 3 - `max_steps`: -1 - `lr_scheduler_type`: linear - `lr_scheduler_kwargs`: {} - `warmup_ratio`: 0.1 - `warmup_steps`: 0 - `log_level`: passive - `log_level_replica`: warning - `log_on_each_node`: True - `logging_nan_inf_filter`: True - `save_safetensors`: True - `save_on_each_node`: False - `save_only_model`: False - `restore_callback_states_from_checkpoint`: False - `no_cuda`: False - `use_cpu`: False - `use_mps_device`: False - `seed`: 42 - `data_seed`: None - `jit_mode_eval`: False - `use_ipex`: False - `bf16`: True - `fp16`: False - `fp16_opt_level`: O1 - `half_precision_backend`: auto - `bf16_full_eval`: False - `fp16_full_eval`: False - `tf32`: None - `local_rank`: 0 - `ddp_backend`: None - `tpu_num_cores`: None - `tpu_metrics_debug`: False - `debug`: [] - `dataloader_drop_last`: False - `dataloader_num_workers`: 0 - `dataloader_prefetch_factor`: None - `past_index`: -1 - `disable_tqdm`: False - `remove_unused_columns`: True - `label_names`: None - `load_best_model_at_end`: False - `ignore_data_skip`: False - `fsdp`: [] - `fsdp_min_num_params`: 0 - `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False} - 
`fsdp_transformer_layer_cls_to_wrap`: None - `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None} - `deepspeed`: None - `label_smoothing_factor`: 0.0 - `optim`: adamw_torch - `optim_args`: None - `adafactor`: False - `group_by_length`: False - `length_column_name`: length - `ddp_find_unused_parameters`: None - `ddp_bucket_cap_mb`: None - `ddp_broadcast_buffers`: False - `dataloader_pin_memory`: True - `dataloader_persistent_workers`: False - `skip_memory_metrics`: True - `use_legacy_prediction_loop`: False - `push_to_hub`: False - `resume_from_checkpoint`: None - `hub_model_id`: None - `hub_strategy`: every_save - `hub_private_repo`: False - `hub_always_push`: False - `gradient_checkpointing`: False - `gradient_checkpointing_kwargs`: None - `include_inputs_for_metrics`: False - `eval_do_concat_batches`: True - `fp16_backend`: auto - `push_to_hub_model_id`: None - `push_to_hub_organization`: None - `mp_parameters`: - `auto_find_batch_size`: False - `full_determinism`: False - `torchdynamo`: None - `ray_scope`: last - `ddp_timeout`: 1800 - `torch_compile`: False - `torch_compile_backend`: None - `torch_compile_mode`: None - `dispatch_batches`: None - `split_batches`: None - `include_tokens_per_second`: False - `include_num_input_tokens_seen`: False - `neftune_noise_alpha`: None - `optim_target_modules`: None - `batch_eval_metrics`: False - `eval_on_start`: False - `use_liger_kernel`: False - `eval_use_gather_object`: False - `prompts`: None - `batch_sampler`: no_duplicates - `multi_dataset_batch_sampler`: proportional </details> ### Training Logs | Epoch | Step | Training Loss | Validation Loss | |:------:|:-----:|:-------------:|:---------------:| | 0.0517 | 200 | 0.1317 | 0.0428 | | 0.1034 | 400 | 0.0646 | 0.0437 | | 0.1551 | 600 | 0.063 | 0.0416 | | 0.2068 | 800 | 0.0623 | 0.0441 | | 0.2585 | 1000 | 0.0752 | 0.0475 | | 0.3102 | 1200 | 0.0754 | 0.0491 | | 0.3619 | 1400 | 0.0794 | 0.0507 | | 0.4137 | 1600 | 0.0849 | 0.0658 | | 0.4654 | 1800 | 0.076 | 0.0503 | | 0.5171 | 2000 | 0.0778 | 0.0490 | | 0.5688 | 2200 | 0.0747 | 0.0455 | | 0.6205 | 2400 | 0.0601 | 0.0412 | | 0.6722 | 2600 | 0.0664 | 0.0458 | | 0.7239 | 2800 | 0.0628 | 0.0430 | | 0.7756 | 3000 | 0.067 | 0.0500 | | 0.8273 | 3200 | 0.0655 | 0.0501 | | 0.8790 | 3400 | 0.0626 | 0.0577 | | 0.9307 | 3600 | 0.0731 | 0.0512 | | 0.9824 | 3800 | 0.0662 | 0.0443 | | 1.0341 | 4000 | 0.056 | 0.0428 | | 1.0858 | 4200 | 0.0496 | 0.0378 | | 1.1375 | 4400 | 0.0405 | 0.0360 | | 1.1892 | 4600 | 0.0333 | 0.0318 | | 1.2410 | 4800 | 0.0326 | 0.0313 | | 1.2927 | 5000 | 0.0273 | 0.0321 | | 1.3444 | 5200 | 0.0254 | 0.0334 | | 1.3961 | 5400 | 0.0261 | 0.0291 | | 1.4478 | 5600 | 0.0228 | 0.0359 | | 1.4995 | 5800 | 0.0247 | 0.0344 | | 1.5512 | 6000 | 0.0216 | 0.0344 | | 1.6029 | 6200 | 0.0183 | 0.0303 | | 1.6546 | 6400 | 0.0205 | 0.0294 | | 1.7063 | 6600 | 0.0194 | 0.0294 | | 1.7580 | 6800 | 0.018 | 0.0258 | | 1.8097 | 7000 | 0.0197 | 0.0296 | | 1.8614 | 7200 | 0.0196 | 0.0340 | | 1.9131 | 7400 | 0.0207 | 0.0284 | | 1.9648 | 7600 | 0.0234 | 0.0268 | | 2.0165 | 7800 | 0.0139 | 0.0275 | | 2.0683 | 8000 | 0.015 | 0.0270 | | 2.1200 | 8200 | 0.0103 | 0.0293 | | 2.1717 | 8400 | 0.0095 | 0.0259 | | 2.2234 | 8600 | 0.0082 | 0.0284 | | 2.2751 | 8800 | 0.0077 | 0.0264 | | 2.3268 | 9000 | 0.0073 | 0.0252 | | 2.3785 | 9200 | 0.0078 | 0.0247 | | 2.4302 | 9400 | 0.0074 | 0.0294 | | 2.4819 | 9600 | 0.0059 | 0.0324 | | 2.5336 | 9800 | 0.007 | 
0.0321 | | 2.5853 | 10000 | 0.005 | 0.0299 | | 2.6370 | 10200 | 0.0062 | 0.0283 | | 2.6887 | 10400 | 0.0059 | 0.0277 | | 2.7404 | 10600 | 0.0053 | 0.0285 | | 2.7921 | 10800 | 0.0049 | 0.0280 | | 2.8438 | 11000 | 0.0055 | 0.0288 | | 2.8956 | 11200 | 0.0046 | 0.0286 | | 2.9473 | 11400 | 0.0072 | 0.0280 | | 2.9990 | 11600 | 0.0046 | 0.0281 | ### Framework Versions - Python: 3.9.20 - Sentence Transformers: 3.3.1 - Transformers: 4.45.2 - PyTorch: 2.4.0+cu121 - Accelerate: 1.0.1 - Datasets: 3.0.0 - Tokenizers: 0.20.1 ## Citation ### BibTeX #### Sentence Transformers ```bibtex @inproceedings{reimers-2019-sentence-bert, title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks", author = "Reimers, Nils and Gurevych, Iryna", booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing", month = "11", year = "2019", publisher = "Association for Computational Linguistics", url = "https://arxiv.org/abs/1908.10084", } ``` #### MultipleNegativesRankingLoss ```bibtex @misc{henderson2017efficient, title={Efficient Natural Language Response Suggestion for Smart Reply}, author={Matthew Henderson and Rami Al-Rfou and Brian Strope and Yun-hsuan Sung and Laszlo Lukacs and Ruiqi Guo and Sanjiv Kumar and Balint Miklos and Ray Kurzweil}, year={2017}, eprint={1705.00652}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` <!-- ## Glossary *Clearly define terms in order to be accessible across audiences.* --> <!-- ## Model Card Authors *Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.* --> <!-- ## Model Card Contact *Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.* -->
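The loss and the non-default hyperparameters listed above map directly onto the sentence-transformers v3 training API. The following is a hedged sketch of how such a run could be wired up; the base checkpoint name and the pair dataset are placeholders (assumptions, not this card's actual inputs), while the batch size of 32, warmup ratio 0.1, bf16, step-based evaluation every 200 steps, and the `no_duplicates` batch sampler mirror the values reported above.

```python
# Minimal sketch (assumption, not this card's actual training script):
# a sentence-transformers v3 fine-tune with MultipleNegativesRankingLoss
# using the hyperparameters reported above.
from datasets import load_dataset
from sentence_transformers import (
    SentenceTransformer,
    SentenceTransformerTrainer,
    SentenceTransformerTrainingArguments,
)
from sentence_transformers.losses import MultipleNegativesRankingLoss
from sentence_transformers.training_args import BatchSamplers

# Placeholders: a base checkpoint and a two-column (question, answer) pair dataset.
model = SentenceTransformer("path/to/base-model")
pairs = load_dataset("your-org/your-qa-pairs", split="train").train_test_split(test_size=0.01)

# scale=20.0 with the default cosine similarity, matching the loss parameters shown above.
loss = MultipleNegativesRankingLoss(model, scale=20.0)

args = SentenceTransformerTrainingArguments(
    output_dir="output",
    num_train_epochs=3,
    per_device_train_batch_size=32,
    per_device_eval_batch_size=32,
    warmup_ratio=0.1,
    bf16=True,
    eval_strategy="steps",
    eval_steps=200,  # the training log above evaluates every 200 steps
    batch_sampler=BatchSamplers.NO_DUPLICATES,  # avoid duplicate in-batch negatives
)

trainer = SentenceTransformerTrainer(
    model=model,
    args=args,
    train_dataset=pairs["train"],
    eval_dataset=pairs["test"],
    loss=loss,
)
trainer.train()
```

With in-batch negatives, the `no_duplicates` sampler matters: a duplicate positive landing in the same batch would otherwise be scored as a negative and pull the loss in the wrong direction.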
[ "TEXT_CLASSIFICATION" ]
[ "CHIA" ]
Philipp-Sc/mistral-7b-reverse-instruct
Philipp-Sc
text-generation
[ "safetensors", "gguf", "text-generation", "en", "dataset:pankajmathur/WizardLM_Orca", "dataset:teknium/trismegistus-project", "dataset:unalignment/toxic-dpo-v0.1", "dataset:Intel/orca_dpo_pairs", "license:apache-2.0", "endpoints_compatible", "region:us", "conversational" ]
2023-12-13T05:42:00
2023-12-20T08:01:42
105
5
--- datasets: - pankajmathur/WizardLM_Orca - teknium/trismegistus-project - unalignment/toxic-dpo-v0.1 - Intel/orca_dpo_pairs language: - en license: apache-2.0 pipeline_tag: text-generation --- ## Mistral 7b Reverse Instruct This model is SFT (LoRA) fine-tuned to reverse engineer the original prompt of a given LLM output/response. Use Case: The generation of synthetic instruct datasets for developing chatbots and domain-specific fine-tuning (e.g. "Summarization" & "Roleplay"). It is useful for labelling unlabeled datasets. - base_model: mistralai/Mistral-7B-v0.1 (=checkpoint-v1) - base_model: mistralai/Mistral-7B-v0.2 (>=checkpoint-v2) For convenience the latest model export is provided under [/latest_model_export](https://huggingface.co/Philipp-Sc/mistral-7b-reverse-instruct/tree/main/latest_model_export) as well as GGUF quantized versions under [/latest_ggml_models](https://huggingface.co/Philipp-Sc/mistral-7b-reverse-instruct/tree/main/latest_ggml_models) ## Response Format "[INST]\n### System:\n{system}\n### Instruction:\n{instruction}\n[/INST]\n" - Grammar File: [inst_format.gbnf](https://huggingface.co/Philipp-Sc/mistral-7b-reverse-instruct/blob/main/inst_format.gbnf) ## Prompt Template "\n### System:\nYou craft instructions for generating the given output through reverse engineering.\n### Instruction:\nDecipher the steps used to produce the given output and articulate a refined set of instructions (System & Instruction).\n### OUTPUT:\n {output}" (use the template without the " ") ## Example Completions Prompt 1: ```bash ### System: You craft instructions for generating the given output through reverse engineering. ### Instruction: Decipher the steps used to produce the given output and articulate a refined set of instructions (System & Instruction). ### OUTPUT: SE3 Condenser Microphone from SE Electronics Sonic Distribution is now handling the SE Electronics line of imported studio condensers. The SE3 caught my eye at the Summer NAMM Show in Nashville and is their flagship pencil microphone with a fixed cardioid pattern and 48V phantom powering. This mic uses Class A FET amplifier electronics and has both low cut filter and -10dB pad switches. I had the opportunity to try this mic out on several sources while recording a band and was impressed by its natural sound and all around usefulness. I used it for acoustic guitar overdubs where the low cut filter helped to tame a jumbo bodied guitar's boomy sound. The gentle presence lift added a sparkle without using EQ. I also tried it on drums and cymbals and it (using the pad) didn't fold up (overload) at all. I even tried it on vocals with good results although it does 'pop' easily and required a couple of pop screens. Housed in an elegantly finished new body design, it comes with a sturdy shock mount and packaged in a deluxe wooden travel case. Significant specifications are: frequency response rated at 20Hz-20khz; sensitivity is 10mV/Pa +/- 2dB; noise level is 17dB (A weighted); and Max SPL for 0.5% THD @ 1kHz is 135dB. I certainly found a 'Swiss army knife' of a condenser with the SE3 and I completely recommend it for any studio task especially acoustic instruments such as guitar, violin, cello or string bass. ### RESPONSE: ``` (example from [vikp/reverse_instruct](https://huggingface.co/vikp/reverse_instruct)) Response 1: ```bash [INST] ### System: You are a professional musician and sound engineer who specializes in mixing live music.
You have recently reviewed the SE Electronics SE3 Condenser Microphone and want to share your experience with your followers on social media. ### Instruction: Write a short review of the SE Electronics SE3 Condenser Microphone for your social media followers. [/INST] ``` Prompt 2: ```bash ### System: You craft instructions for generating the given output through reverse engineering. ### Instruction: Decipher the steps used to produce the given output and articulate a refined set of instructions (System & Instruction). ### OUTPUT: **Lesson: Introduction to Sales** **Objective:** To introduce students to the concept of sales and its importance in business. **Introduction:** Sales is the process of persuading someone to buy a product or service. It is an essential part of any business, as it helps generate revenue and grow the company. In this lesson, we will learn about the basics of sales and how it can benefit businesses. **Section 1: What is Sales?** **Topic 1: Definition of Sales** Sales is the process of persuading someone to buy a product or service. It involves identifying the needs of potential customers, presenting solutions that meet those needs, and closing the deal by making the sale. **Topic 2: Importance of Sales** Sales is crucial to the success of a business because it generates revenue and helps the company grow. Without sales, businesses would not be able to make profits, expand their operations, or reach new customers. **Topic 3: Types of Sales** There are several types of sales, including direct sales, indirect sales, and online sales. Direct sales involve selling products or services directly to customers, while indirect sales involve selling through intermediaries such as distributors or retailers. Online sales involve selling products or services over the internet. **Section 2: Skills Required for Sales** **Topic 1: Communication Skills** Effective communication is essential for sales success. Salespeople must be able to communicate clearly and persuasively to potential customers, negotiate deals, and close sales. **Topic 2: Interpersonal Skills** Salespeople must also have strong interpersonal skills, including active listening, empathy, and building relationships with customers. These skills help salespeople understand customer needs and build trust with them. **Topic 3: Time Management Skills** Sales can be a time-consuming process, so salespeople must be skilled at managing their time effectively. They must prioritize tasks, set goals, and stay organized to be successful. **Section 3: Benefits of Sales** **Topic 1: Financial Benefits** Sales generate revenue for businesses, which can be used to fund growth and expansion. Sales also help businesses increase their profits by generating more income than expenses. **Topic 2: Customer Benefits** Sales can also benefit customers by providing them with products or services that meet their needs and improve their lives. Sales help businesses introduce new products and services to the market, which can improve customers' lives and create new opportunities for them. **Conclusion:** In conclusion, sales is an essential part of any business, as it helps generate revenue and grow the company. By understanding the basics of sales, developing essential skills, and recognizing the benefits of sales, individuals can become successful salespeople and contribute to the growth of their businesses. 
**Glossary:** - Sales: The process of persuading someone to buy a product or service - Interpersonal Skills: The ability to communicate and build relationships with others - Time Management Skills: The ability to prioritize tasks, set goals, and stay organized - Revenue: The income generated by a business from its sales - Profit: The difference between a business's income and expenses - Intermediaries: Companies that sell products or services on behalf of another company`` ``` (source [nampdn-ai/tiny-textbooks](https://huggingface.co/nampdn-ai/tiny-textbooks)) Response 2: ```bash [INST] ### System: You are an AI assistant. You should describe the task and explain your answer. While answering a multiple choice question, first output the correct answer(s). Then explain why other answers are wrong. You might need to use additional information to answer the question. ### Instruction: Create a detailed lesson plan with a specific focus on teaching the concept of sales, including the various aspects of the sales process such as lead generation, prospecting, qualification, presentation, objection handling, closing techniques, and post-sales activities. The lesson should also cover the key skills required for successful sales professionals, such as communication, interpersonal skills, time management, and product knowledge. Additionally, the plan should include examples of real-life sales scenarios and role-plays to help students develop practical skills and gain a deeper understanding of the sales industry. Finally, the lesson plan should be designed to meet the needs of different learning styles, incorporating visual aids, interactive activities, and assessments to ensure maximum engagement and effectiveness for all learners. [/INST] ``` ## Training Dataset About 21k items of the following datasets were used. 
(mostly coding-like tasks were removed) - v1 & v2: [reverse-instruct_v1.json](https://huggingface.co/Philipp-Sc/mistral-7b-reverse-instruct/blob/main/reverse-instruct_v1.json) - v3: [reverse-instruct_v2.json](https://huggingface.co/Philipp-Sc/mistral-7b-reverse-instruct/blob/main/reverse-instruct_v2.json) The reverse instruct dataset has been compiled with entries from the following datasets: - [alpaca_gpt4_data](https://raw.githubusercontent.com/Instruction-Tuning-with-GPT-4/GPT-4-LLM/main/data/alpaca_gpt4_data.json) - [roleplay-instruct-v2.1](https://raw.githubusercontent.com/teknium1/GPTeacher/main/Roleplay%20Supplemental/roleplay-instruct-v2.1.json) - [wizardlm_orca](https://huggingface.co/datasets/pankajmathur/WizardLM_Orca/resolve/main/wizardlm_orca.json) - [toxic-dpo-v0.1](https://huggingface.co/datasets/unalignment/toxic-dpo-v0.1/resolve/main/toxic-dpo.parquet) - [orca_dpo_pairs](https://huggingface.co/datasets/Intel/orca_dpo_pairs/resolve/main/orca_rlhf.jsonl) - [occultexpert](https://huggingface.co/datasets/teknium/trismegistus-project/resolve/main/occultexpert.json) ## Training Procedure ```bash !cd LLaMA-Factory && WANDB_DISABLED=True PYTORCH_CUDA_ALLOC_CONF=max_split_size_mb:256 accelerate launch \ --multi_gpu \ --mixed_precision fp16 \ --num_processes 2 \ --num_machines 1 \ --rdzv_backend static \ --same_network \ --gpu_ids all \ --machine_rank 0 \ --main_training_function main \ -- src/train_bash.py \ --stage sft \ --model_name_or_path mistralai/Mistral-7B-Instruct-v0.2 \ --adapter_name_or_path path_to_checkpoint \ --flash_attn \ --neftune_noise_alpha 5 \ --do_train \ --dataset default \ --template vanilla \ --finetuning_type lora \ --lora_target q_proj,v_proj \ --output_dir path_to_sft_checkpoint \ --overwrite_cache \ --per_device_train_batch_size 1 \ --gradient_accumulation_steps 1 \ --lr_scheduler_type cosine \ --logging_steps 10 \ --save_steps 10 \ --save_total_limit 3 \ --learning_rate 5e-5 \ --num_train_epochs 9.0 \ --plot_loss \ --fp16 \ --overwrite_output_dir \ --cutoff_len 4096 \ --quantization_bit 4 ``` ## Training Time - v1: ~12h on Kaggle's P100 GPU - v2: >30h on Kaggle's T4 x2 - v3: >40h on Kaggle's T4 x2 ## Loss - v3: ```bash {'loss': 0.4424, 'learning_rate': 4.8398000023144565e-05, 'epoch': 1.03} ``` ### Framework versions - LLaMA-Factory
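To apply the prompt template and response format documented above to one of the GGUF exports, a llama-cpp-python call along the following lines should work. This is a hedged usage sketch, not an official example: the `.gguf` filename is a placeholder for one of the files under /latest_ggml_models, and the grammar-constrained variant using `inst_format.gbnf` is left commented out.

```python
# Hedged usage sketch: reverse-engineering an instruction from a given LLM output
# with a GGUF quant of this model via llama-cpp-python (pip install llama-cpp-python).
from llama_cpp import Llama

llm = Llama(model_path="mistral-7b-reverse-instruct.q4_k_m.gguf", n_ctx=4096)  # placeholder filename

# The LLM response whose original prompt should be reconstructed.
output_text = "SE3 Condenser Microphone from SE Electronics Sonic Distribution is now handling ..."

# Prompt template from the card, with the target output substituted for {output}.
prompt = (
    "\n### System:\nYou craft instructions for generating the given output through reverse engineering."
    "\n### Instruction:\nDecipher the steps used to produce the given output and articulate a refined set of instructions (System & Instruction)."
    "\n### OUTPUT:\n " + output_text
)

# Optional: constrain decoding to the documented response format with the provided grammar file.
# from llama_cpp import LlamaGrammar
# grammar = LlamaGrammar.from_file("inst_format.gbnf")

result = llm(prompt, max_tokens=512, temperature=0.2, stop=["[/INST]"])
print(result["choices"][0]["text"])  # expected to start with "[INST]\n### System: ..."
```

Stopping on `[/INST]` simply truncates the closing tag of the documented response format; drop the `stop` argument (or pass the grammar) to receive the full wrapper.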
[ "SUMMARIZATION" ]
[ "CRAFT" ]
fishbone64/gte-Qwen2-7B-instruct-Q8_0-GGUF
fishbone64
sentence-similarity
[ "sentence-transformers", "gguf", "mteb", "transformers", "Qwen2", "sentence-similarity", "llama-cpp", "gguf-my-repo", "base_model:Alibaba-NLP/gte-Qwen2-7B-instruct", "base_model:quantized:Alibaba-NLP/gte-Qwen2-7B-instruct", "license:apache-2.0", "model-index", "autotrain_compatible", "endpoints_compatible", "region:us", "conversational" ]
2024-06-26T17:02:19
2024-06-26T17:03:03
105
2
--- base_model: Alibaba-NLP/gte-Qwen2-7B-instruct license: apache-2.0 tags: - mteb - sentence-transformers - transformers - Qwen2 - sentence-similarity - llama-cpp - gguf-my-repo model-index: - name: gte-qwen2-7B-instruct results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 91.31343283582089 - type: ap value: 67.64251402604096 - type: f1 value: 87.53372530755692 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 97.497825 - type: ap value: 96.30329547047529 - type: f1 value: 97.49769793778039 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 62.564 - type: f1 value: 60.975777935041066 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: map_at_1 value: 36.486000000000004 - type: map_at_10 value: 54.842 - type: map_at_100 value: 55.206999999999994 - type: map_at_1000 value: 55.206999999999994 - type: map_at_3 value: 49.893 - type: map_at_5 value: 53.105000000000004 - type: mrr_at_1 value: 37.34 - type: mrr_at_10 value: 55.143 - type: mrr_at_100 value: 55.509 - type: mrr_at_1000 value: 55.509 - type: mrr_at_3 value: 50.212999999999994 - type: mrr_at_5 value: 53.432 - type: ndcg_at_1 value: 36.486000000000004 - type: ndcg_at_10 value: 64.273 - type: ndcg_at_100 value: 65.66199999999999 - type: ndcg_at_1000 value: 65.66199999999999 - type: ndcg_at_3 value: 54.352999999999994 - type: ndcg_at_5 value: 60.131 - type: precision_at_1 value: 36.486000000000004 - type: precision_at_10 value: 9.395000000000001 - type: precision_at_100 value: 0.996 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 22.428 - type: precision_at_5 value: 16.259 - type: recall_at_1 value: 36.486000000000004 - type: recall_at_10 value: 93.95400000000001 - type: recall_at_100 value: 99.644 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 67.283 - type: recall_at_5 value: 81.294 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 56.461169803700564 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 51.73600434466286 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 67.57827065898053 - type: mrr value: 79.08136569493911 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 83.53324575999243 - type: cos_sim_spearman value: 81.37173362822374 - type: euclidean_pearson value: 82.19243335103444 - type: euclidean_spearman value: 81.33679307304334 - type: manhattan_pearson 
value: 82.38752665975699 - type: manhattan_spearman value: 81.31510583189689 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 87.56818181818181 - type: f1 value: 87.25826722019875 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 50.09239610327673 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 46.64733054606282 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: map_at_1 value: 33.997 - type: map_at_10 value: 48.176 - type: map_at_100 value: 49.82 - type: map_at_1000 value: 49.924 - type: map_at_3 value: 43.626 - type: map_at_5 value: 46.275 - type: mrr_at_1 value: 42.059999999999995 - type: mrr_at_10 value: 53.726 - type: mrr_at_100 value: 54.398 - type: mrr_at_1000 value: 54.416 - type: mrr_at_3 value: 50.714999999999996 - type: mrr_at_5 value: 52.639 - type: ndcg_at_1 value: 42.059999999999995 - type: ndcg_at_10 value: 55.574999999999996 - type: ndcg_at_100 value: 60.744 - type: ndcg_at_1000 value: 61.85699999999999 - type: ndcg_at_3 value: 49.363 - type: ndcg_at_5 value: 52.44 - type: precision_at_1 value: 42.059999999999995 - type: precision_at_10 value: 11.101999999999999 - type: precision_at_100 value: 1.73 - type: precision_at_1000 value: 0.218 - type: precision_at_3 value: 24.464 - type: precision_at_5 value: 18.026 - type: recall_at_1 value: 33.997 - type: recall_at_10 value: 70.35900000000001 - type: recall_at_100 value: 91.642 - type: recall_at_1000 value: 97.977 - type: recall_at_3 value: 52.76 - type: recall_at_5 value: 61.148 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval type: BeIR/cqadupstack config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: map_at_1 value: 35.884 - type: map_at_10 value: 48.14 - type: map_at_100 value: 49.5 - type: map_at_1000 value: 49.63 - type: map_at_3 value: 44.646 - type: map_at_5 value: 46.617999999999995 - type: mrr_at_1 value: 44.458999999999996 - type: mrr_at_10 value: 53.751000000000005 - type: mrr_at_100 value: 54.37800000000001 - type: mrr_at_1000 value: 54.415 - type: mrr_at_3 value: 51.815 - type: mrr_at_5 value: 52.882 - type: ndcg_at_1 value: 44.458999999999996 - type: ndcg_at_10 value: 54.157 - type: ndcg_at_100 value: 58.362 - type: ndcg_at_1000 value: 60.178 - type: ndcg_at_3 value: 49.661 - type: ndcg_at_5 value: 51.74999999999999 - type: precision_at_1 value: 44.458999999999996 - type: precision_at_10 value: 10.248 - type: precision_at_100 value: 1.5890000000000002 - type: precision_at_1000 value: 0.207 - type: precision_at_3 value: 23.928 - type: precision_at_5 value: 16.878999999999998 - type: recall_at_1 value: 35.884 - type: recall_at_10 value: 64.798 - type: recall_at_100 value: 82.345 - type: recall_at_1000 value: 93.267 - type: recall_at_3 value: 51.847 - type: recall_at_5 value: 57.601 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval type: BeIR/cqadupstack config: default split: test revision: 
4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: map_at_1 value: 39.383 - type: map_at_10 value: 53.714 - type: map_at_100 value: 54.838 - type: map_at_1000 value: 54.87800000000001 - type: map_at_3 value: 50.114999999999995 - type: map_at_5 value: 52.153000000000006 - type: mrr_at_1 value: 45.016 - type: mrr_at_10 value: 56.732000000000006 - type: mrr_at_100 value: 57.411 - type: mrr_at_1000 value: 57.431 - type: mrr_at_3 value: 54.044000000000004 - type: mrr_at_5 value: 55.639 - type: ndcg_at_1 value: 45.016 - type: ndcg_at_10 value: 60.228 - type: ndcg_at_100 value: 64.277 - type: ndcg_at_1000 value: 65.07 - type: ndcg_at_3 value: 54.124 - type: ndcg_at_5 value: 57.147000000000006 - type: precision_at_1 value: 45.016 - type: precision_at_10 value: 9.937 - type: precision_at_100 value: 1.288 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 24.471999999999998 - type: precision_at_5 value: 16.991 - type: recall_at_1 value: 39.383 - type: recall_at_10 value: 76.175 - type: recall_at_100 value: 93.02 - type: recall_at_1000 value: 98.60900000000001 - type: recall_at_3 value: 60.265 - type: recall_at_5 value: 67.46600000000001 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval type: BeIR/cqadupstack config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: map_at_1 value: 27.426000000000002 - type: map_at_10 value: 37.397000000000006 - type: map_at_100 value: 38.61 - type: map_at_1000 value: 38.678000000000004 - type: map_at_3 value: 34.150999999999996 - type: map_at_5 value: 36.137 - type: mrr_at_1 value: 29.944 - type: mrr_at_10 value: 39.654 - type: mrr_at_100 value: 40.638000000000005 - type: mrr_at_1000 value: 40.691 - type: mrr_at_3 value: 36.817 - type: mrr_at_5 value: 38.524 - type: ndcg_at_1 value: 29.944 - type: ndcg_at_10 value: 43.094 - type: ndcg_at_100 value: 48.789 - type: ndcg_at_1000 value: 50.339999999999996 - type: ndcg_at_3 value: 36.984 - type: ndcg_at_5 value: 40.248 - type: precision_at_1 value: 29.944 - type: precision_at_10 value: 6.78 - type: precision_at_100 value: 1.024 - type: precision_at_1000 value: 0.11800000000000001 - type: precision_at_3 value: 15.895000000000001 - type: precision_at_5 value: 11.39 - type: recall_at_1 value: 27.426000000000002 - type: recall_at_10 value: 58.464000000000006 - type: recall_at_100 value: 84.193 - type: recall_at_1000 value: 95.52000000000001 - type: recall_at_3 value: 42.172 - type: recall_at_5 value: 50.101 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval type: BeIR/cqadupstack config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: map_at_1 value: 19.721 - type: map_at_10 value: 31.604 - type: map_at_100 value: 32.972 - type: map_at_1000 value: 33.077 - type: map_at_3 value: 27.218999999999998 - type: map_at_5 value: 29.53 - type: mrr_at_1 value: 25.0 - type: mrr_at_10 value: 35.843 - type: mrr_at_100 value: 36.785000000000004 - type: mrr_at_1000 value: 36.842000000000006 - type: mrr_at_3 value: 32.193 - type: mrr_at_5 value: 34.264 - type: ndcg_at_1 value: 25.0 - type: ndcg_at_10 value: 38.606 - type: ndcg_at_100 value: 44.272 - type: ndcg_at_1000 value: 46.527 - type: ndcg_at_3 value: 30.985000000000003 - type: ndcg_at_5 value: 34.43 - type: precision_at_1 value: 25.0 - type: precision_at_10 value: 7.811 - type: precision_at_100 value: 1.203 - type: precision_at_1000 value: 0.15 - type: precision_at_3 value: 15.423 - type: precision_at_5 value: 11.791 - type: 
recall_at_1 value: 19.721 - type: recall_at_10 value: 55.625 - type: recall_at_100 value: 79.34400000000001 - type: recall_at_1000 value: 95.208 - type: recall_at_3 value: 35.19 - type: recall_at_5 value: 43.626 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval type: BeIR/cqadupstack config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: map_at_1 value: 33.784 - type: map_at_10 value: 47.522 - type: map_at_100 value: 48.949999999999996 - type: map_at_1000 value: 49.038 - type: map_at_3 value: 43.284 - type: map_at_5 value: 45.629 - type: mrr_at_1 value: 41.482 - type: mrr_at_10 value: 52.830999999999996 - type: mrr_at_100 value: 53.559999999999995 - type: mrr_at_1000 value: 53.588 - type: mrr_at_3 value: 50.016000000000005 - type: mrr_at_5 value: 51.614000000000004 - type: ndcg_at_1 value: 41.482 - type: ndcg_at_10 value: 54.569 - type: ndcg_at_100 value: 59.675999999999995 - type: ndcg_at_1000 value: 60.989000000000004 - type: ndcg_at_3 value: 48.187000000000005 - type: ndcg_at_5 value: 51.183 - type: precision_at_1 value: 41.482 - type: precision_at_10 value: 10.221 - type: precision_at_100 value: 1.486 - type: precision_at_1000 value: 0.17500000000000002 - type: precision_at_3 value: 23.548 - type: precision_at_5 value: 16.805 - type: recall_at_1 value: 33.784 - type: recall_at_10 value: 69.798 - type: recall_at_100 value: 90.098 - type: recall_at_1000 value: 98.176 - type: recall_at_3 value: 52.127 - type: recall_at_5 value: 59.861 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval type: BeIR/cqadupstack config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: map_at_1 value: 28.038999999999998 - type: map_at_10 value: 41.904 - type: map_at_100 value: 43.36 - type: map_at_1000 value: 43.453 - type: map_at_3 value: 37.785999999999994 - type: map_at_5 value: 40.105000000000004 - type: mrr_at_1 value: 35.046 - type: mrr_at_10 value: 46.926 - type: mrr_at_100 value: 47.815000000000005 - type: mrr_at_1000 value: 47.849000000000004 - type: mrr_at_3 value: 44.273 - type: mrr_at_5 value: 45.774 - type: ndcg_at_1 value: 35.046 - type: ndcg_at_10 value: 48.937000000000005 - type: ndcg_at_100 value: 54.544000000000004 - type: ndcg_at_1000 value: 56.069 - type: ndcg_at_3 value: 42.858000000000004 - type: ndcg_at_5 value: 45.644 - type: precision_at_1 value: 35.046 - type: precision_at_10 value: 9.452 - type: precision_at_100 value: 1.429 - type: precision_at_1000 value: 0.173 - type: precision_at_3 value: 21.346999999999998 - type: precision_at_5 value: 15.342 - type: recall_at_1 value: 28.038999999999998 - type: recall_at_10 value: 64.59700000000001 - type: recall_at_100 value: 87.735 - type: recall_at_1000 value: 97.41300000000001 - type: recall_at_3 value: 47.368 - type: recall_at_5 value: 54.93900000000001 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: BeIR/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 28.17291666666667 - type: map_at_10 value: 40.025749999999995 - type: map_at_100 value: 41.39208333333333 - type: map_at_1000 value: 41.499249999999996 - type: map_at_3 value: 36.347 - type: map_at_5 value: 38.41391666666667 - type: mrr_at_1 value: 33.65925 - type: mrr_at_10 value: 44.085499999999996 - type: mrr_at_100 value: 44.94116666666667 - type: mrr_at_1000 value: 44.9855 - type: mrr_at_3 value: 41.2815 - type: mrr_at_5 value: 42.91491666666666 - type: ndcg_at_1 
value: 33.65925 - type: ndcg_at_10 value: 46.430833333333325 - type: ndcg_at_100 value: 51.761 - type: ndcg_at_1000 value: 53.50899999999999 - type: ndcg_at_3 value: 40.45133333333333 - type: ndcg_at_5 value: 43.31483333333334 - type: precision_at_1 value: 33.65925 - type: precision_at_10 value: 8.4995 - type: precision_at_100 value: 1.3210000000000004 - type: precision_at_1000 value: 0.16591666666666666 - type: precision_at_3 value: 19.165083333333335 - type: precision_at_5 value: 13.81816666666667 - type: recall_at_1 value: 28.17291666666667 - type: recall_at_10 value: 61.12624999999999 - type: recall_at_100 value: 83.97266666666667 - type: recall_at_1000 value: 95.66550000000001 - type: recall_at_3 value: 44.661249999999995 - type: recall_at_5 value: 51.983333333333334 - type: map_at_1 value: 17.936 - type: map_at_10 value: 27.399 - type: map_at_100 value: 28.632 - type: map_at_1000 value: 28.738000000000003 - type: map_at_3 value: 24.456 - type: map_at_5 value: 26.06 - type: mrr_at_1 value: 19.224 - type: mrr_at_10 value: 28.998 - type: mrr_at_100 value: 30.11 - type: mrr_at_1000 value: 30.177 - type: mrr_at_3 value: 26.247999999999998 - type: mrr_at_5 value: 27.708 - type: ndcg_at_1 value: 19.224 - type: ndcg_at_10 value: 32.911 - type: ndcg_at_100 value: 38.873999999999995 - type: ndcg_at_1000 value: 41.277 - type: ndcg_at_3 value: 27.142 - type: ndcg_at_5 value: 29.755 - type: precision_at_1 value: 19.224 - type: precision_at_10 value: 5.6930000000000005 - type: precision_at_100 value: 0.9259999999999999 - type: precision_at_1000 value: 0.126 - type: precision_at_3 value: 12.138 - type: precision_at_5 value: 8.909 - type: recall_at_1 value: 17.936 - type: recall_at_10 value: 48.096 - type: recall_at_100 value: 75.389 - type: recall_at_1000 value: 92.803 - type: recall_at_3 value: 32.812999999999995 - type: recall_at_5 value: 38.851 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval type: BeIR/cqadupstack config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: map_at_1 value: 24.681 - type: map_at_10 value: 34.892 - type: map_at_100 value: 35.996 - type: map_at_1000 value: 36.083 - type: map_at_3 value: 31.491999999999997 - type: map_at_5 value: 33.632 - type: mrr_at_1 value: 28.528 - type: mrr_at_10 value: 37.694 - type: mrr_at_100 value: 38.613 - type: mrr_at_1000 value: 38.668 - type: mrr_at_3 value: 34.714 - type: mrr_at_5 value: 36.616 - type: ndcg_at_1 value: 28.528 - type: ndcg_at_10 value: 40.703 - type: ndcg_at_100 value: 45.993 - type: ndcg_at_1000 value: 47.847 - type: ndcg_at_3 value: 34.622 - type: ndcg_at_5 value: 38.035999999999994 - type: precision_at_1 value: 28.528 - type: precision_at_10 value: 6.902 - type: precision_at_100 value: 1.0370000000000001 - type: precision_at_1000 value: 0.126 - type: precision_at_3 value: 15.798000000000002 - type: precision_at_5 value: 11.655999999999999 - type: recall_at_1 value: 24.681 - type: recall_at_10 value: 55.81 - type: recall_at_100 value: 79.785 - type: recall_at_1000 value: 92.959 - type: recall_at_3 value: 39.074 - type: recall_at_5 value: 47.568 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval type: BeIR/cqadupstack config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: map_at_1 value: 18.627 - type: map_at_10 value: 27.872000000000003 - type: map_at_100 value: 29.237999999999996 - type: map_at_1000 value: 29.363 - type: map_at_3 value: 24.751 - type: map_at_5 value: 26.521 - type: mrr_at_1 value: 23.021 
- type: mrr_at_10 value: 31.924000000000003 - type: mrr_at_100 value: 32.922000000000004 - type: mrr_at_1000 value: 32.988 - type: mrr_at_3 value: 29.192 - type: mrr_at_5 value: 30.798 - type: ndcg_at_1 value: 23.021 - type: ndcg_at_10 value: 33.535 - type: ndcg_at_100 value: 39.732 - type: ndcg_at_1000 value: 42.201 - type: ndcg_at_3 value: 28.153 - type: ndcg_at_5 value: 30.746000000000002 - type: precision_at_1 value: 23.021 - type: precision_at_10 value: 6.459 - type: precision_at_100 value: 1.1320000000000001 - type: precision_at_1000 value: 0.153 - type: precision_at_3 value: 13.719000000000001 - type: precision_at_5 value: 10.193000000000001 - type: recall_at_1 value: 18.627 - type: recall_at_10 value: 46.463 - type: recall_at_100 value: 74.226 - type: recall_at_1000 value: 91.28500000000001 - type: recall_at_3 value: 31.357000000000003 - type: recall_at_5 value: 38.067 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval type: BeIR/cqadupstack config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: map_at_1 value: 31.457 - type: map_at_10 value: 42.888 - type: map_at_100 value: 44.24 - type: map_at_1000 value: 44.327 - type: map_at_3 value: 39.588 - type: map_at_5 value: 41.423 - type: mrr_at_1 value: 37.126999999999995 - type: mrr_at_10 value: 47.083000000000006 - type: mrr_at_100 value: 47.997 - type: mrr_at_1000 value: 48.044 - type: mrr_at_3 value: 44.574000000000005 - type: mrr_at_5 value: 46.202 - type: ndcg_at_1 value: 37.126999999999995 - type: ndcg_at_10 value: 48.833 - type: ndcg_at_100 value: 54.327000000000005 - type: ndcg_at_1000 value: 56.011 - type: ndcg_at_3 value: 43.541999999999994 - type: ndcg_at_5 value: 46.127 - type: precision_at_1 value: 37.126999999999995 - type: precision_at_10 value: 8.376999999999999 - type: precision_at_100 value: 1.2309999999999999 - type: precision_at_1000 value: 0.146 - type: precision_at_3 value: 20.211000000000002 - type: precision_at_5 value: 14.16 - type: recall_at_1 value: 31.457 - type: recall_at_10 value: 62.369 - type: recall_at_100 value: 85.444 - type: recall_at_1000 value: 96.65599999999999 - type: recall_at_3 value: 47.961 - type: recall_at_5 value: 54.676 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval type: BeIR/cqadupstack config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: map_at_1 value: 27.139999999999997 - type: map_at_10 value: 38.801 - type: map_at_100 value: 40.549 - type: map_at_1000 value: 40.802 - type: map_at_3 value: 35.05 - type: map_at_5 value: 36.884 - type: mrr_at_1 value: 33.004 - type: mrr_at_10 value: 43.864 - type: mrr_at_100 value: 44.667 - type: mrr_at_1000 value: 44.717 - type: mrr_at_3 value: 40.777 - type: mrr_at_5 value: 42.319 - type: ndcg_at_1 value: 33.004 - type: ndcg_at_10 value: 46.022 - type: ndcg_at_100 value: 51.542 - type: ndcg_at_1000 value: 53.742000000000004 - type: ndcg_at_3 value: 39.795 - type: ndcg_at_5 value: 42.272 - type: precision_at_1 value: 33.004 - type: precision_at_10 value: 9.012 - type: precision_at_100 value: 1.7770000000000001 - type: precision_at_1000 value: 0.26 - type: precision_at_3 value: 19.038 - type: precision_at_5 value: 13.675999999999998 - type: recall_at_1 value: 27.139999999999997 - type: recall_at_10 value: 60.961 - type: recall_at_100 value: 84.451 - type: recall_at_1000 value: 98.113 - type: recall_at_3 value: 43.001 - type: recall_at_5 value: 49.896 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: 
mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: map_at_1 value: 22.076999999999998 - type: map_at_10 value: 35.44 - type: map_at_100 value: 37.651 - type: map_at_1000 value: 37.824999999999996 - type: map_at_3 value: 30.764999999999997 - type: map_at_5 value: 33.26 - type: mrr_at_1 value: 50.163000000000004 - type: mrr_at_10 value: 61.207 - type: mrr_at_100 value: 61.675000000000004 - type: mrr_at_1000 value: 61.692 - type: mrr_at_3 value: 58.60999999999999 - type: mrr_at_5 value: 60.307 - type: ndcg_at_1 value: 50.163000000000004 - type: ndcg_at_10 value: 45.882 - type: ndcg_at_100 value: 53.239999999999995 - type: ndcg_at_1000 value: 55.852000000000004 - type: ndcg_at_3 value: 40.514 - type: ndcg_at_5 value: 42.038 - type: precision_at_1 value: 50.163000000000004 - type: precision_at_10 value: 13.466000000000001 - type: precision_at_100 value: 2.164 - type: precision_at_1000 value: 0.266 - type: precision_at_3 value: 29.707 - type: precision_at_5 value: 21.694 - type: recall_at_1 value: 22.076999999999998 - type: recall_at_10 value: 50.193 - type: recall_at_100 value: 74.993 - type: recall_at_1000 value: 89.131 - type: recall_at_3 value: 35.472 - type: recall_at_5 value: 41.814 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: map_at_1 value: 9.953 - type: map_at_10 value: 24.515 - type: map_at_100 value: 36.173 - type: map_at_1000 value: 38.351 - type: map_at_3 value: 16.592000000000002 - type: map_at_5 value: 20.036 - type: mrr_at_1 value: 74.25 - type: mrr_at_10 value: 81.813 - type: mrr_at_100 value: 82.006 - type: mrr_at_1000 value: 82.011 - type: mrr_at_3 value: 80.875 - type: mrr_at_5 value: 81.362 - type: ndcg_at_1 value: 62.5 - type: ndcg_at_10 value: 52.42 - type: ndcg_at_100 value: 56.808 - type: ndcg_at_1000 value: 63.532999999999994 - type: ndcg_at_3 value: 56.654 - type: ndcg_at_5 value: 54.18300000000001 - type: precision_at_1 value: 74.25 - type: precision_at_10 value: 42.699999999999996 - type: precision_at_100 value: 13.675 - type: precision_at_1000 value: 2.664 - type: precision_at_3 value: 60.5 - type: precision_at_5 value: 52.800000000000004 - type: recall_at_1 value: 9.953 - type: recall_at_10 value: 30.253999999999998 - type: recall_at_100 value: 62.516000000000005 - type: recall_at_1000 value: 84.163 - type: recall_at_3 value: 18.13 - type: recall_at_5 value: 22.771 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 79.455 - type: f1 value: 74.16798697647569 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: map_at_1 value: 87.531 - type: map_at_10 value: 93.16799999999999 - type: map_at_100 value: 93.341 - type: map_at_1000 value: 93.349 - type: map_at_3 value: 92.444 - type: map_at_5 value: 92.865 - type: mrr_at_1 value: 94.014 - type: mrr_at_10 value: 96.761 - type: mrr_at_100 value: 96.762 - type: mrr_at_1000 value: 96.762 - type: mrr_at_3 value: 96.672 - type: mrr_at_5 value: 96.736 - type: ndcg_at_1 value: 94.014 - type: ndcg_at_10 value: 95.112 - type: ndcg_at_100 value: 95.578 - type: ndcg_at_1000 value: 95.68900000000001 - type: ndcg_at_3 value: 94.392 - type: ndcg_at_5 value: 94.72500000000001 - type: precision_at_1 
value: 94.014 - type: precision_at_10 value: 11.065 - type: precision_at_100 value: 1.157 - type: precision_at_1000 value: 0.11800000000000001 - type: precision_at_3 value: 35.259 - type: precision_at_5 value: 21.599 - type: recall_at_1 value: 87.531 - type: recall_at_10 value: 97.356 - type: recall_at_100 value: 98.965 - type: recall_at_1000 value: 99.607 - type: recall_at_3 value: 95.312 - type: recall_at_5 value: 96.295 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: map_at_1 value: 32.055 - type: map_at_10 value: 53.114 - type: map_at_100 value: 55.235 - type: map_at_1000 value: 55.345 - type: map_at_3 value: 45.854 - type: map_at_5 value: 50.025 - type: mrr_at_1 value: 60.34 - type: mrr_at_10 value: 68.804 - type: mrr_at_100 value: 69.309 - type: mrr_at_1000 value: 69.32199999999999 - type: mrr_at_3 value: 66.40899999999999 - type: mrr_at_5 value: 67.976 - type: ndcg_at_1 value: 60.34 - type: ndcg_at_10 value: 62.031000000000006 - type: ndcg_at_100 value: 68.00500000000001 - type: ndcg_at_1000 value: 69.286 - type: ndcg_at_3 value: 56.355999999999995 - type: ndcg_at_5 value: 58.687 - type: precision_at_1 value: 60.34 - type: precision_at_10 value: 17.176 - type: precision_at_100 value: 2.36 - type: precision_at_1000 value: 0.259 - type: precision_at_3 value: 37.14 - type: precision_at_5 value: 27.809 - type: recall_at_1 value: 32.055 - type: recall_at_10 value: 70.91 - type: recall_at_100 value: 91.83 - type: recall_at_1000 value: 98.871 - type: recall_at_3 value: 51.202999999999996 - type: recall_at_5 value: 60.563 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: map_at_1 value: 43.68 - type: map_at_10 value: 64.389 - type: map_at_100 value: 65.24 - type: map_at_1000 value: 65.303 - type: map_at_3 value: 61.309000000000005 - type: map_at_5 value: 63.275999999999996 - type: mrr_at_1 value: 87.36 - type: mrr_at_10 value: 91.12 - type: mrr_at_100 value: 91.227 - type: mrr_at_1000 value: 91.229 - type: mrr_at_3 value: 90.57600000000001 - type: mrr_at_5 value: 90.912 - type: ndcg_at_1 value: 87.36 - type: ndcg_at_10 value: 73.076 - type: ndcg_at_100 value: 75.895 - type: ndcg_at_1000 value: 77.049 - type: ndcg_at_3 value: 68.929 - type: ndcg_at_5 value: 71.28 - type: precision_at_1 value: 87.36 - type: precision_at_10 value: 14.741000000000001 - type: precision_at_100 value: 1.694 - type: precision_at_1000 value: 0.185 - type: precision_at_3 value: 43.043 - type: precision_at_5 value: 27.681 - type: recall_at_1 value: 43.68 - type: recall_at_10 value: 73.707 - type: recall_at_100 value: 84.7 - type: recall_at_1000 value: 92.309 - type: recall_at_3 value: 64.564 - type: recall_at_5 value: 69.203 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 96.75399999999999 - type: ap value: 95.29389839242187 - type: f1 value: 96.75348377433475 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: map_at_1 value: 25.176 - type: map_at_10 value: 38.598 - type: map_at_100 value: 39.707 - type: map_at_1000 value: 39.744 - type: map_at_3 value: 34.566 - type: map_at_5 value: 36.863 - type: mrr_at_1 value: 
25.874000000000002 - type: mrr_at_10 value: 39.214 - type: mrr_at_100 value: 40.251 - type: mrr_at_1000 value: 40.281 - type: mrr_at_3 value: 35.291 - type: mrr_at_5 value: 37.545 - type: ndcg_at_1 value: 25.874000000000002 - type: ndcg_at_10 value: 45.98 - type: ndcg_at_100 value: 51.197 - type: ndcg_at_1000 value: 52.073 - type: ndcg_at_3 value: 37.785999999999994 - type: ndcg_at_5 value: 41.870000000000005 - type: precision_at_1 value: 25.874000000000002 - type: precision_at_10 value: 7.181 - type: precision_at_100 value: 0.979 - type: precision_at_1000 value: 0.106 - type: precision_at_3 value: 16.051000000000002 - type: precision_at_5 value: 11.713 - type: recall_at_1 value: 25.176 - type: recall_at_10 value: 68.67699999999999 - type: recall_at_100 value: 92.55 - type: recall_at_1000 value: 99.164 - type: recall_at_3 value: 46.372 - type: recall_at_5 value: 56.16 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 99.03784769721841 - type: f1 value: 98.97791641821495 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 91.88326493388054 - type: f1 value: 73.74809928034335 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 85.41358439811701 - type: f1 value: 83.503679460639 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 89.77135171486215 - type: f1 value: 88.89843747468366 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 46.22695362087359 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 44.132372165849425 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 33.35680810650402 - type: mrr value: 34.72625715637218 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: map_at_1 value: 7.165000000000001 - type: map_at_10 value: 15.424 - type: map_at_100 value: 20.28 - type: map_at_1000 value: 22.065 - type: map_at_3 value: 11.236 - type: map_at_5 value: 13.025999999999998 - type: mrr_at_1 value: 51.702999999999996 - type: mrr_at_10 value: 59.965 - type: mrr_at_100 value: 60.667 - type: mrr_at_1000 value: 60.702999999999996 - type: mrr_at_3 value: 58.772000000000006 - type: mrr_at_5 value: 59.267 - type: ndcg_at_1 value: 49.536 - type: ndcg_at_10 value: 40.6 - type: ndcg_at_100 value: 37.848 - type: ndcg_at_1000 value: 46.657 - type: ndcg_at_3 value: 46.117999999999995 - type: ndcg_at_5 value: 43.619 - type: precision_at_1 value: 51.393 - type: precision_at_10 value: 
30.31 - type: precision_at_100 value: 9.972 - type: precision_at_1000 value: 2.329 - type: precision_at_3 value: 43.137 - type: precision_at_5 value: 37.585 - type: recall_at_1 value: 7.165000000000001 - type: recall_at_10 value: 19.689999999999998 - type: recall_at_100 value: 39.237 - type: recall_at_1000 value: 71.417 - type: recall_at_3 value: 12.247 - type: recall_at_5 value: 14.902999999999999 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: map_at_1 value: 42.653999999999996 - type: map_at_10 value: 59.611999999999995 - type: map_at_100 value: 60.32300000000001 - type: map_at_1000 value: 60.336 - type: map_at_3 value: 55.584999999999994 - type: map_at_5 value: 58.19 - type: mrr_at_1 value: 47.683 - type: mrr_at_10 value: 62.06700000000001 - type: mrr_at_100 value: 62.537 - type: mrr_at_1000 value: 62.544999999999995 - type: mrr_at_3 value: 59.178 - type: mrr_at_5 value: 61.034 - type: ndcg_at_1 value: 47.654 - type: ndcg_at_10 value: 67.001 - type: ndcg_at_100 value: 69.73899999999999 - type: ndcg_at_1000 value: 69.986 - type: ndcg_at_3 value: 59.95700000000001 - type: ndcg_at_5 value: 64.025 - type: precision_at_1 value: 47.654 - type: precision_at_10 value: 10.367999999999999 - type: precision_at_100 value: 1.192 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 26.651000000000003 - type: precision_at_5 value: 18.459 - type: recall_at_1 value: 42.653999999999996 - type: recall_at_10 value: 86.619 - type: recall_at_100 value: 98.04899999999999 - type: recall_at_1000 value: 99.812 - type: recall_at_3 value: 68.987 - type: recall_at_5 value: 78.158 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: None metrics: - type: map_at_1 value: 72.538 - type: map_at_10 value: 86.702 - type: map_at_100 value: 87.31 - type: map_at_1000 value: 87.323 - type: map_at_3 value: 83.87 - type: map_at_5 value: 85.682 - type: mrr_at_1 value: 83.31 - type: mrr_at_10 value: 89.225 - type: mrr_at_100 value: 89.30399999999999 - type: mrr_at_1000 value: 89.30399999999999 - type: mrr_at_3 value: 88.44300000000001 - type: mrr_at_5 value: 89.005 - type: ndcg_at_1 value: 83.32000000000001 - type: ndcg_at_10 value: 90.095 - type: ndcg_at_100 value: 91.12 - type: ndcg_at_1000 value: 91.179 - type: ndcg_at_3 value: 87.606 - type: ndcg_at_5 value: 89.031 - type: precision_at_1 value: 83.32000000000001 - type: precision_at_10 value: 13.641 - type: precision_at_100 value: 1.541 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 38.377 - type: precision_at_5 value: 25.162000000000003 - type: recall_at_1 value: 72.538 - type: recall_at_10 value: 96.47200000000001 - type: recall_at_100 value: 99.785 - type: recall_at_1000 value: 99.99900000000001 - type: recall_at_3 value: 89.278 - type: recall_at_5 value: 93.367 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 73.55219145406065 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 74.13437105242755 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 6.873 - type: 
map_at_10 value: 17.944 - type: map_at_100 value: 21.171 - type: map_at_1000 value: 21.528 - type: map_at_3 value: 12.415 - type: map_at_5 value: 15.187999999999999 - type: mrr_at_1 value: 33.800000000000004 - type: mrr_at_10 value: 46.455 - type: mrr_at_100 value: 47.378 - type: mrr_at_1000 value: 47.394999999999996 - type: mrr_at_3 value: 42.367 - type: mrr_at_5 value: 44.972 - type: ndcg_at_1 value: 33.800000000000004 - type: ndcg_at_10 value: 28.907 - type: ndcg_at_100 value: 39.695 - type: ndcg_at_1000 value: 44.582 - type: ndcg_at_3 value: 26.949 - type: ndcg_at_5 value: 23.988 - type: precision_at_1 value: 33.800000000000004 - type: precision_at_10 value: 15.079999999999998 - type: precision_at_100 value: 3.056 - type: precision_at_1000 value: 0.42100000000000004 - type: precision_at_3 value: 25.167 - type: precision_at_5 value: 21.26 - type: recall_at_1 value: 6.873 - type: recall_at_10 value: 30.568 - type: recall_at_100 value: 62.062 - type: recall_at_1000 value: 85.37700000000001 - type: recall_at_3 value: 15.312999999999999 - type: recall_at_5 value: 21.575 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 82.37009118256057 - type: cos_sim_spearman value: 79.27986395671529 - type: euclidean_pearson value: 79.18037715442115 - type: euclidean_spearman value: 79.28004791561621 - type: manhattan_pearson value: 79.34062972800541 - type: manhattan_spearman value: 79.43106695543402 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 87.48474767383833 - type: cos_sim_spearman value: 79.54505388752513 - type: euclidean_pearson value: 83.43282704179565 - type: euclidean_spearman value: 79.54579919925405 - type: manhattan_pearson value: 83.77564492427952 - type: manhattan_spearman value: 79.84558396989286 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 88.803698035802 - type: cos_sim_spearman value: 88.83451367754881 - type: euclidean_pearson value: 88.28939285711628 - type: euclidean_spearman value: 88.83528996073112 - type: manhattan_pearson value: 88.28017412671795 - type: manhattan_spearman value: 88.9228828016344 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 85.27469288153428 - type: cos_sim_spearman value: 83.87477064876288 - type: euclidean_pearson value: 84.2601737035379 - type: euclidean_spearman value: 83.87431082479074 - type: manhattan_pearson value: 84.3621547772745 - type: manhattan_spearman value: 84.12094375000423 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 88.12749863201587 - type: cos_sim_spearman value: 88.54287568368565 - type: euclidean_pearson value: 87.90429700607999 - type: euclidean_spearman value: 88.5437689576261 - type: manhattan_pearson value: 88.19276653356833 - type: manhattan_spearman value: 88.99995393814679 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 
85.68398747560902 - type: cos_sim_spearman value: 86.48815303460574 - type: euclidean_pearson value: 85.52356631237954 - type: euclidean_spearman value: 86.486391949551 - type: manhattan_pearson value: 85.67267981761788 - type: manhattan_spearman value: 86.7073696332485 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 88.9057107443124 - type: cos_sim_spearman value: 88.7312168757697 - type: euclidean_pearson value: 88.72810439714794 - type: euclidean_spearman value: 88.71976185854771 - type: manhattan_pearson value: 88.50433745949111 - type: manhattan_spearman value: 88.51726175544195 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 67.59391795109886 - type: cos_sim_spearman value: 66.87613008631367 - type: euclidean_pearson value: 69.23198488262217 - type: euclidean_spearman value: 66.85427723013692 - type: manhattan_pearson value: 69.50730124841084 - type: manhattan_spearman value: 67.10404669820792 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 87.0820605344619 - type: cos_sim_spearman value: 86.8518089863434 - type: euclidean_pearson value: 86.31087134689284 - type: euclidean_spearman value: 86.8518520517941 - type: manhattan_pearson value: 86.47203796160612 - type: manhattan_spearman value: 87.1080149734421 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 89.09255369305481 - type: mrr value: 97.10323445617563 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: map_at_1 value: 61.260999999999996 - type: map_at_10 value: 74.043 - type: map_at_100 value: 74.37700000000001 - type: map_at_1000 value: 74.384 - type: map_at_3 value: 71.222 - type: map_at_5 value: 72.875 - type: mrr_at_1 value: 64.333 - type: mrr_at_10 value: 74.984 - type: mrr_at_100 value: 75.247 - type: mrr_at_1000 value: 75.25500000000001 - type: mrr_at_3 value: 73.167 - type: mrr_at_5 value: 74.35000000000001 - type: ndcg_at_1 value: 64.333 - type: ndcg_at_10 value: 79.06 - type: ndcg_at_100 value: 80.416 - type: ndcg_at_1000 value: 80.55600000000001 - type: ndcg_at_3 value: 74.753 - type: ndcg_at_5 value: 76.97500000000001 - type: precision_at_1 value: 64.333 - type: precision_at_10 value: 10.567 - type: precision_at_100 value: 1.1199999999999999 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 29.889 - type: precision_at_5 value: 19.533 - type: recall_at_1 value: 61.260999999999996 - type: recall_at_10 value: 93.167 - type: recall_at_100 value: 99.0 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 81.667 - type: recall_at_5 value: 87.394 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.71980198019801 - type: cos_sim_ap value: 92.81616007802704 - type: cos_sim_f1 value: 
85.17548454688318 - type: cos_sim_precision value: 89.43894389438944 - type: cos_sim_recall value: 81.3 - type: dot_accuracy value: 99.71980198019801 - type: dot_ap value: 92.81398760591358 - type: dot_f1 value: 85.17548454688318 - type: dot_precision value: 89.43894389438944 - type: dot_recall value: 81.3 - type: euclidean_accuracy value: 99.71980198019801 - type: euclidean_ap value: 92.81560637245072 - type: euclidean_f1 value: 85.17548454688318 - type: euclidean_precision value: 89.43894389438944 - type: euclidean_recall value: 81.3 - type: manhattan_accuracy value: 99.73069306930694 - type: manhattan_ap value: 93.14005487480794 - type: manhattan_f1 value: 85.56263269639068 - type: manhattan_precision value: 91.17647058823529 - type: manhattan_recall value: 80.60000000000001 - type: max_accuracy value: 99.73069306930694 - type: max_ap value: 93.14005487480794 - type: max_f1 value: 85.56263269639068 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 79.86443362395185 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 49.40897096662564 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 55.66040806627947 - type: mrr value: 56.58670475766064 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 31.51015090598575 - type: cos_sim_spearman value: 31.35016454939226 - type: dot_pearson value: 31.5150068731 - type: dot_spearman value: 31.34790869023487 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.254 - type: map_at_10 value: 2.064 - type: map_at_100 value: 12.909 - type: map_at_1000 value: 31.761 - type: map_at_3 value: 0.738 - type: map_at_5 value: 1.155 - type: mrr_at_1 value: 96.0 - type: mrr_at_10 value: 98.0 - type: mrr_at_100 value: 98.0 - type: mrr_at_1000 value: 98.0 - type: mrr_at_3 value: 98.0 - type: mrr_at_5 value: 98.0 - type: ndcg_at_1 value: 93.0 - type: ndcg_at_10 value: 82.258 - type: ndcg_at_100 value: 64.34 - type: ndcg_at_1000 value: 57.912 - type: ndcg_at_3 value: 90.827 - type: ndcg_at_5 value: 86.79 - type: precision_at_1 value: 96.0 - type: precision_at_10 value: 84.8 - type: precision_at_100 value: 66.0 - type: precision_at_1000 value: 25.356 - type: precision_at_3 value: 94.667 - type: precision_at_5 value: 90.4 - type: recall_at_1 value: 0.254 - type: recall_at_10 value: 2.1950000000000003 - type: recall_at_100 value: 16.088 - type: recall_at_1000 value: 54.559000000000005 - type: recall_at_3 value: 0.75 - type: recall_at_5 value: 1.191 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: map_at_1 value: 2.976 - type: map_at_10 value: 11.389000000000001 - type: map_at_100 value: 18.429000000000002 - type: map_at_1000 value: 20.113 - type: map_at_3 value: 6.483 - type: map_at_5 value: 8.770999999999999 
- type: mrr_at_1 value: 40.816 - type: mrr_at_10 value: 58.118 - type: mrr_at_100 value: 58.489999999999995 - type: mrr_at_1000 value: 58.489999999999995 - type: mrr_at_3 value: 53.061 - type: mrr_at_5 value: 57.041 - type: ndcg_at_1 value: 40.816 - type: ndcg_at_10 value: 30.567 - type: ndcg_at_100 value: 42.44 - type: ndcg_at_1000 value: 53.480000000000004 - type: ndcg_at_3 value: 36.016 - type: ndcg_at_5 value: 34.257 - type: precision_at_1 value: 42.857 - type: precision_at_10 value: 25.714 - type: precision_at_100 value: 8.429 - type: precision_at_1000 value: 1.5939999999999999 - type: precision_at_3 value: 36.735 - type: precision_at_5 value: 33.878 - type: recall_at_1 value: 2.976 - type: recall_at_10 value: 17.854999999999997 - type: recall_at_100 value: 51.833 - type: recall_at_1000 value: 86.223 - type: recall_at_3 value: 7.887 - type: recall_at_5 value: 12.026 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 85.1174 - type: ap value: 30.169441069345748 - type: f1 value: 69.79254701873245 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 72.58347481607245 - type: f1 value: 72.74877295564937 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 53.90586138221305 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 87.35769207844072 - type: cos_sim_ap value: 77.9645072410354 - type: cos_sim_f1 value: 71.32352941176471 - type: cos_sim_precision value: 66.5903890160183 - type: cos_sim_recall value: 76.78100263852242 - type: dot_accuracy value: 87.37557370209214 - type: dot_ap value: 77.96250046429908 - type: dot_f1 value: 71.28932757557064 - type: dot_precision value: 66.95249130938586 - type: dot_recall value: 76.22691292875989 - type: euclidean_accuracy value: 87.35173153722357 - type: euclidean_ap value: 77.96520460741593 - type: euclidean_f1 value: 71.32470733210104 - type: euclidean_precision value: 66.91329479768785 - type: euclidean_recall value: 76.35883905013192 - type: manhattan_accuracy value: 87.25636287774931 - type: manhattan_ap value: 77.77752485611796 - type: manhattan_f1 value: 71.18148599269183 - type: manhattan_precision value: 66.10859728506787 - type: manhattan_recall value: 77.0976253298153 - type: max_accuracy value: 87.37557370209214 - type: max_ap value: 77.96520460741593 - type: max_f1 value: 71.32470733210104 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.38176737687739 - type: cos_sim_ap value: 86.58811861657401 - type: cos_sim_f1 value: 79.09430644097604 - type: cos_sim_precision value: 75.45085977911366 - type: cos_sim_recall value: 83.10748383122882 - type: dot_accuracy value: 89.38370784336554 - type: dot_ap value: 86.58840606004333 - type: dot_f1 
value: 79.10179860068133 - type: dot_precision value: 75.44546153308643 - type: dot_recall value: 83.13058207576223 - type: euclidean_accuracy value: 89.38564830985369 - type: euclidean_ap value: 86.58820721061164 - type: euclidean_f1 value: 79.09070942235888 - type: euclidean_precision value: 75.38729937194697 - type: euclidean_recall value: 83.17677856482906 - type: manhattan_accuracy value: 89.40699344122326 - type: manhattan_ap value: 86.60631843011362 - type: manhattan_f1 value: 79.14949970570925 - type: manhattan_precision value: 75.78191039729502 - type: manhattan_recall value: 82.83030489682784 - type: max_accuracy value: 89.40699344122326 - type: max_ap value: 86.60631843011362 - type: max_f1 value: 79.14949970570925 - task: type: STS dataset: name: MTEB AFQMC type: C-MTEB/AFQMC config: default split: validation revision: b44c3b011063adb25877c13823db83bb193913c4 metrics: - type: cos_sim_pearson value: 65.58442135663871 - type: cos_sim_spearman value: 72.2538631361313 - type: euclidean_pearson value: 70.97255486607429 - type: euclidean_spearman value: 72.25374250228647 - type: manhattan_pearson value: 70.83250199989911 - type: manhattan_spearman value: 72.14819496536272 - task: type: STS dataset: name: MTEB ATEC type: C-MTEB/ATEC config: default split: test revision: 0f319b1142f28d00e055a6770f3f726ae9b7d865 metrics: - type: cos_sim_pearson value: 59.99478404929932 - type: cos_sim_spearman value: 62.61836216999812 - type: euclidean_pearson value: 66.86429811933593 - type: euclidean_spearman value: 62.6183520374191 - type: manhattan_pearson value: 66.8063778911633 - type: manhattan_spearman value: 62.569607573241115 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 53.98400000000001 - type: f1 value: 51.21447361350723 - task: type: STS dataset: name: MTEB BQ type: C-MTEB/BQ config: default split: test revision: e3dda5e115e487b39ec7e618c0c6a29137052a55 metrics: - type: cos_sim_pearson value: 79.11941660686553 - type: cos_sim_spearman value: 81.25029594540435 - type: euclidean_pearson value: 82.06973504238826 - type: euclidean_spearman value: 81.2501989488524 - type: manhattan_pearson value: 82.10094630392753 - type: manhattan_spearman value: 81.27987244392389 - task: type: Clustering dataset: name: MTEB CLSClusteringP2P type: C-MTEB/CLSClusteringP2P config: default split: test revision: 4b6227591c6c1a73bc76b1055f3b7f3588e72476 metrics: - type: v_measure value: 47.07270168705156 - task: type: Clustering dataset: name: MTEB CLSClusteringS2S type: C-MTEB/CLSClusteringS2S config: default split: test revision: e458b3f5414b62b7f9f83499ac1f5497ae2e869f metrics: - type: v_measure value: 45.98511703185043 - task: type: Reranking dataset: name: MTEB CMedQAv1 type: C-MTEB/CMedQAv1-reranking config: default split: test revision: 8d7f1e942507dac42dc58017c1a001c3717da7df metrics: - type: map value: 88.19895157194931 - type: mrr value: 90.21424603174603 - task: type: Reranking dataset: name: MTEB CMedQAv2 type: C-MTEB/CMedQAv2-reranking config: default split: test revision: 23d186750531a14a0357ca22cd92d712fd512ea0 metrics: - type: map value: 88.03317320980119 - type: mrr value: 89.9461507936508 - task: type: Retrieval dataset: name: MTEB CmedqaRetrieval type: C-MTEB/CmedqaRetrieval config: default split: dev revision: cd540c506dae1cf9e9a59c3e06f42030d54e7301 metrics: - type: map_at_1 value: 29.037000000000003 - type: map_at_10 
value: 42.001 - type: map_at_100 value: 43.773 - type: map_at_1000 value: 43.878 - type: map_at_3 value: 37.637 - type: map_at_5 value: 40.034 - type: mrr_at_1 value: 43.136 - type: mrr_at_10 value: 51.158 - type: mrr_at_100 value: 52.083 - type: mrr_at_1000 value: 52.12 - type: mrr_at_3 value: 48.733 - type: mrr_at_5 value: 50.025 - type: ndcg_at_1 value: 43.136 - type: ndcg_at_10 value: 48.685 - type: ndcg_at_100 value: 55.513 - type: ndcg_at_1000 value: 57.242000000000004 - type: ndcg_at_3 value: 43.329 - type: ndcg_at_5 value: 45.438 - type: precision_at_1 value: 43.136 - type: precision_at_10 value: 10.56 - type: precision_at_100 value: 1.6129999999999998 - type: precision_at_1000 value: 0.184 - type: precision_at_3 value: 24.064 - type: precision_at_5 value: 17.269000000000002 - type: recall_at_1 value: 29.037000000000003 - type: recall_at_10 value: 59.245000000000005 - type: recall_at_100 value: 87.355 - type: recall_at_1000 value: 98.74000000000001 - type: recall_at_3 value: 42.99 - type: recall_at_5 value: 49.681999999999995 - task: type: PairClassification dataset: name: MTEB Cmnli type: C-MTEB/CMNLI config: default split: validation revision: 41bc36f332156f7adc9e38f53777c959b2ae9766 metrics: - type: cos_sim_accuracy value: 82.68190018039687 - type: cos_sim_ap value: 90.18017125327886 - type: cos_sim_f1 value: 83.64080906868193 - type: cos_sim_precision value: 79.7076890489303 - type: cos_sim_recall value: 87.98223053542202 - type: dot_accuracy value: 82.68190018039687 - type: dot_ap value: 90.18782350103646 - type: dot_f1 value: 83.64242087729039 - type: dot_precision value: 79.65313028764805 - type: dot_recall value: 88.05237315875614 - type: euclidean_accuracy value: 82.68190018039687 - type: euclidean_ap value: 90.1801957900632 - type: euclidean_f1 value: 83.63636363636364 - type: euclidean_precision value: 79.52772506852203 - type: euclidean_recall value: 88.19265840542437 - type: manhattan_accuracy value: 82.14070956103427 - type: manhattan_ap value: 89.96178420101427 - type: manhattan_f1 value: 83.21087838578791 - type: manhattan_precision value: 78.35605121850475 - type: manhattan_recall value: 88.70703764320785 - type: max_accuracy value: 82.68190018039687 - type: max_ap value: 90.18782350103646 - type: max_f1 value: 83.64242087729039 - task: type: Retrieval dataset: name: MTEB CovidRetrieval type: C-MTEB/CovidRetrieval config: default split: dev revision: 1271c7809071a13532e05f25fb53511ffce77117 metrics: - type: map_at_1 value: 72.234 - type: map_at_10 value: 80.10000000000001 - type: map_at_100 value: 80.36 - type: map_at_1000 value: 80.363 - type: map_at_3 value: 78.315 - type: map_at_5 value: 79.607 - type: mrr_at_1 value: 72.392 - type: mrr_at_10 value: 80.117 - type: mrr_at_100 value: 80.36999999999999 - type: mrr_at_1000 value: 80.373 - type: mrr_at_3 value: 78.469 - type: mrr_at_5 value: 79.633 - type: ndcg_at_1 value: 72.392 - type: ndcg_at_10 value: 83.651 - type: ndcg_at_100 value: 84.749 - type: ndcg_at_1000 value: 84.83000000000001 - type: ndcg_at_3 value: 80.253 - type: ndcg_at_5 value: 82.485 - type: precision_at_1 value: 72.392 - type: precision_at_10 value: 9.557 - type: precision_at_100 value: 1.004 - type: precision_at_1000 value: 0.101 - type: precision_at_3 value: 28.732000000000003 - type: precision_at_5 value: 18.377 - type: recall_at_1 value: 72.234 - type: recall_at_10 value: 94.573 - type: recall_at_100 value: 99.368 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 85.669 - type: recall_at_5 value: 91.01700000000001 - task: type: 
Retrieval dataset: name: MTEB DuRetrieval type: C-MTEB/DuRetrieval config: default split: dev revision: a1a333e290fe30b10f3f56498e3a0d911a693ced metrics: - type: map_at_1 value: 26.173999999999996 - type: map_at_10 value: 80.04 - type: map_at_100 value: 82.94500000000001 - type: map_at_1000 value: 82.98100000000001 - type: map_at_3 value: 55.562999999999995 - type: map_at_5 value: 69.89800000000001 - type: mrr_at_1 value: 89.5 - type: mrr_at_10 value: 92.996 - type: mrr_at_100 value: 93.06400000000001 - type: mrr_at_1000 value: 93.065 - type: mrr_at_3 value: 92.658 - type: mrr_at_5 value: 92.84599999999999 - type: ndcg_at_1 value: 89.5 - type: ndcg_at_10 value: 87.443 - type: ndcg_at_100 value: 90.253 - type: ndcg_at_1000 value: 90.549 - type: ndcg_at_3 value: 85.874 - type: ndcg_at_5 value: 84.842 - type: precision_at_1 value: 89.5 - type: precision_at_10 value: 41.805 - type: precision_at_100 value: 4.827 - type: precision_at_1000 value: 0.49 - type: precision_at_3 value: 76.85 - type: precision_at_5 value: 64.8 - type: recall_at_1 value: 26.173999999999996 - type: recall_at_10 value: 89.101 - type: recall_at_100 value: 98.08099999999999 - type: recall_at_1000 value: 99.529 - type: recall_at_3 value: 57.902 - type: recall_at_5 value: 74.602 - task: type: Retrieval dataset: name: MTEB EcomRetrieval type: C-MTEB/EcomRetrieval config: default split: dev revision: 687de13dc7294d6fd9be10c6945f9e8fec8166b9 metrics: - type: map_at_1 value: 56.10000000000001 - type: map_at_10 value: 66.15299999999999 - type: map_at_100 value: 66.625 - type: map_at_1000 value: 66.636 - type: map_at_3 value: 63.632999999999996 - type: map_at_5 value: 65.293 - type: mrr_at_1 value: 56.10000000000001 - type: mrr_at_10 value: 66.15299999999999 - type: mrr_at_100 value: 66.625 - type: mrr_at_1000 value: 66.636 - type: mrr_at_3 value: 63.632999999999996 - type: mrr_at_5 value: 65.293 - type: ndcg_at_1 value: 56.10000000000001 - type: ndcg_at_10 value: 71.146 - type: ndcg_at_100 value: 73.27799999999999 - type: ndcg_at_1000 value: 73.529 - type: ndcg_at_3 value: 66.09 - type: ndcg_at_5 value: 69.08999999999999 - type: precision_at_1 value: 56.10000000000001 - type: precision_at_10 value: 8.68 - type: precision_at_100 value: 0.964 - type: precision_at_1000 value: 0.098 - type: precision_at_3 value: 24.4 - type: precision_at_5 value: 16.1 - type: recall_at_1 value: 56.10000000000001 - type: recall_at_10 value: 86.8 - type: recall_at_100 value: 96.39999999999999 - type: recall_at_1000 value: 98.3 - type: recall_at_3 value: 73.2 - type: recall_at_5 value: 80.5 - task: type: Classification dataset: name: MTEB IFlyTek type: C-MTEB/IFlyTek-classification config: default split: validation revision: 421605374b29664c5fc098418fe20ada9bd55f8a metrics: - type: accuracy value: 54.52096960369373 - type: f1 value: 40.930845295808695 - task: type: Classification dataset: name: MTEB JDReview type: C-MTEB/JDReview-classification config: default split: test revision: b7c64bd89eb87f8ded463478346f76731f07bf8b metrics: - type: accuracy value: 86.51031894934334 - type: ap value: 55.9516014323483 - type: f1 value: 81.54813679326381 - task: type: STS dataset: name: MTEB LCQMC type: C-MTEB/LCQMC config: default split: test revision: 17f9b096f80380fce5ed12a9be8be7784b337daf metrics: - type: cos_sim_pearson value: 69.67437838574276 - type: cos_sim_spearman value: 73.81314174653045 - type: euclidean_pearson value: 72.63430276680275 - type: euclidean_spearman value: 73.81358736777001 - type: manhattan_pearson value: 72.58743833842829 - type: 
manhattan_spearman value: 73.7590419009179 - task: type: Reranking dataset: name: MTEB MMarcoReranking type: C-MTEB/Mmarco-reranking config: default split: dev revision: None metrics: - type: map value: 31.648613483640254 - type: mrr value: 30.37420634920635 - task: type: Retrieval dataset: name: MTEB MMarcoRetrieval type: C-MTEB/MMarcoRetrieval config: default split: dev revision: 539bbde593d947e2a124ba72651aafc09eb33fc2 metrics: - type: map_at_1 value: 73.28099999999999 - type: map_at_10 value: 81.977 - type: map_at_100 value: 82.222 - type: map_at_1000 value: 82.22699999999999 - type: map_at_3 value: 80.441 - type: map_at_5 value: 81.46600000000001 - type: mrr_at_1 value: 75.673 - type: mrr_at_10 value: 82.41000000000001 - type: mrr_at_100 value: 82.616 - type: mrr_at_1000 value: 82.621 - type: mrr_at_3 value: 81.094 - type: mrr_at_5 value: 81.962 - type: ndcg_at_1 value: 75.673 - type: ndcg_at_10 value: 85.15599999999999 - type: ndcg_at_100 value: 86.151 - type: ndcg_at_1000 value: 86.26899999999999 - type: ndcg_at_3 value: 82.304 - type: ndcg_at_5 value: 84.009 - type: precision_at_1 value: 75.673 - type: precision_at_10 value: 10.042 - type: precision_at_100 value: 1.052 - type: precision_at_1000 value: 0.106 - type: precision_at_3 value: 30.673000000000002 - type: precision_at_5 value: 19.326999999999998 - type: recall_at_1 value: 73.28099999999999 - type: recall_at_10 value: 94.446 - type: recall_at_100 value: 98.737 - type: recall_at_1000 value: 99.649 - type: recall_at_3 value: 86.984 - type: recall_at_5 value: 91.024 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 81.08607935440484 - type: f1 value: 78.24879986066307 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 86.05917955615332 - type: f1 value: 85.05279279434997 - task: type: Retrieval dataset: name: MTEB MedicalRetrieval type: C-MTEB/MedicalRetrieval config: default split: dev revision: 2039188fb5800a9803ba5048df7b76e6fb151fc6 metrics: - type: map_at_1 value: 56.2 - type: map_at_10 value: 62.57899999999999 - type: map_at_100 value: 63.154999999999994 - type: map_at_1000 value: 63.193 - type: map_at_3 value: 61.217 - type: map_at_5 value: 62.012 - type: mrr_at_1 value: 56.3 - type: mrr_at_10 value: 62.629000000000005 - type: mrr_at_100 value: 63.205999999999996 - type: mrr_at_1000 value: 63.244 - type: mrr_at_3 value: 61.267 - type: mrr_at_5 value: 62.062 - type: ndcg_at_1 value: 56.2 - type: ndcg_at_10 value: 65.592 - type: ndcg_at_100 value: 68.657 - type: ndcg_at_1000 value: 69.671 - type: ndcg_at_3 value: 62.808 - type: ndcg_at_5 value: 64.24499999999999 - type: precision_at_1 value: 56.2 - type: precision_at_10 value: 7.5 - type: precision_at_100 value: 0.899 - type: precision_at_1000 value: 0.098 - type: precision_at_3 value: 22.467000000000002 - type: precision_at_5 value: 14.180000000000001 - type: recall_at_1 value: 56.2 - type: recall_at_10 value: 75.0 - type: recall_at_100 value: 89.9 - type: recall_at_1000 value: 97.89999999999999 - type: recall_at_3 value: 67.4 - type: recall_at_5 value: 70.89999999999999 - task: type: Classification dataset: name: MTEB MultilingualSentiment type: C-MTEB/MultilingualSentiment-classification config: default split: 
validation revision: 46958b007a63fdbf239b7672c25d0bea67b5ea1a metrics: - type: accuracy value: 76.87666666666667 - type: f1 value: 76.7317686219665 - task: type: PairClassification dataset: name: MTEB Ocnli type: C-MTEB/OCNLI config: default split: validation revision: 66e76a618a34d6d565d5538088562851e6daa7ec metrics: - type: cos_sim_accuracy value: 79.64266377910124 - type: cos_sim_ap value: 84.78274442344829 - type: cos_sim_f1 value: 81.16947472745292 - type: cos_sim_precision value: 76.47058823529412 - type: cos_sim_recall value: 86.48363252375924 - type: dot_accuracy value: 79.64266377910124 - type: dot_ap value: 84.7851404063692 - type: dot_f1 value: 81.16947472745292 - type: dot_precision value: 76.47058823529412 - type: dot_recall value: 86.48363252375924 - type: euclidean_accuracy value: 79.64266377910124 - type: euclidean_ap value: 84.78068373762378 - type: euclidean_f1 value: 81.14794656110837 - type: euclidean_precision value: 76.35009310986965 - type: euclidean_recall value: 86.58922914466737 - type: manhattan_accuracy value: 79.48023822414727 - type: manhattan_ap value: 84.72928897427576 - type: manhattan_f1 value: 81.32084770823064 - type: manhattan_precision value: 76.24768946395564 - type: manhattan_recall value: 87.11721224920802 - type: max_accuracy value: 79.64266377910124 - type: max_ap value: 84.7851404063692 - type: max_f1 value: 81.32084770823064 - task: type: Classification dataset: name: MTEB OnlineShopping type: C-MTEB/OnlineShopping-classification config: default split: test revision: e610f2ebd179a8fda30ae534c3878750a96db120 metrics: - type: accuracy value: 94.3 - type: ap value: 92.8664032274438 - type: f1 value: 94.29311102997727 - task: type: STS dataset: name: MTEB PAWSX type: C-MTEB/PAWSX config: default split: test revision: 9c6a90e430ac22b5779fb019a23e820b11a8b5e1 metrics: - type: cos_sim_pearson value: 48.51392279882909 - type: cos_sim_spearman value: 54.06338895994974 - type: euclidean_pearson value: 52.58480559573412 - type: euclidean_spearman value: 54.06417276612201 - type: manhattan_pearson value: 52.69525121721343 - type: manhattan_spearman value: 54.048147455389675 - task: type: STS dataset: name: MTEB QBQTC type: C-MTEB/QBQTC config: default split: test revision: 790b0510dc52b1553e8c49f3d2afb48c0e5c48b7 metrics: - type: cos_sim_pearson value: 29.728387290757325 - type: cos_sim_spearman value: 31.366121633635284 - type: euclidean_pearson value: 29.14588368552961 - type: euclidean_spearman value: 31.36764411112844 - type: manhattan_pearson value: 29.63517350523121 - type: manhattan_spearman value: 31.94157020583762 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 63.64868296271406 - type: cos_sim_spearman value: 66.12800618164744 - type: euclidean_pearson value: 63.21405767340238 - type: euclidean_spearman value: 66.12786567790748 - type: manhattan_pearson value: 64.04300276525848 - type: manhattan_spearman value: 66.5066857145652 - task: type: STS dataset: name: MTEB STSB type: C-MTEB/STSB config: default split: test revision: 0cde68302b3541bb8b3c340dc0644b0b745b3dc0 metrics: - type: cos_sim_pearson value: 81.2302623912794 - type: cos_sim_spearman value: 81.16833673266562 - type: euclidean_pearson value: 79.47647843876024 - type: euclidean_spearman value: 81.16944349524972 - type: manhattan_pearson value: 79.84947238492208 - type: manhattan_spearman value: 81.64626599410026 - task: type: Reranking 
dataset: name: MTEB T2Reranking type: C-MTEB/T2Reranking config: default split: dev revision: 76631901a18387f85eaa53e5450019b87ad58ef9 metrics: - type: map value: 67.80129586475687 - type: mrr value: 77.77402311635554 - task: type: Retrieval dataset: name: MTEB T2Retrieval type: C-MTEB/T2Retrieval config: default split: dev revision: 8731a845f1bf500a4f111cf1070785c793d10e64 metrics: - type: map_at_1 value: 28.666999999999998 - type: map_at_10 value: 81.063 - type: map_at_100 value: 84.504 - type: map_at_1000 value: 84.552 - type: map_at_3 value: 56.897 - type: map_at_5 value: 70.073 - type: mrr_at_1 value: 92.087 - type: mrr_at_10 value: 94.132 - type: mrr_at_100 value: 94.19800000000001 - type: mrr_at_1000 value: 94.19999999999999 - type: mrr_at_3 value: 93.78999999999999 - type: mrr_at_5 value: 94.002 - type: ndcg_at_1 value: 92.087 - type: ndcg_at_10 value: 87.734 - type: ndcg_at_100 value: 90.736 - type: ndcg_at_1000 value: 91.184 - type: ndcg_at_3 value: 88.78 - type: ndcg_at_5 value: 87.676 - type: precision_at_1 value: 92.087 - type: precision_at_10 value: 43.46 - type: precision_at_100 value: 5.07 - type: precision_at_1000 value: 0.518 - type: precision_at_3 value: 77.49000000000001 - type: precision_at_5 value: 65.194 - type: recall_at_1 value: 28.666999999999998 - type: recall_at_10 value: 86.632 - type: recall_at_100 value: 96.646 - type: recall_at_1000 value: 98.917 - type: recall_at_3 value: 58.333999999999996 - type: recall_at_5 value: 72.974 - task: type: Classification dataset: name: MTEB TNews type: C-MTEB/TNews-classification config: default split: validation revision: 317f262bf1e6126357bbe89e875451e4b0938fe4 metrics: - type: accuracy value: 52.971999999999994 - type: f1 value: 50.2898280984929 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringP2P type: C-MTEB/ThuNewsClusteringP2P config: default split: test revision: 5798586b105c0434e4f0fe5e767abe619442cf93 metrics: - type: v_measure value: 86.0797948663824 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringS2S type: C-MTEB/ThuNewsClusteringS2S config: default split: test revision: 8a8b2caeda43f39e13c4bc5bea0f8a667896e10d metrics: - type: v_measure value: 85.10759092255017 - task: type: Retrieval dataset: name: MTEB VideoRetrieval type: C-MTEB/VideoRetrieval config: default split: dev revision: 58c2597a5943a2ba48f4668c3b90d796283c5639 metrics: - type: map_at_1 value: 65.60000000000001 - type: map_at_10 value: 74.773 - type: map_at_100 value: 75.128 - type: map_at_1000 value: 75.136 - type: map_at_3 value: 73.05 - type: map_at_5 value: 74.13499999999999 - type: mrr_at_1 value: 65.60000000000001 - type: mrr_at_10 value: 74.773 - type: mrr_at_100 value: 75.128 - type: mrr_at_1000 value: 75.136 - type: mrr_at_3 value: 73.05 - type: mrr_at_5 value: 74.13499999999999 - type: ndcg_at_1 value: 65.60000000000001 - type: ndcg_at_10 value: 78.84299999999999 - type: ndcg_at_100 value: 80.40899999999999 - type: ndcg_at_1000 value: 80.57 - type: ndcg_at_3 value: 75.40599999999999 - type: ndcg_at_5 value: 77.351 - type: precision_at_1 value: 65.60000000000001 - type: precision_at_10 value: 9.139999999999999 - type: precision_at_100 value: 0.984 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 27.400000000000002 - type: precision_at_5 value: 17.380000000000003 - type: recall_at_1 value: 65.60000000000001 - type: recall_at_10 value: 91.4 - type: recall_at_100 value: 98.4 - type: recall_at_1000 value: 99.6 - type: recall_at_3 value: 82.19999999999999 - type: recall_at_5 value: 86.9 - task: type: 
Classification dataset: name: MTEB Waimai type: C-MTEB/waimai-classification config: default split: test revision: 339287def212450dcaa9df8c22bf93e9980c7023 metrics: - type: accuracy value: 89.47 - type: ap value: 75.59561751845389 - type: f1 value: 87.95207751382563 --- # fishbone64/gte-Qwen2-7B-instruct-Q8_0-GGUF This model was converted to GGUF format from [`Alibaba-NLP/gte-Qwen2-7B-instruct`](https://huggingface.co/Alibaba-NLP/gte-Qwen2-7B-instruct) using llama.cpp via ggml.ai's [GGUF-my-repo](https://huggingface.co/spaces/ggml-org/gguf-my-repo) space. Refer to the [original model card](https://huggingface.co/Alibaba-NLP/gte-Qwen2-7B-instruct) for more details on the model. ## Use with llama.cpp Install llama.cpp through brew (works on Mac and Linux). ```bash brew install llama.cpp ``` Invoke the llama.cpp server or the CLI. ### CLI: ```bash llama-cli --hf-repo fishbone64/gte-Qwen2-7B-instruct-Q8_0-GGUF --hf-file gte-qwen2-7b-instruct-q8_0.gguf -p "The meaning to life and the universe is" ``` ### Server: ```bash llama-server --hf-repo fishbone64/gte-Qwen2-7B-instruct-Q8_0-GGUF --hf-file gte-qwen2-7b-instruct-q8_0.gguf -c 2048 ``` Note: You can also use this checkpoint directly through the [usage steps](https://github.com/ggerganov/llama.cpp?tab=readme-ov-file#usage) listed in the llama.cpp repo. Step 1: Clone llama.cpp from GitHub. ``` git clone https://github.com/ggerganov/llama.cpp ``` Step 2: Move into the llama.cpp folder and build it with the `LLAMA_CURL=1` flag, along with any other hardware-specific flags (e.g., `LLAMA_CUDA=1` for Nvidia GPUs on Linux). ``` cd llama.cpp && LLAMA_CURL=1 make ``` Step 3: Run inference through the main binary. ``` ./llama-cli --hf-repo fishbone64/gte-Qwen2-7B-instruct-Q8_0-GGUF --hf-file gte-qwen2-7b-instruct-q8_0.gguf -p "The meaning to life and the universe is" ``` or ``` ./llama-server --hf-repo fishbone64/gte-Qwen2-7B-instruct-Q8_0-GGUF --hf-file gte-qwen2-7b-instruct-q8_0.gguf -c 2048 ```
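If you prefer to fetch the quantized file once and reuse it locally rather than relying on the `--hf-repo` auto-download, the snippet below is a minimal sketch using the `huggingface_hub` Python client (an assumption; it is not part of the instructions above). The repo and file names are the same ones used in the commands above, and the returned path can be passed to `llama-cli` or `llama-server` with `-m`.

```python
# Minimal sketch (assumes `pip install huggingface_hub`): pre-download the Q8_0 GGUF
# file into the local Hugging Face cache and print its path for reuse with llama.cpp.
from huggingface_hub import hf_hub_download

gguf_path = hf_hub_download(
    repo_id="fishbone64/gte-Qwen2-7B-instruct-Q8_0-GGUF",
    filename="gte-qwen2-7b-instruct-q8_0.gguf",
)

# Substitute this local path for the --hf-repo/--hf-file pair, e.g.:
#   llama-cli -m <gguf_path> -p "The meaning to life and the universe is"
print(gguf_path)
```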
[ "SUMMARIZATION" ]
[ "BIOSSES", "SCIFACT" ]
RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf
RichardErkhov
null
[ "gguf", "arxiv:2403.14009", "arxiv:2403.20266", "arxiv:2101.00027", "arxiv:2207.00220", "arxiv:1810.06694", "arxiv:1911.05507", "arxiv:1906.03741", "arxiv:2406.17557", "arxiv:2402.06619", "arxiv:1803.09010", "endpoints_compatible", "region:us", "conversational" ]
2024-10-14T18:33:06
2024-10-14T19:26:09
105
0
--- {} --- Quantization made by Richard Erkhov. [Github](https://github.com/RichardErkhov) [Discord](https://discord.gg/pvy7H8DZMG) [Request more models](https://github.com/RichardErkhov/quant_request) salamandra-2b-instruct - GGUF - Model creator: https://huggingface.co/BSC-LT/ - Original model: https://huggingface.co/BSC-LT/salamandra-2b-instruct/ | Name | Quant method | Size | | ---- | ---- | ---- | | [salamandra-2b-instruct.Q2_K.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.Q2_K.gguf) | Q2_K | 1.01GB | | [salamandra-2b-instruct.IQ3_XS.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.IQ3_XS.gguf) | IQ3_XS | 1.11GB | | [salamandra-2b-instruct.IQ3_S.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.IQ3_S.gguf) | IQ3_S | 1.13GB | | [salamandra-2b-instruct.Q3_K_S.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.Q3_K_S.gguf) | Q3_K_S | 1.13GB | | [salamandra-2b-instruct.IQ3_M.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.IQ3_M.gguf) | IQ3_M | 1.16GB | | [salamandra-2b-instruct.Q3_K.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.Q3_K.gguf) | Q3_K | 1.19GB | | [salamandra-2b-instruct.Q3_K_M.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.Q3_K_M.gguf) | Q3_K_M | 1.19GB | | [salamandra-2b-instruct.Q3_K_L.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.Q3_K_L.gguf) | Q3_K_L | 1.23GB | | [salamandra-2b-instruct.IQ4_XS.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.IQ4_XS.gguf) | IQ4_XS | 1.28GB | | [salamandra-2b-instruct.Q4_0.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.Q4_0.gguf) | Q4_0 | 1.31GB | | [salamandra-2b-instruct.IQ4_NL.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.IQ4_NL.gguf) | IQ4_NL | 1.32GB | | [salamandra-2b-instruct.Q4_K_S.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.Q4_K_S.gguf) | Q4_K_S | 1.35GB | | [salamandra-2b-instruct.Q4_K.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.Q4_K.gguf) | Q4_K | 1.4GB | | [salamandra-2b-instruct.Q4_K_M.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.Q4_K_M.gguf) | Q4_K_M | 1.4GB | | [salamandra-2b-instruct.Q4_1.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.Q4_1.gguf) | Q4_1 | 1.41GB | | [salamandra-2b-instruct.Q5_0.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.Q5_0.gguf) | Q5_0 | 1.51GB | | [salamandra-2b-instruct.Q5_K_S.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.Q5_K_S.gguf) | Q5_K_S | 1.53GB | | 
[salamandra-2b-instruct.Q5_K.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.Q5_K.gguf) | Q5_K | 1.57GB | | [salamandra-2b-instruct.Q5_K_M.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.Q5_K_M.gguf) | Q5_K_M | 1.57GB | | [salamandra-2b-instruct.Q5_1.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.Q5_1.gguf) | Q5_1 | 1.61GB | | [salamandra-2b-instruct.Q6_K.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.Q6_K.gguf) | Q6_K | 1.79GB | | [salamandra-2b-instruct.Q8_0.gguf](https://huggingface.co/RichardErkhov/BSC-LT_-_salamandra-2b-instruct-gguf/blob/main/salamandra-2b-instruct.Q8_0.gguf) | Q8_0 | 2.24GB | Original model description: --- license: apache-2.0 library_name: transformers pipeline_tag: text-generation language: - bg - ca - code - cs - cy - da - de - el - en - es - et - eu - fi - fr - ga - gl - hr - hu - it - lt - lv - mt - nl - nn - 'no' - oc - pl - pt - ro - ru - sh - sk - sl - sr - sv - uk --- ![](./images/salamandra_header.png) # Salamandra Model Card Salamandra is a highly multilingual model pre-trained from scratch that comes in three different sizes — 2B, 7B and 40B parameters — with their respective base and instruction-tuned variants. This model card corresponds to the 2B instructed version. To visit the model cards of other Salamandra versions, please refer to the [Model Index](#model-index). The entire Salamandra family is released under a permissive [Apache 2.0 license](https://www.apache.org/licenses/LICENSE-2.0). Along with the open weights, all training scripts and configuration files are made publicly available in [this GitHub repository](https://github.com/langtech-bsc/salamandra). > [!WARNING] > **DISCLAIMER:** This model is a first proof-of-concept designed to demonstrate the instruction-following capabilities of recently released base models. > It has been optimized to engage in conversation but has *NOT* been aligned through RLHF to filter or avoid sensitive topics. > As a result, it may generate harmful or inappropriate content. > The team is actively working to enhance its performance through further instruction tuning and alignment with RL techniques. --- ## Model Details ### Description Transformer-based decoder-only language model that has been pre-trained from scratch on 7.8 trillion tokens of highly curated data. The pre-training corpus contains text in 35 European languages and code. ### Hyperparameters The full list of hyperparameters for each model can be found [here](https://github.com/langtech-bsc/salamandra/tree/main/configs). ### Architecture | | | |-------------------------|:--------------| | Total Parameters | 2,253,490,176 | | Embedding Parameters | 524,288,000 | | Layers | 24 | | Hidden size | 2,048 | | Attention heads | 16 | | Context length | 8,192 | | Vocabulary size | 256,000 | | Precision | bfloat16 | | Embedding type | RoPE | | Activation Function | SwiGLU | | Layer normalization | RMS Norm | | Flash attention | ✅ | | Grouped Query Attention | ❌ | | Num. query groups | N/A | --- ## Intended Use ### Direct Use The models are intended for both research and commercial use in any of the languages included in the training data. The base models are intended either for language generation or to be further fine-tuned for specific use-cases.
The instruction-tuned variants can be used as general-purpose assistants, as long as the user is fully aware of the model’s limitations. ### Out-of-scope Use The model is not intended for malicious activities, such as harming others or violating human rights. Any downstream application must comply with current laws and regulations. Irresponsible usage in production environments without proper risk assessment and mitigation is also discouraged. --- ## Hardware and Software ### Training Framework Pre-training was conducted using NVIDIA’s [NeMo Framework](https://docs.nvidia.com/nemo-framework/index.html), which leverages PyTorch Lightning for efficient model training in highly distributed settings. The instruction-tuned versions were produced with [FastChat](https://github.com/lm-sys/FastChat). ### Compute Infrastructure All models were trained on [MareNostrum 5](https://www.bsc.es/ca/marenostrum/marenostrum-5), a pre-exascale EuroHPC supercomputer hosted and operated by Barcelona Supercomputing Center. The accelerated partition is composed of 1,120 nodes with the following specifications: - 4x Nvidia Hopper GPUs with 64 HBM2 memory - 2x Intel Sapphire Rapids 8460Y+ at 2.3Ghz and 32c each (64 cores) - 4x NDR200 (BW per node 800Gb/s) - 512 GB of Main memory (DDR5) - 460GB on NVMe storage |Model|Nodes|GPUs| |:---:|:---:|:---:| |2B|64|256| |7B|128|512| |40B|256 / 512|1,024 / 2,048| --- ## How to use The instruction-following models use the commonly adopted ChatML template: ```jinja {%- if not date_string is defined %}{%- set date_string = "2024-09-30" %}{%- endif %}{{ "<|im_start|>system\nsystem_message\nToday Date: "+ date_string +"<|im_end|>\n" }}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %} ``` Where `system_message` is used to guide the model during generation and `date_string` can be set to allow the model to respond with the current date. The exact same chat template should be used for an enhanced conversational experience. The easiest way to apply it is by using the tokenizer's built-in functions, as shown in the following snippet. ```python from datetime import datetime from transformers import AutoTokenizer, AutoModelForCausalLM import transformers import torch model_id = "BSC-LT/salamandra-2b-instruct" text = "At what temperature does water boil?" tokenizer = AutoTokenizer.from_pretrained(model_id) model = AutoModelForCausalLM.from_pretrained( model_id, device_map="auto", torch_dtype=torch.bfloat16 ) message = [ { "role": "user", "content": text } ] date_string = datetime.today().strftime('%Y-%m-%d') prompt = tokenizer.apply_chat_template( message, tokenize=False, add_generation_prompt=True, date_string=date_string ) inputs = tokenizer.encode(prompt, add_special_tokens=False, return_tensors="pt") outputs = model.generate(input_ids=inputs.to(model.device), max_new_tokens=200) print(tokenizer.decode(outputs[0], skip_special_tokens=True)) ``` Using this template, each turn is preceded by a `<|im_start|>` delimiter and the role of the entity (either `user`, for content supplied by the user, or `assistant` for LLM responses), and finished with the `<|im_end|>` token. --- ## Data ### Pretraining Data The training corpus consists of 2.4 trillion tokens, including 35 European languages and 92 programming languages. It amounts to a total of 33TB of pre-processed text. 
Languages were sampled manually by giving x2 oversampling to Spain's co-official languages (Spanish, Catalan, Galician and Basque), code was undersampled by half, and the rest of the languages were kept as is, resulting in the following distribution: ![lang distrib](./images/corpus_languages.png) This highly multilingual corpus is predominantly composed of data from Colossal OSCAR, which contributes a significant 66.06% of the total tokens. Following this, Starcoder provides 11.91%, and Spanish Crawling adds 3.34%. The next largest sources are French FR at 3.12% and Proof Pile at 1.98%. Other notable contributions include Macocu, Pile of Law, and Eurlex, each contributing around 1.5% to 1.3%. These major sources collectively form the bulk of the corpus, ensuring a rich and diverse dataset for training the language model. The remaining 10% comes from smaller sources in various languages. Feel free to click the expand button below to see the full list of sources. <details> <summary>Data Sources</summary> | Dataset | Language | Source | |-----------------------------------------------|---------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------| | Parlamint corpus | at, bg, cz, dk, ee, es, es-ga, fi, fr, gb, gr, hr, hu, it, lv, nl, no, pl, pt, rs, se, si | Erjavec et al., 2021 | | Bulgarian National Corpus | bg | [Link](http://old.dcl.bas.bg/dataset/BulNC.7z) | | Crawl of Bulgarian news websites | bg | [Link](http://old.dcl.bas.bg/dataset/Bulgarian_news.7z) | | Colossal OSCAR 1.0 | bg, ca, cs, cy, da, de, el, en, es, et, eu, fi, fr, ga, gl, hr, hu, it, lt, lv, mt, nl, nn, no, oc, pl, pt, ro, ru, sh, sk, sl, sr, sv, uk | Brack et al., 2024 | | Wikimedia dumps | bg, ca, cs, da, de, el, en, es, et, eu, fi, fr, ga, gl, hr, hu, it, lt, lv, mt, nl, nn, no, pl, pt, ro, sh, sk, sl, sr, uk | [Link](https://dumps.wikimedia.org/) | | OpenSubtitlesv2016 | bg, ca, cs, da, de, el, en, es, et, eu, fi, fr, gl, hr, it, lt, lv, nl, no, pl, pt, ro, sk, sl, sr, sv, uk | Lison & Tiedemann, 2016 | | MaCoCu web corpus | bg, ca, el, hr, mt, sl, sr, uk | Bañón et al., 2022 | | EurLEX-Resources | bg, cs, da, de, el, en, es, et, fi, fr, ga, hr, hu, it, lt, lv, mt, nl, pl, pt, ro, sk, sl, sv | [Link](https://huggingface.co/datasets/joelniklaus/eurlex_resources) | | MC4-Legal | bg, cs, da, de, el, en, es, et, fi, fr, ga, hu, it, lt, lv, mt, nl, pl, pt, ro, sk, sl, sv | [Link](https://huggingface.co/datasets/joelito/legal-mc4) | | CURLICAT Corpus | bg, hr, hu, pl, ro, sk, sl | Váradi et al., 2022 | | CATalog | ca | Palomar-Giner et al., 2024 | | Spanish Crawling | ca, es, eu, gl | Relevant Spanish websites crawling | | Starcoder | code | Li et al., 2023 | | SYN v9: large corpus of written Czech | cs | Křen et al., 2021 | | Welsh-GOV | cy | Crawling from [Link](https://www.llyw.cymru) | | DaNewsroom | da | Varab & Schluter, 2020 | | Danish GigaWord | da | Strømberg-Derczynski et al., 2021 | | DK-CLARIN Reference Corpus of General Danish | da | [Link](https://korpus.dsl.dk/clarin/) | | The Danish Parliament Corpus 2009 - 2017, v1 | da | Hansen, 2018 | | DeWaC | de | [Link](https://docs.sslmit.unibo.it/doku.php?id=corpora:dewac) | | Open Legal Data - German court decisions and laws | de | Ostendorff et al., 2020 | | Greek Legal Code | el | Papaloukas et al., 2021 | | Greek Web Corpus | el | Outsios et al., 2018 | | Auxiliary Mathematics Problems and Solutions (AMPS) dataset 
| en | Hendrycks et al., 2021 | | BIGPATENT | en | Sharma et al., 2019 | | FineWeb-Edu (350BT subset) | en | Penedo et al., 2024 | | peS2o | en | Soldaini & Lo, 2023 | | PG-19 | en | Rae et al., 2019 | | Pile of Law (selected subsets) | en | Henderson* et al., 2022 | | proof-pile | en | [Link](https://huggingface.co/datasets/hoskinson-center/proof-pile) | | RedPajama-Data T1 (StackExchange subset) | en | Computer, 2023 | | The Pile (PhilPapers subset) | en | Gao et al., 2021 | | Biomedical | es | Internally generated scientific dataset: Dialnet, Scielo, CSIC, TDX, BSC, UCM | | HPLTDatasets v1 - Spanish | es | de Gibert et al., 2024 | | Legal | es | Internally generated legal dataset: BOE, BORME, Senado, Congreso, Spanish court orders, DOGC | | Scientific | es | Internally generated scientific dataset: Wikipedia LS, Pubmed, MeSpEn, patents, clinical cases, medical crawler | | Spanish Legal Domain Corpora | es | Gutiérrez-Fandiño et al., 2021 | | Estonian National Corpus 2021 | et | Koppel & Kallas, 2022 | | Estonian Reference Corpus | et | [Link](https://www.cl.ut.ee/korpused/segakorpus/) | | EusCrawl (w/o Wikipedia or NC-licenses) | eu | Artetxe et al., 2022 | | Latxa Corpus v1.1 | eu | Etxaniz et al., 2024 [Link](https://huggingface.co/datasets/HiTZ/latxa-corpus-v1.1) | | Aya Dataset (w/o Evaluation Suite) | eu, hr, nl, fi, ka, hu, lt, nn, ro, sk, lv, cy, bg, cs, en, fr, de, ga, mt, pl, ru, sl, sv, ca, da, et, gl, el, it, no, pt, sr, es, uk | Singh et al., 2024 | | Yle Finnish News Archive | fi | [Link](http://urn.fi/urn:nbn:fi:lb-2021050401) | | CaBeRnet: a New French Balanced Reference Corpus | fr | Popa-Fabre et al., 2020 | | French Public Domain Books | fr | [Link](https://huggingface.co/datasets/PleIAs/French-PD-Books) | | French Public Domain Newspapers | fr | [Link](https://huggingface.co/datasets/PleIAs/French-PD-Newspapers) | | Irish Universal Dependencies | ga | [Link](https://universaldependencies.org/ga/index.html) | | The Gaois bilingual corpus of English-Irish legislation (Irish legislation) | ga | [Link](https://portulanclarin.net/repository/browse/the-gaois-bilingual-corpus-of-english-irish-legislation-processed/daeac17c9e3511ea9b7f02420a000407b83de243dc0b469aab41084386c5b80f/) | | CorpusNÓS | gl | de-Dios-Flores et al., 2024 | | Croatian web corpus hrWaC 2.1 | hr | Ljubešić & Klubička, 2014 | | ITWaC | it | [Link](https://docs.sslmit.unibo.it/doku.php?id=corpora:itwac) | | Corpus of State-related content from the Latvian Web (Processed) | lv | [Link](https://catalog.elra.info/en-us/repository/browse/ELRA-W0169/) | | Korpus Malti | mt | Micallef et al., 2022 | | SoNaR Corpus NC 1.2 | nl | [Link](https://taalmaterialen.ivdnt.org/download/tstc-sonar-corpus/) | | Norwegian Colossal Corpus | nn, no | Kummervold et al., 2021 | | Occitan Corpus | oc | Provided by [IEA](https://www.institutestudisaranesi.cat/) | | NKJP-PodkorpusMilionowy-1.2 (National Corpus of Polish) | pl | Lewandowska-Tomaszczyk et al., 2013 | | Polish Parliamentary Corpus / Korpus Dyskursu Parlamentarnego | pl | Ogrodniczuk, 2018 | | Brazilian Portuguese Web as Corpus | pt | Wagner Filho et al., 2018 | | ParlamentoPT | pt | Rodrigues et al., 2023 | | MARCELL Romanian legislative subcorpus v2 | ro | [Link](https://elrc-share.eu/reposMARCELL%20Romanian%20legislative%20subcorpus%20v2itory/browse/marcell-romanian-legislative-subcorpus-v2/2da548428b9d11eb9c1a00155d026706ce94a6b59ffc4b0e9fb5cd9cebe6889e/) | | Korpus slovenských právnych predpisov v1.9 | sk | 
[Link](https://www.juls.savba.sk/data/marcell/legal-sk-20220322-1.9.ver.xz) | | od-justice 2.0 | sk | [Link](https://www.juls.savba.sk/data/od-justice/od-justice-2.0.ver.xz) | | Corpus of academic Slovene KAS 2.0 | sl | Žagar et al., 2022 | | slWaC web corpus | sl | Erjavec et al., 2015 | | SrpKorSubset (news, legal, academic, conversation, literary) | sr | [Link](http://www.korpus.matf.bg.ac.rs/) | | The Swedish Culturomics Gigaword Corpus | sv | Rødven-Eide, 2016 | | Corpus of laws and legal acts of Ukraine | uk | [Link](https://lang.org.ua/en/corpora/#anchor7) | <details> <summary>References</summary> - Abadji, J., Suárez, P. J. O., Romary, L., & Sagot, B. (2021). Ungoliant: An optimized pipeline for the generation of a very large-scale multilingual web corpus (H. Lüngen, M. Kupietz, P. Bański, A. Barbaresi, S. Clematide, & I. Pisetta, Eds.; pp. 1–9). Leibniz-Institut für Deutsche Sprache. [Link](https://doi.org/10.14618/ids-pub-10468) - Artetxe, M., Aldabe, I., Agerri, R., Perez-de-Viñaspre, O., & Soroa, A. (2022). Does Corpus Quality Really Matter for Low-Resource Languages? - Bañón, M., Esplà-Gomis, M., Forcada, M. L., García-Romero, C., Kuzman, T., Ljubešić, N., van Noord, R., Sempere, L. P., Ramírez-Sánchez, G., Rupnik, P., Suchomel, V., Toral, A., van der Werff, T., & Zaragoza, J. (2022). MaCoCu: Massive collection and curation of monolingual and bilingual data: Focus on under-resourced languages. Proceedings of the 23rd Annual Conference of the European Association for Machine Translation, 303–304. [Link](https://aclanthology.org/2022.eamt-1.41) - Brack, M., Ostendorff, M., Suarez, P. O., Saiz, J. J., Castilla, I. L., Palomar-Giner, J., Shvets, A., Schramowski, P., Rehm, G., Villegas, M., & Kersting, K. (2024). Community OSCAR: A Community Effort for Multilingual Web Data. [Link](https://occiglot.eu/papers/Community_Oscar.pdf) - Computer, T. (2023). RedPajama: An Open Source Recipe to Reproduce LLaMA training dataset [Computer software]. [Link](https://github.com/togethercomputer/RedPajama-Data) - de Gibert, O., Nail, G., Arefyev, N., Bañón, M., van der Linde, J., Ji, S., Zaragoza-Bernabeu, J., Aulamo, M., Ramírez-Sánchez, G., Kutuzov, A., Pyysalo, S., Oepen, S., & Tiedemann, J. (2024). A New Massive Multilingual Dataset for High-Performance Language Technologies (arXiv:2403.14009). arXiv. [Link](http://arxiv.org/abs/2403.14009) - Dodge, J., Sap, M., Marasović, A., Agnew, W., Ilharco, G., Groeneveld, D., Mitchell, M., & Gardner, M. (2021). Documenting Large Webtext Corpora: A Case Study on the Colossal Clean Crawled Corpus. In M.-F. Moens, X. Huang, L. Specia, & S. W. Yih (Eds.), Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing (pp. 1286–1305). Association for Computational Linguistics. [Link](https://doi.org/10.18653/v1/2021.emnlp-main.98) - Erjavec, T., Ljubešić, N., & Logar, N. (2015). The slWaC corpus of the Slovene web. Informatica (Slovenia), 39, 35–42. - Erjavec, T., Ogrodniczuk, M., Osenova, P., Ljubešić, N., Simov, K., Grigorova, V., Rudolf, M., Pančur, A., Kopp, M., Barkarson, S., Steingrímsson, S. hór, van der Pol, H., Depoorter, G., de Does, J., Jongejan, B., Haltrup Hansen, D., Navarretta, C., Calzada Pérez, M., de Macedo, L. D., … Rayson, P. (2021). Linguistically annotated multilingual comparable corpora of parliamentary debates ParlaMint.ana 2.1. [Link](http://hdl.handle.net/11356/1431) - Etxaniz, J., Sainz, O., Perez, N., Aldabe, I., Rigau, G., Agirre, E., Ormazabal, A., Artetxe, M., & Soroa, A. (2024). 
Latxa: An Open Language Model and Evaluation Suite for Basque. [Link] (https://arxiv.org/abs/2403.20266) - Gao, L., Biderman, S., Black, S., Golding, L., Hoppe, T., Foster, C., Phang, J., He, H., Thite, A., Nabeshima, N., Presser, S., & Leahy, C. (2021). The Pile: An 800GB Dataset of Diverse Text for Language Modeling. CoRR, abs/2101.00027. [Link](https://arxiv.org/abs/2101.00027) - Gutiérrez-Fandiño, A., Armengol-Estapé, J., Gonzalez-Agirre, A., & Villegas, M. (2021). Spanish Legalese Language Model and Corpora. - Hansen, D. H. (2018). The Danish Parliament Corpus 2009—2017, v1. [Link](http://hdl.handle.net/20.500.12115/8) - Henderson*, P., Krass*, M. S., Zheng, L., Guha, N., Manning, C. D., Jurafsky, D., & Ho, D. E. (2022). Pile of Law: Learning Responsible Data Filtering from the Law and a 256GB Open-Source Legal Dataset. arXiv. [Link](https://arxiv.org/abs/2207.00220) - Hendrycks, D., Burns, C., Kadavath, S., Arora, A., Basart, S., Tang, E., Song, D., & Steinhardt, J. (2021). Measuring Mathematical Problem Solving With the MATH Dataset. NeurIPS. - Jansen, T., Tong, Y., Zevallos, V., & Suarez, P. O. (2022). Perplexed by Quality: A Perplexity-based Method for Adult and Harmful Content Detection in Multilingual Heterogeneous Web Data. - Koppel, K., & Kallas, J. (2022). Eesti keele ühendkorpuste sari 2013–2021: Mahukaim eestikeelsete digitekstide kogu. Eesti Rakenduslingvistika Ühingu Aastaraamat Estonian Papers in Applied Linguistics, 18, 207–228. [Link](https://doi.org/10.5128/erya18.12) - Křen, M., Cvrček, V., Henyš, J., Hnátková, M., Jelínek, T., Kocek, J., Kováříková, D., Křivan, J., Milička, J., Petkevič, V., Procházka, P., Skoumalová, H., Šindlerová, J., & Škrabal, M. (2021). SYN v9: Large corpus of written Czech. [Link](http://hdl.handle.net/11234/1-4635) - Kreutzer, J., Caswell, I., Wang, L., Wahab, A., van Esch, D., Ulzii-Orshikh, N., Tapo, A., Subramani, N., Sokolov, A., Sikasote, C., Setyawan, M., Sarin, S., Samb, S., Sagot, B., Rivera, C., Rios, A., Papadimitriou, I., Osei, S., Suarez, P. O., … Adeyemi, M. (2022). Quality at a Glance: An Audit of Web-Crawled Multilingual Datasets. Transactions of the Association for Computational Linguistics, 10, 50–72. [Link](https://doi.org/10.1162/tacl_a_00447) - Kummervold, P. E., De la Rosa, J., Wetjen, F., & Brygfjeld, S. A. (2021). Operationalizing a National Digital Library: The Case for a Norwegian Transformer Model. In S. Dobnik & L. Øvrelid (Eds.), Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa) (pp. 20–29). Linköping University Electronic Press, Sweden. [Link](https://aclanthology.org/2021.nodalida-main.3) - Lewandowska-Tomaszczyk, B., Górski, R., Łaziński, M., & Przepiórkowski, A. (2013). The National Corpus of Polish (NKJP). Language use and data analysis. 309–319. - Li, R., Allal, L. B., Zi, Y., Muennighoff, N., Kocetkov, D., Mou, C., Marone, M., Akiki, C., Li, J., Chim, J., Liu, Q., Zheltonozhskii, E., Zhuo, T. Y., Wang, T., Dehaene, O., Davaadorj, M., Lamy-Poirier, J., Monteiro, J., Shliazhko, O., … Vries, H. de. (2023). StarCoder: May the source be with you! - Lison, P., & Tiedemann, J. (2016). OpenSubtitles2016: Extracting Large Parallel Corpora from Movie and TV Subtitles. In N. Calzolari, K. Choukri, T. Declerck, S. Goggi, M. Grobelnik, B. Maegaard, J. Mariani, H. Mazo, A. Moreno, J. Odijk, & S. Piperidis (Eds.), Proceedings of the Tenth International Conference on Language Resources and Evaluation (LREC’16) (pp. 923–929). European Language Resources Association (ELRA). 
[Link](https://aclanthology.org/L16-1147) - Ljubešić, N., & Klubička, F. (2014). Bs,hr,srWaC - Web Corpora of Bosnian, Croatian and Serbian. In F. Bildhauer & R. Schäfer (Eds.), Proceedings of the 9th Web as Corpus Workshop (WaC-9) (pp. 29–35). Association for Computational Linguistics. [Link](https://doi.org/10.3115/v1/W14-0405) - Micallef, K., Gatt, A., Tanti, M., van der Plas, L., & Borg, C. (2022). Pre-training Data Quality and Quantity for a Low-Resource Language: New Corpus and BERT Models for Maltese. Proceedings of the Third Workshop on Deep Learning for Low-Resource Natural Language Processing, 90–101. [Link](https://doi.org/10.18653/v1/2022.deeplo-1.10) - Ogrodniczuk, M. (2018). Polish Parliamentary Corpus. [Link](https://api.semanticscholar.org/CorpusID:235134113) - Ostendorff, M., Blume, T., & Ostendorff, S. (2020). Towards an Open Platform for Legal Information. Proceedings of the ACM/IEEE Joint Conference on Digital Libraries in 2020, 385–388. [Link](https://doi.org/10.1145/3383583.3398616) - Ostendorff, M., Suarez, P. O., Lage, L. F., & Rehm, G. (2024). LLM-Datasets: An Open Framework for Pretraining Datasets of Large Language Models. First Conference on Language Modeling. [Link](https://openreview.net/forum?id=5RdIMlGLXL) - Outsios, S., Skianis, K., Meladianos, P., Xypolopoulos, C., & Vazirgiannis, M. (2018). Word Embeddings from Large-Scale Greek Web content. arXiv Preprint arXiv:1810.06694. - Palomar-Giner, J., Saiz, J. J., Espuña, F., Mina, M., Da Dalt, S., Llop, J., Ostendorff, M., Ortiz Suarez, P., Rehm, G., Gonzalez-Agirre, A., & Villegas, M. (2024). A CURATEd CATalog: Rethinking the Extraction of Pretraining Corpora for Mid-Resourced Languages. In N. Calzolari, M.-Y. Kan, V. Hoste, A. Lenci, S. Sakti, & N. Xue (Eds.), Proceedings of the 2024 Joint International Conference on Computational Linguistics, Language Resources and Evaluation (LREC-COLING 2024) (pp. 335–349). ELRA and ICCL. [Link](https://aclanthology.org/2024.lrec-main.31) - Papaloukas, C., Chalkidis, I., Athinaios, K., Pantazi, D.-A., & Koubarakis, M. (2021). Multi-granular Legal Topic Classification on Greek Legislation. Proceedings of the Natural Legal Language Processing Workshop 2021, 63–75. [Link](https://doi.org/10.48550/arXiv.2109.15298) - Popa-Fabre, M., Ortiz Suárez, P. J., Sagot, B., & de la Clergerie, É. (2020). French Contextualized Word-Embeddings with a sip of CaBeRnet: A New French Balanced Reference Corpus. Proceedings of the 8th Workshop on Challenges in the Management of Large Corpora, 15–23. [Link](https://aclanthology.org/2020.cmlc-1.3) - Rae, J. W., Potapenko, A., Jayakumar, S. M., Hillier, C., & Lillicrap, T. P. (2019). Compressive Transformers for Long-Range Sequence Modelling. arXiv Preprint. [Link](https://arxiv.org/abs/1911.05507) - Rodrigues, J., Gomes, L., Silva, J., Branco, A., Santos, R., Cardoso, H. L., & Osório, T. (2023). Advancing Neural Encoding of Portuguese with Transformer Albertina PT-\*. - Rødven-Eide, S. (2016). The Swedish Culturomics Gigaword CorpusThe Swedish Culturomics Gigaword Corpus [Dataset]. Språkbanken Text. [Link](https://doi.org/10.23695/3WMV-1Z09) - Sharma, E., Li, C., & Wang, L. (2019). BIGPATENT: A Large-Scale Dataset for Abstractive and Coherent Summarization. CoRR, abs/1906.03741. [Link](http://arxiv.org/abs/1906.03741) - Soldaini, L., & Lo, K. (2023). peS2o (Pretraining Efficiently on S2ORC) Dataset. Allen Institute for AI. - Strømberg-Derczynski, L., Ciosici, M., Baglini, R., Christiansen, M. H., Dalsgaard, J. A., Fusaroli, R., Henrichsen, P. 
J., Hvingelby, R., Kirkedal, A., Kjeldsen, A. S., Ladefoged, C., Nielsen, F. Å., Madsen, J., Petersen, M. L., Rystrøm, J. H., & Varab, D. (2021). The Danish Gigaword Corpus. Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa), 413–421. [Link](https://aclanthology.org/2021.nodalida-main.46) - Subramani, N., Luccioni, S., Dodge, J., & Mitchell, M. (2023). Detecting Personal Information in Training Corpora: An Analysis. 208–220. [Link](https://doi.org/10.18653/v1/2023.trustnlp-1.18) - Varab, D., & Schluter, N. (2020). DaNewsroom: A Large-scale Danish Summarisation Dataset. Proceedings of The 12th Language Resources and Evaluation Conference, 6731–6739. [Link](https://www.aclweb.org/anthology/2020.lrec-1.831) - Váradi, T., Nyéki, B., Koeva, S., Tadić, M., Štefanec, V., Ogrodniczuk, M., Nitoń, B., Pezik, P., Barbu Mititelu, V., Irimia, E., Mitrofan, M., Tufiș, D., Garabík, R., Krek, S., & Repar, A. (2022). Introducing the CURLICAT Corpora: Seven-language Domain Specific Annotated Corpora from Curated Sources. In N. Calzolari, F. Béchet, P. Blache, K. Choukri, C. Cieri, T. Declerck, S. Goggi, H. Isahara, B. Maegaard, J. Mariani, H. Mazo, J. Odijk, & S. Piperidis (Eds.), Proceedings of the Thirteenth Language Resources and Evaluation Conference (pp. 100–108). European Language Resources Association. [Link](https://aclanthology.org/2022.lrec-1.11) - Wagner Filho, J. A., Wilkens, R., Idiart, M., & Villavicencio, A. (2018). The brWaC Corpus: A New Open Resource for Brazilian Portuguese. Proceedings of the Eleventh International Conference on Language Resources and Evaluation (LREC 2018). - Žagar, A., Kavaš, M., Robnik-Šikonja, M., Erjavec, T., Fišer, D., Ljubešić, N., Ferme, M., Borovič, M., Boškovič, B., Ojsteršek, M., & Hrovat, G. (2022). Corpus of academic Slovene KAS 2.0. [Link](http://hdl.handle.net/11356/1448) - Parrish, A., Chen, A., Nangia, N., Padmakumar, V., Phang, J., Thompson, J., Htut, P. M., & Bowman, S. (2022). BBQ: A hand-built bias benchmark for question answering. In Findings of the Association for Computational Linguistics: ACL 2022 (pp. 2086–2105). Association for Computational Linguistics, Dublin, Ireland. - Sheng, E., Chang, K.-W., Natarajan, P., & Peng, N. (2019). The Woman Worked as a Babysitter: On Biases in Language Generation. In Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP) (pp. 3407–3412). Association for Computational Linguistics, Hong Kong, China. - Clark, P., Cowhey, I., Etzioni, O., Khot, T., Sabharwal, A., Schoenick, C., & Tafjord, O. (2018). Think you have Solved Question Answering? Try ARC, the AI2 Reasoning Challenge. arXiv:1803.05457v1. - Socher, R., Perelygin, A., Wu, J., Chuang, J., Manning, C. D., Ng, A., & Potts, C. (2013). Recursive Deep Models for Semantic Compositionality Over a Sentiment Treebank. In Proceedings of the 2013 Conference on Empirical Methods in Natural Language Processing (pp. 1631–1642). Association for Computational Linguistics, Seattle, Washington, USA. - Penedo, G., Kydlíček, H., Allal, L. B., Lozhkov, A., Mitchell, M., Raffel, C., Von Werra, L., & Wolf, T. (2024). The FineWeb Datasets: Decanting the Web for the Finest Text Data at Scale (arXiv:2406.17557). arXiv. http://arxiv.org/abs/2406.17557 - Singh, S., Vargus, F., Dsouza, D., Karlsson, B. 
F., Mahendiran, A., Ko, W.-Y., Shandilya, H., Patel, J., Mataciunas, D., OMahony, L., Zhang, M., Hettiarachchi, R., Wilson, J., Machado, M., Moura, L. S., Krzemiński, D., Fadaei, H., Ergün, I., Okoh, I., … Hooker, S. (2024). Aya Dataset: An Open-Access Collection for Multilingual Instruction Tuning (arXiv:2402.06619). arXiv. http://arxiv.org/abs/2402.06619 </details> </details> The model was trained for 3 epochs, with two final rounds of 0.3B higher-quality tokens each, meaning that the total number of tokens seen during pre-training amounts to roughly 7.8 trillion tokens. We provide an extensive Datasheet section following the best practices defined by [(Gebru et al., 2021)](https://arxiv.org/pdf/1803.09010). <details> <summary>Datasheet</summary> #### Motivation **For what purpose was the dataset created? Was there a specific task in mind? Was there a specific gap that needed to be filled? Please provide a description.** The purpose of creating this dataset is to pre-train the Salamandra family of multilingual models with high performance in a large number of European languages (35) and code (including 92 different programming languages). In addition, we especially aim to represent the co-official languages of Spain: Spanish, Catalan, Galician, and Basque. This is the reason why we carry out an oversampling of these languages. We found a significant lack of massive multilingual data, especially for minority languages (Ostendorff & Rehm, 2023), so part of our effort in creating this pre-training dataset has resulted in contributions to large projects such as the Community OSCAR (Brack et al., 2024), which includes 151 languages and 40T words, or CATalog (Palomar-Giner et al., 2024), the largest open dataset in Catalan in the world. **Who created the dataset (e.g., which team, research group) and on behalf of which entity (e.g., company, institution, organization)?** The dataset has been created by the Language Technologies unit (LangTech) of the Barcelona Supercomputing Center - Centro Nacional de Supercomputación (BSC-CNS), which aims to advance the field of natural language processing through cutting-edge research and development and the use of HPC. In particular, it was created by the unit's data team, the main contributors being Javier Saiz, Ferran Espuña, and Jorge Palomar. However, the creation of the dataset would not have been possible without the involvement of a large number of collaborators, partners, and public institutions, which can be found in detail in the acknowledgements. **Who funded the creation of the dataset? If there is an associated grant, please provide the name of the grantor and the grant name and number.** This work/research has been promoted and financed by the Government of Catalonia through the [Aina project](https://projecteaina.cat/). #### Composition **What do the instances that comprise the dataset represent (e.g., documents, photos, people, countries)? Are there multiple types of instances (e.g., movies, users, and ratings; people and interactions between them; nodes and edges)? Please provide a description.** The dataset consists entirely of text documents in various languages. Specifically, data was mainly sourced from the following databases and repositories: - **Common Crawl:** Repository that holds website data and is run by the Common Crawl non-profit organization. It is updated monthly and is distributed under the CC0 1.0 public domain license. 
- **GitHub:** Community platform that allows developers to create, store, manage, and share their code. Repositories are crawled and then distributed with their original licenses, which may vary from permissive to non-commercial licenses. - **Wikimedia:** Database that holds the collection of databases managed by the Wikimedia Foundation, including Wikipedia, Wikibooks, Wikinews, Wikiquote, Wikisource, and Wikivoyage. It is updated monthly and is distributed under Creative Commons Attribution-ShareAlike License 4.0. - **EurLex:** Repository that holds the collection of legal documents from the European Union, available in all of the EU’s 24 official languages and run by the Publications Office of the European Union. It is updated daily and is distributed under the Creative Commons Attribution 4.0 International license. - **Other repositories:** Specific repositories were crawled under permission for domain-specific corpora, which include academic, legal, and newspaper repositories. We provide a complete list of dataset sources at the end of this section. **How many instances are there in total (of each type, if appropriate)?** The dataset contains a diverse range of instances across multiple languages, with notable adjustments for certain languages. English represents the largest portion, accounting for 39.08% of the total data. Spanish was upsampled by a factor of 2, bringing its share to 16.59%, while Catalan (1.84%), Basque (0.26%), and Galician (0.36%) were also upsampled by 2. On the other hand, code-related data was downsampled by half, making up 6.42% of the total. Other prominent languages include French (6.59%), Russian (5.39%), German (4.25%), and Hungarian (3.93%), with several additional languages contributing between 1% and 2%, and smaller portions represented by a variety of others. **Does the dataset contain all possible instances or is it a sample (not necessarily random) of instances from a larger set? If the dataset is a sample, then what is the larger set? Is the sample representative of the larger set (e.g., geographic coverage)? If so, please describe how this representativeness was validated/verified. If it is not representative of the larger set, please describe why not (e.g., to cover a more diverse range of instances, because instances were withheld or unavailable).** The dataset is a sample from multiple sources, with different weights based on the primary language of the content: Spanish, Catalan, Basque, and Galician content was upsampled by a factor of two, while programming languages were downsampled to half their original sampling probability. Other sources were sampled in proportion to their occurrence. **What data does each instance consist of? “Raw” data (e.g., unprocessed text or images) or features? In either case, please provide a description.** Each instance consists of a text document processed for deduplication, language identification, and source-specific filtering. Some documents required optical character recognition (OCR) to extract text from non-text formats such as PDFs. **Is there a label or target associated with each instance? If so, please provide a description.** Each instance is labeled with a unique identifier, the primary language of the content, and the URL for web-sourced instances. Additional labels were automatically assigned to detect specific types of content (harmful or toxic content) and to assign preliminary indicators of undesired qualities (very short documents, high density of symbols, etc.), which were used for filtering instances. 
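To make the role of these automatically assigned labels more concrete, the following is a minimal, purely illustrative sketch of how per-instance metadata of this kind can drive filtering decisions. The field names, thresholds, and example values are assumptions for illustration only and do not reflect the exact schema or criteria used for this dataset.

```python
from dataclasses import dataclass
from typing import List, Optional

@dataclass
class Instance:
    """Illustrative per-document record; all field names are hypothetical."""
    doc_id: str                # unique identifier
    language: str              # primary language of the content
    url: Optional[str]         # only present for web-sourced documents
    quality_score: float       # preliminary indicator of undesired qualities
    harmful_score: float       # automatic harmful/toxic content indicator

def keep(doc: Instance, min_quality: float = 0.8, max_harmful: float = 0.5) -> bool:
    """Return True if the document passes the (hypothetical) filtering thresholds."""
    return doc.quality_score >= min_quality and doc.harmful_score <= max_harmful

docs: List[Instance] = [
    Instance("doc-001", "ca", "https://example.org/a", 0.93, 0.02),
    Instance("doc-002", "es", None, 0.41, 0.01),  # dropped: quality score too low
]
print([d.doc_id for d in docs if keep(d)])  # ['doc-001']
```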
**Is any information missing from individual instances? If so, please provide a description, explaining why this information is missing (e.g., because it was unavailable). This does not include intentionally removed information, but might include, e.g., redacted text.** No significant information is missing from the instances. **Are relationships between individual instances made explicit (e.g., users’ movie ratings, social network links)? If so, please describe how these relationships are made explicit.** Instances are related through shared metadata, such as source and language identifiers. **Are there recommended data splits (e.g., training, development/validation, testing)? If so, please provide a description of these splits, explaining the rationale behind them.** The dataset is split randomly into training, validation, and test sets. **Are there any errors, sources of noise, or redundancies in the dataset? If so, please provide a description.** Despite removing duplicated instances within each source, redundancy remains at the paragraph and sentence levels, particularly in web-sourced instances where SEO techniques and templates contribute to repeated textual patterns. Some instances may also be duplicated across sources due to format variations. **Is the dataset self-contained, or does it link to or otherwise rely on external resources (e.g., websites, tweets, other datasets)? If it links to or relies on external resources, a) are there guarantees that they will exist, and remain constant, over time; b) are there official archival versions of the complete dataset (i.e., including the external resources as they existed at the time the dataset was created); c) are there any restrictions (e.g., licenses, fees) associated with any of the external resources that might apply to a dataset consumer? Please provide descriptions of all external resources and any restrictions associated with them, as well as links or other access points, as appropriate.** The dataset is self-contained and does not rely on external resources. **Does the dataset contain data that might be considered confidential (e.g., data that is protected by legal privilege or by doctor–patient confidentiality, data that includes the content of individuals’ non-public communications)? If so, please provide a description.** The dataset does not contain confidential data. **Does the dataset contain data that, if viewed directly, might be offensive, insulting, threatening, or might otherwise cause anxiety? If so, please describe why. If the dataset does not relate to people, you may skip the remaining questions in this section.** The dataset includes web-crawled content, which may overrepresent pornographic material across languages (Kreutzer et al., 2022). Although pre-processing techniques were applied to mitigate offensive content, the heterogeneity and scale of web-sourced data make exhaustive filtering challenging: it is next to impossible to identify all adult content without resorting to excessive filtering, which in turn may disproportionately affect certain demographic groups (Dodge et al., 2021). **Does the dataset identify any subpopulations (e.g., by age, gender)? If so, please describe how these subpopulations are identified and provide a description of their respective distributions within the dataset.** The dataset does not explicitly identify any subpopulations. 
**Is it possible to identify individuals (i.e., one or more natural persons), either directly or indirectly (i.e., in combination with other data) from the dataset? If so, please describe how.** Web-sourced instances in the dataset may contain personally identifiable information (PII) that is publicly available on the Web, such as names, IP addresses, email addresses, and phone numbers. While it would be possible to indirectly identify individuals through the combination of multiple data points, the nature and scale of web data make it difficult to parse such information. In any case, efforts are made to filter or anonymize sensitive data during pre-processing, but some identifiable information may remain in the dataset. **Does the dataset contain data that might be considered sensitive in any way? If so, please provide a description.** Given that the dataset includes web-sourced content and other publicly available documents, instances may inadvertently reveal financial information, health-related details, or forms of government identification, such as social security numbers (Subramani et al., 2023), especially if the content originates from less-regulated sources or user-generated platforms. #### Collection Process **How was the data collected?** This dataset was built by combining several sources, whose acquisition methods can be classified into three groups: - Web-sourced datasets with some preprocessing available under permissive license (e.g., Common Crawl). - Domain-specific or language-specific raw crawls (e.g., Spanish Crawling). - Manually curated data obtained through collaborators, data providers (by means of legal assignment agreements) or open source projects (e.g., CATalog). **What mechanisms or procedures were used to collect the data? How were these mechanisms or procedures validated?** According to the three groups previously defined, these are the mechanisms used in each of them: - Open direct download. Validation: data integrity tests. - Ad-hoc scrapers or crawlers. Validation: software unit and data integrity tests. - Direct download via FTP, SFTP, API or S3. Validation: data integrity tests. **If the dataset is a sample from a larger set, what was the sampling strategy?** The sampling strategy was to use the whole dataset resulting from the filtering explained in the ‘preprocessing/cleaning/labelling’ section, with the particularity that an upsampling of 2 (i.e. twice the probability of sampling a document) was performed for the co-official languages of Spain (Spanish, Catalan, Galician, Basque), and a downsampling of 1/2 was applied for code (half the probability of sampling a code document, evenly distributed among all programming languages). **Who was involved in the data collection process and how were they compensated?** This data is generally extracted, filtered and sampled by automated processes. The code required to run these processes has been developed entirely by members of the LangTech data team, or otherwise obtained from open-source software. Furthermore, there has been no monetary consideration for acquiring data from suppliers. **Over what timeframe was the data collected? Does this timeframe match the creation timeframe of the data associated with the instances? If not, please describe the timeframe in which the data associated with the instances was created.** Data were acquired and processed from April 2023 to April 2024. 
However, as mentioned, much data has been obtained from open projects such as Common Crawl, which contains data from 2014, so it is the end date (04/2024) rather than the start date that is important. **Were any ethical review processes conducted? If so, please provide a description of these review processes, including the outcomes, as well as a link or other access point to any supporting documentation.** No particular ethical review process has been carried out as the data is mostly open and not particularly sensitive. However, we have an internal evaluation team and a bias team to monitor ethical issues. In addition, we work closely with ‘Observatori d'Ètica en Intel·ligència Artificial’ (OEIAC) and ‘Agencia Española de Supervisión de la Inteligencia Artificial’ (AESIA) to audit the processes we carry out from an ethical and legal point of view, respectively. #### Preprocessing **Was any preprocessing/cleaning/labeling of the data done? If so, please provide a description. If not, you may skip the remaining questions in this section.** Instances of text documents were not altered, but web-sourced documents were filtered based on specific criteria along two dimensions: - Quality: documents with a quality score lower than 0.8, computed with CURATE (Palomar-Giner et al., 2024) from undesired qualities such as a low number of lines, very short sentences, long footers and headers, and a high percentage of punctuation, were filtered out. - Harmful or adult content: documents originating from Colossal OSCAR were filtered using LLM-Datasets (Ostendorff et al., 2024) based on the perplexity from a language model (‘harmful_pp’ field) provided by the Ungoliant pipeline (Abadji et al., 2021). **Was the “raw” data saved in addition to the preprocessed/cleaned/labeled data? If so, please provide a link or other access point to the “raw” data.** The original raw data was not kept. **Is the software that was used to preprocess/clean/label the data available? If so, please provide a link or other access point.** Yes, the preprocessing and filtering software is open-sourced. The [CURATE](https://github.com/langtech-bsc/CURATE) pipeline was used for Spanish Crawling and CATalog, and the [Ungoliant](https://github.com/oscar-project/ungoliant) pipeline was used for the OSCAR project. #### Uses **Has the dataset been used for any tasks already? If so, please provide a description.** The dataset was used to pre-train the Salamandra model family. **What (other) tasks could the dataset be used for?** The data can be used primarily to pre-train other language models, which can then be used for a wide range of use cases. The dataset could also be used for other tasks such as fine-tuning language models, cross-lingual NLP tasks, machine translation, domain-specific text generation, and language-specific data analysis. **Is there anything about the composition of the dataset or the way it was collected and preprocessed/cleaned/labeled that might impact future uses? Is there anything a dataset consumer could do to mitigate these risks or harms?** Standard language varieties are over-represented in web-crawled content, which impacts language model performance for minority languages. Language diversity in data is crucial to avoid bias, especially in encoding non-standard dialects, preventing the exclusion of demographic groups. 
Moreover, despite legal uncertainties in web-scraped data, we prioritize permissive licenses and privacy protection measures, acknowledging the challenges posed by personally identifiable information (PII) within large-scale datasets. Our ongoing efforts aim to address privacy concerns and contribute to a more inclusive linguistic dataset. **Are there tasks for which the dataset should not be used?** - #### Distribution **Will the dataset be distributed to third parties outside of the entity on behalf of which the dataset was created? If so, please provide a description.** The dataset will not be released or distributed to third parties. Any further questions related to distribution are therefore omitted in this section. #### Maintenance **Who will be supporting/hosting/maintaining the dataset?** The dataset will be hosted by the Language Technologies unit (LangTech) of the Barcelona Supercomputing Center (BSC). The team will ensure regular updates and monitor the dataset for any issues related to content integrity, legal compliance, and bias for the sources they are responsible for. **How can the owner/curator/manager of the dataset be contacted?** The data owner may be contacted at the email address [email protected]. **Will the dataset be updated?** The dataset will not be updated. **If the dataset relates to people, are there applicable limits on the retention of the data associated with the instances? If so, please describe these limits and explain how they will be enforced.** The dataset does not keep sensitive data that could allow direct identification of individuals, apart from the data that is publicly available in web-sourced content. Due to the sheer volume and diversity of web data, it is not feasible to notify individuals or manage data retention on an individual basis. However, efforts are made to mitigate the risks associated with sensitive information through pre-processing and filtering to remove identifiable or harmful content. Despite these measures, vigilance is maintained to address potential privacy and ethical issues. **Will older versions of the dataset continue to be supported/hosted/maintained? If so, please describe how. If not, please describe how its obsolescence will be communicated to dataset consumers.** Since the dataset will not be updated, only the final version will be kept. **If others want to extend/augment/build on/contribute to the dataset, is there a mechanism for them to do so?** The dataset does not allow for external contributions. </details> ### Finetuning Data This instruction-tuned variant has been trained with a mixture of 276k English, Spanish, and Catalan multi-turn instructions gathered from open datasets: | Dataset | ca | en | es | |-----------------------|:------:|:------:|:------:| | alpaca-cleaned | - | 50,000 | - | | aya-dataset | - | 3,944 | 3,854 | | CoQCat | 4,797 | - | - | | databricks-dolly-15k | - | 15,011 | - | | dolly-3k-ca | 3,232 | - | - | | flores-instr | 1,994 | 1,994 | 3,988 | | MentorCA | 7,122 | - | - | | MentorES | - | - | 7,122 | | no-robots | - | 9,499 | - | | oasst-ca | 2,518 | - | - | | oasst2 | 750 | 31,086 | 15,438 | | open-orca | - | 50,000 | - | | RagMultilingual | 16,043 | 14,997 | 11,263 | | tower-blocks | - | 19,895 | 2,000 | | **Total** | **36,456** | **196,426** | **43,665** | --- ## Evaluation ### Gold-standard benchmarks Evaluation is done using the Language Model Evaluation Harness (Gao et al., 2024). 
We evaluate on a set of tasks taken from [SpanishBench](https://github.com/EleutherAI/lm-evaluation-harness/tree/main/lm_eval/tasks/spanish_bench), [CatalanBench](https://github.com/EleutherAI/lm-evaluation-harness/tree/main/lm_eval/tasks/catalan_bench), [BasqueBench](https://github.com/EleutherAI/lm-evaluation-harness/tree/main/lm_eval/tasks/basque_bench) and [GalicianBench](https://github.com/EleutherAI/lm-evaluation-harness/tree/main/lm_eval/tasks/galician_bench). These benchmarks include both new and existing tasks and datasets. Given that this is an instruction-tuned model, we enable LM Evaluation Harness's native `chat-template` feature in the setup. In the tables below, we include results on a selection of evaluation datasets that represent the model's performance across a variety of tasks within these benchmarks. We only use tasks that are either human generated, human translated, or with a strong human-in-the-loop (i.e., machine translation followed by professional revision or machine generation followed by human revision and annotation). This is the reason for the varying number of tasks reported across languages. As more tasks that fulfill these requirements are published, we will update the presented results. We also intend to expand the evaluation to other languages, as long as the datasets meet our quality standards. During the implementation of the evaluation we observed a series of issues worth considering when replicating and interpreting the results presented. These issues include ≈1.5% variance in performance on some tasks depending on the version of the `transformers` library used, and on whether tensor parallelism is used when loading the model. When implementing existing tasks, we carry out a comprehensive quality evaluation of the dataset, the Harness task itself, and what kind of input models see during evaluation. Our implementation (see links above) addresses multiple existing problems such as errors in datasets and prompts, and lack of pre-processing. All this means that results will vary if using other Harness implementations, and may slightly vary depending on the replication setup. It should be noted that these results are subject to all the drawbacks of every current gold-standard evaluation, and that the figures do not fully represent the model's capabilities and potential. We thus advise caution when reading and interpreting the results. A full list of results compared to other baselines, a discussion of the model's performance across tasks and its implications, and details on how task implementation issues were addressed will soon be available in the technical report. All results reported below are in a 0-shot setting. 
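As a reference point for replication, the following is a minimal sketch of how such a 0-shot evaluation with the chat template enabled can be launched through the LM Evaluation Harness Python API. It is not the exact setup used for the reported figures: the model identifier, task selection, and batch size are illustrative, and the `apply_chat_template` argument is only available in sufficiently recent versions of the harness.

```python
# Minimal replication sketch; not the exact configuration used for the reported results.
# Assumes a recent lm-evaluation-harness release with chat-template support.
import lm_eval

results = lm_eval.simple_evaluate(
    model="hf",
    model_args="pretrained=BSC-LT/salamandra-2b-instruct,dtype=bfloat16",
    tasks=["xstorycloze_es", "xstorycloze_ca"],  # example tasks from SpanishBench / CatalanBench
    num_fewshot=0,                               # all reported results are 0-shot
    apply_chat_template=True,                    # availability depends on the harness version
    batch_size=8,
)

for task_name, metrics in results["results"].items():
    print(task_name, metrics)
```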
#### Spanish <table><thead> <tr> <th>Category</th> <th>Task</th> <th>Metric</th> <th>Result</th> </tr></thead> <tbody> <tr> <td>Commonsense Reasoning</td> <td>xstorycloze_es</td> <td>acc</td> <td>62.34</td> </tr> <tr> <td rowspan="2">NLI</td> <td>wnli_es</td> <td>acc</td> <td>47.89</td> </tr> <tr> <td>xnli_es</td> <td>acc</td> <td>47.03</td> </tr> <tr> <td>Paraphrasing</td> <td>paws_es</td> <td>acc</td> <td>55.5</td> </tr> <tr> <td>QA</td> <td>xquad_es</td> <td>acc</td> <td>42.21</td> </tr> <tr> <td>Translation</td> <td>flores_es</td> <td>bleu</td> <td>20.27</td> </tr> </tbody> </table> #### Catalan <table><thead> <tr> <th>Category</th> <th>Task</th> <th>Metric</th> <th>Result</th> </tr></thead> <tbody> <tr> <td rowspan="2">Commonsense Reasoning</td> <td>copa_ca</td> <td>acc</td> <td>70.4</td> </tr> <tr> <td>xstorycloze_ca</td> <td>acc</td> <td>63.07</td> </tr> <tr> <td rowspan="2">NLI</td> <td>wnli_ca</td> <td>acc</td> <td>52.11</td> </tr> <tr> <td>xnli_ca</td> <td>acc</td> <td>51.69</td> </tr> <tr> <td rowspan="2">Paraphrasing</td> <td>parafraseja</td> <td>acc</td> <td>61.88</td> </tr> <tr> <td>paws_ca</td> <td>acc</td> <td>57.7</td> </tr> <tr> <td rowspan="5">QA</td> <td>arc_ca_easy</td> <td>acc</td> <td>51.94</td> </tr> <tr> <td>arc_ca_challenge</td> <td>acc</td> <td>29.52</td> </tr> <tr> <td>openbookqa_ca</td> <td>acc</td> <td>26.4</td> </tr> <tr> <td>piqa_ca</td> <td>acc</td> <td>62.89</td> </tr> <tr> <td>siqa_ca</td> <td>acc</td> <td>42.63</td> </tr> <tr> <td>Translation</td> <td>flores_ca</td> <td>bleu</td> <td>24.48</td> </tr> </tbody></table> #### Basque <table><thead> <tr> <th>Category</th> <th>Task</th> <th>Metric</th> <th>Result</th> </tr></thead> <tbody> <tr> <td rowspan="2">Commonsense Reasoning</td> <td>xcopa_eu</td> <td>acc</td> <td>53.6</td> </tr> <tr> <td>xstorycloze_eu</td> <td>acc</td> <td>56.39</td> </tr> <tr> <td rowspan="2">NLI</td> <td>wnli_eu</td> <td>acc</td> <td>45.07</td> </tr> <tr> <td>xnli_eu</td> <td>acc</td> <td>39.44</td> </tr> <tr> <td rowspan="3">QA</td> <td>eus_exams</td> <td>acc</td> <td>25.35</td> </tr> <tr> <td>eus_proficiency</td> <td>acc</td> <td>26.37</td> </tr> <tr> <td>eus_trivia</td> <td>acc</td> <td>26.24</td> </tr> <tr> <td>Reading Comprehension</td> <td>eus_reading</td> <td>acc</td> <td>24.72</td> </tr> <tr> <td>Translation</td> <td>flores_eu</td> <td>bleu</td> <td>9.67</td> </tr> </tbody></table> #### Galician <table><thead> <tr> <th>Category</th> <th>Task</th> <th>Metric</th> <th>Result</th> </tr></thead> <tbody> <tr> <td rowspan="2">Paraphrasing</td> <td>parafrases_gl</td> <td>acc</td> <td>50.00</td> </tr> <tr> <td>paws_gl</td> <td>acc</td> <td>52.20</td> </tr> <tr> <td>QA</td> <td>openbookqa_gl</td> <td>acc</td> <td>33.2</td> </tr> <tr> <td>Translation</td> <td>flores_gl</td> <td>bleu</td> <td>22.39</td> </tr> </tbody> </table> --- ## Ethical Considerations and Limitations We examine the presence of undesired societal and cognitive biases in this model using different benchmarks. For societal biases, we test performance using the BBQ dataset (Parrish et al., 2022) in the original English and the Regard dataset (Sheng et al., 2019). While the model achieves moderate accuracies (between 0.5 and 0.6 depending on the social group) in disambiguated settings, it performs very poorly in ambiguous settings. 
Taken together, these results suggest the pervasiveness of social biases that may have an effect on task performance. Our cognitive bias analysis focuses on positional effects in 0-shot settings, and majority class bias in few-shot settings. For positional effects, we leverage the ARC Multiple Choice Question dataset (Clark et al., 2018). We observe significant but moderately weak primacy effects, whereby the model shows a preference for answers towards the beginning of the list of provided answers. We measure majority class effects in few-shot settings using SST-2 (Socher et al., 2013). We again detect significant effects, with a small effect size. This suggests that the model is relatively robust against the examined cognitive biases. We highlight that our analyses of these biases are by no means exhaustive and are limited by the relative scarcity of adequate resources in all languages present in the training data. We aim to gradually extend and expand our analyses in future work. These results can be expected from a model that has undergone only a preliminary instruction tuning. These tests are performed in order to show the biases the model may contain. We urge developers to take them into account and perform safety testing and tuning tailored to their specific applications of the model. --- ## Additional information ### Author The Language Technologies Unit from Barcelona Supercomputing Center. ### Contact For further information, please send an email to <[email protected]>. ### Copyright Copyright (c) 2024 by Language Technologies Unit, Barcelona Supercomputing Center. ### Funding This work has been promoted and financed by the Government of Catalonia through the [Aina Project](https://projecteaina.cat/). This work is funded by the _Ministerio para la Transformación Digital y de la Función Pública_ - Funded by EU – NextGenerationEU within the framework of [ILENIA Project](https://proyectoilenia.es/) with reference 2022/TL22/00215337. ### Acknowledgements This project has benefited from the contributions of numerous teams and institutions, mainly through data contributions, knowledge transfer or technical support. In Catalonia, many institutions have been involved in the project. Our thanks to Òmnium Cultural, Parlament de Catalunya, Institut d'Estudis Aranesos, Racó Català, Vilaweb, ACN, Nació Digital, El món and Aquí Berguedà. At the national level, we are especially grateful to our ILENIA project partners: CENID, HiTZ and CiTIUS for their participation. We also extend our genuine gratitude to the Spanish Senate and Congress, Fundación Dialnet, Fundación Elcano and the ‘Instituto Universitario de Sistemas Inteligentes y Aplicaciones Numéricas en Ingeniería (SIANI)’ of the University of Las Palmas de Gran Canaria. At the international level, we thank the Welsh government, DFKI, the Occiglot project, especially Malte Ostendorff, and The Common Crawl Foundation, especially Pedro Ortiz, for their collaboration. We would also like to give special thanks to the NVIDIA team, with whom we have met regularly, especially to: Ignacio Sarasua, Adam Henryk Grzywaczewski, Oleg Sudakov, Sergio Perez, Miguel Martinez, Felipes Soares and Meriem Bendris. Their constant support has been especially appreciated throughout the entire process. Their valuable efforts have been instrumental in the development of this work. ### Disclaimer Be aware that the model may contain biases or other unintended distortions. 
When third parties deploy systems or provide services based on this model, or use the model themselves, they bear the responsibility for mitigating any associated risks and ensuring compliance with applicable regulations, including those governing the use of Artificial Intelligence. The Barcelona Supercomputing Center, as the owner and creator of the model, shall not be held liable for any outcomes resulting from third-party use. ### Citation Technical report and paper coming soon. ### License [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0) ## Model Index |Model|Base|Instruct| |:---:|:---:|:---:| |2B| [Link](https://huggingface.co/BSC-LT/salamandra-2b) | [Link](https://huggingface.co/BSC-LT/salamandra-2b-instruct) | |7B| [Link](https://huggingface.co/BSC-LT/salamandra-7b) | [Link](https://huggingface.co/BSC-LT/salamandra-7b-instruct) | |40B| WiP | WiP |
[ "QUESTION_ANSWERING", "TRANSLATION", "SUMMARIZATION", "PARAPHRASING" ]
[ "BEAR", "SCIELO" ]
tarekziade/distilbert-reuters21578
tarekziade
text-classification
[ "transformers", "pytorch", "onnx", "safetensors", "distilbert", "text-classification", "generated_from_trainer", "news_classification", "multi_label", "en", "dataset:reuters21578", "base_model:distilbert/distilbert-base-cased", "base_model:quantized:distilbert/distilbert-base-cased", "license:apache-2.0", "model-index", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-12-17T18:29:49
2023-12-17T18:39:54
104
0
--- base_model: distilbert-base-cased datasets: - reuters21578 language: - en license: apache-2.0 metrics: - f1 - accuracy pipeline_tag: text-classification tags: - generated_from_trainer - news_classification - multi_label widget: - text: JAPAN TO REVISE LONG-TERM ENERGY DEMAND DOWNWARDS The Ministry of International Trade and Industry (MITI) will revise its long-term energy supply/demand outlook by August to meet a forecast downtrend in Japanese energy demand, ministry officials said. MITI is expected to lower the projection for primary energy supplies in the year 2000 to 550 mln kilolitres (kl) from 600 mln, they said. The decision follows the emergence of structural changes in Japanese industry following the rise in the value of the yen and a decline in domestic electric power demand. MITI is planning to work out a revised energy supply/demand outlook through deliberations of committee meetings of the Agency of Natural Resources and Energy, the officials said. They said MITI will also review the breakdown of energy supply sources, including oil, nuclear, coal and natural gas. Nuclear energy provided the bulk of Japan's electric power in the fiscal year ended March 31, supplying an estimated 27 pct on a kilowatt/hour basis, followed by oil (23 pct) and liquefied natural gas (21 pct), they noted. REUTER example_title: Example-1 model-index: - name: distilbert-finetuned-reuters21578-multilabel results: - task: type: text-classification name: Text Classification dataset: name: reuters21578 type: reuters21578 config: ModApte split: test args: ModApte metrics: - type: f1 value: 0.8628858578607322 name: F1 - type: accuracy value: 0.8195625759416768 name: Accuracy --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> ## Origin of this model This model was forked from https://huggingface.co/lxyuan/distilbert-finetuned-reuters21578-multilabel -- I just generated the onnx versions in /onnx ## Motivation Fine-tuning on the Reuters-21578 multilabel dataset is a valuable exercise, especially as it's frequently used in take-home tests during interviews. The dataset's complexity is just right for testing multilabel classification skills within a limited timeframe, while its real-world relevance helps simulate practical challenges. Experimenting with this dataset not only helps candidates prepare for interviews but also hones various skills including preprocessing, feature extraction, and model evaluation. This model is a fine-tuned version of [distilbert-base-cased](https://huggingface.co/distilbert-base-cased) on the reuters21578 dataset. ## Inference Example ```python from transformers import pipeline pipe = pipeline("text-classification", model="lxyuan/distilbert-finetuned-reuters21578-multilabel", return_all_scores=True) # dataset["test"]["text"][2] news_article = ( "JAPAN TO REVISE LONG-TERM ENERGY DEMAND DOWNWARDS The Ministry of International Trade and " "Industry (MITI) will revise its long-term energy supply/demand " "outlook by August to meet a forecast downtrend in Japanese " "energy demand, ministry officials said. " "MITI is expected to lower the projection for primary energy " "supplies in the year 2000 to 550 mln kilolitres (kl) from 600 " "mln, they said. " "The decision follows the emergence of structural changes in " "Japanese industry following the rise in the value of the yen " "and a decline in domestic electric power demand. 
" "MITI is planning to work out a revised energy supply/demand " "outlook through deliberations of committee meetings of the " "Agency of Natural Resources and Energy, the officials said. " "They said MITI will also review the breakdown of energy " "supply sources, including oil, nuclear, coal and natural gas. " "Nuclear energy provided the bulk of Japan's electric power " "in the fiscal year ended March 31, supplying an estimated 27 " "pct on a kilowatt/hour basis, followed by oil (23 pct) and " "liquefied natural gas (21 pct), they noted. " "REUTER" ) # dataset["test"]["topics"][2] target_topics = ['crude', 'nat-gas'] fn_kwargs={"padding": "max_length", "truncation": True, "max_length": 512} output = pipe(example, function_to_apply="sigmoid", **fn_kwargs) for item in output[0]: if item["score"]>=0.5: print(item["label"], item["score"]) >>> crude 0.7355073690414429 nat-gas 0.8600426316261292 ``` ## Overall Summary and Comparison Table | Metric | Baseline (Scikit-learn) | Transformer Model | | ------------------- | ----------------------- | ----------------- | | Micro-Averaged F1 | 0.77 | 0.86 | | Macro-Averaged F1 | 0.29 | 0.33 | | Weighted Average F1 | 0.70 | 0.84 | | Samples Average F1 | 0.75 | 0.80 | **Precision vs Recall**: Both models prioritize high precision over recall. In our client-facing news classification model, precision takes precedence over recall. This is because the repercussions of false positives are more severe and harder to justify to clients compared to false negatives. When the model incorrectly tags a news item with a topic, it's challenging to explain this error. On the other hand, if the model misses a topic, it's easier to defend by stating that the topic wasn't sufficiently emphasized in the news article. **Class Imbalance Handling**: Both models suffer from the same general issue of not performing well on minority classes, as reflected in the low macro-averaged F1-scores. However, the transformer model shows a slight improvement, albeit marginal, in macro-averaged F1-score (0.33 vs 0.29). **Issue of Zero Support Labels**: Both models have the problem of zero support for several labels, meaning these labels did not appear in the test set. This lack of "support" can significantly skew the performance metrics and may suggest that either the models are not well-tuned to predict these minority classes, or the dataset itself lacks sufficient examples of these classes. Given that both models struggle with low macro-averaged F1 scores, this issue further emphasizes the need for improved minority class handling in the models. **General Performance**: The transformer model surpasses the scikit-learn baseline in terms of weighted and samples average F1-scores, indicating better overall performance and better handling of label imbalance. **Conclusion**: While both models exhibit high precision, which is a business requirement, the transformer model slightly outperforms the scikit-learn baseline model in all metrics considered. It provides a better trade-off between precision and recall, as well as some improvement, albeit small, in handling minority classes. Thus, despite sharing similar weaknesses with the baseline, the transformer model demonstrates incremental improvements that could be significant in a production setting. 
## Training and evaluation data We remove single-appearance labels from both training and test sets using the following code: ```python from collections import Counter from itertools import chain from datasets import load_dataset # Find Single Appearance Labels def find_single_appearance_labels(y): """Find labels that appear only once in the dataset.""" all_labels = list(chain.from_iterable(y)) label_count = Counter(all_labels) single_appearance_labels = [label for label, count in label_count.items() if count == 1] return single_appearance_labels # Remove Single Appearance Labels from Dataset def remove_single_appearance_labels(dataset, single_appearance_labels): """Remove samples with single-appearance labels from both train and test sets.""" for split in ['train', 'test']: dataset[split] = dataset[split].filter(lambda x: all(label not in single_appearance_labels for label in x['topics'])) return dataset dataset = load_dataset("reuters21578", "ModApte") # Find and Remove Single Appearance Labels y_train = [item['topics'] for item in dataset['train']] single_appearance_labels = find_single_appearance_labels(y_train) print(f"Single appearance labels: {single_appearance_labels}") >>> Single appearance labels: ['lin-oil', 'rye', 'red-bean', 'groundnut-oil', 'citruspulp', 'rape-meal', 'corn-oil', 'peseta', 'cotton-oil', 'ringgit', 'castorseed', 'castor-oil', 'lit', 'rupiah', 'skr', 'nkr', 'dkr', 'sun-meal', 'lin-meal', 'cruzado'] print("Removing samples with single-appearance labels...") dataset = remove_single_appearance_labels(dataset, single_appearance_labels) unique_labels = set(chain.from_iterable(dataset['train']["topics"])) print(f"We have {len(unique_labels)} unique labels:\n{unique_labels}") >>> We have 95 unique labels: {'veg-oil', 'gold', 'platinum', 'ipi', 'acq', 'carcass', 'wool', 'coconut-oil', 'linseed', 'copper', 'soy-meal', 'jet', 'dlr', 'copra-cake', 'hog', 'rand', 'strategic-metal', 'can', 'tea', 'sorghum', 'livestock', 'barley', 'lumber', 'earn', 'wheat', 'trade', 'soy-oil', 'cocoa', 'inventories', 'income', 'rubber', 'tin', 'iron-steel', 'ship', 'rapeseed', 'wpi', 'sun-oil', 'pet-chem', 'palmkernel', 'nat-gas', 'gnp', 'l-cattle', 'propane', 'rice', 'lead', 'alum', 'instal-debt', 'saudriyal', 'cpu', 'jobs', 'meal-feed', 'oilseed', 'dmk', 'plywood', 'zinc', 'retail', 'dfl', 'cpi', 'crude', 'pork-belly', 'gas', 'money-fx', 'corn', 'tapioca', 'palladium', 'lei', 'cornglutenfeed', 'sunseed', 'potato', 'silver', 'sugar', 'grain', 'groundnut', 'naphtha', 'orange', 'soybean', 'coconut', 'stg', 'cotton', 'yen', 'rape-oil', 'palm-oil', 'oat', 'reserves', 'housing', 'interest', 'coffee', 'fuel', 'austdlr', 'money-supply', 'heat', 'fishmeal', 'bop', 'nickel', 'nzdlr'} ``` ## Training procedure [EDA on Reuters-21578 dataset](https://github.com/LxYuan0420/nlp/blob/main/notebooks/eda_reuters.ipynb): This notebook provides an Exploratory Data Analysis (EDA) of the Reuters-21578 dataset. It includes visualizations and statistical summaries that offer insights into the dataset's structure, label distribution, and text characteristics. [Reuters Baseline Scikit-Learn Model](https://github.com/LxYuan0420/nlp/blob/main/notebooks/scikit_learn_reuters.ipynb): This notebook establishes a baseline model for text classification on the Reuters-21578 dataset using scikit-learn. It guides you through data preprocessing, feature extraction, model training, and evaluation. [Reuters Transformer Model](https://github.com/LxYuan0420/nlp/blob/main/notebooks/transformer_reuters.ipynb): This notebook delves into advanced text classification using a Transformer model on the Reuters-21578 dataset. 
It covers the implementation details, training process, and performance metrics of using Transformer-based models for this specific task. [Multilabel Stratified Sampling & Hypyerparameter Search on Reuters Dataset](https://github.com/LxYuan0420/nlp/blob/main/notebooks/transformer_reuters_hyperparameter_tuning.ipynb): In this notebook, we explore advanced machine learning techniques through the lens of the Hugging Face Trainer API, specifically targeting Multilabel Iterative Stratified Splitting and Hyperparameter Search. The former aims to fairly distribute imbalanced datasets across multiple labels in k-fold cross-validation, maintaining a distribution closely resembling that of the complete dataset. The latter walks users through a structured hyperparameter search to fine-tune model performance for optimal results. ## Evaluation results <details> <summary>Transformer Model Evaluation Result</summary> Classification Report: precision recall f1-score support acq 0.97 0.93 0.95 719 alum 1.00 0.70 0.82 23 austdlr 0.00 0.00 0.00 0 barley 1.00 0.50 0.67 12 bop 0.79 0.50 0.61 30 can 0.00 0.00 0.00 0 carcass 0.67 0.67 0.67 18 cocoa 1.00 1.00 1.00 18 coconut 0.00 0.00 0.00 2 coconut-oil 0.00 0.00 0.00 2 coffee 0.86 0.89 0.87 27 copper 1.00 0.78 0.88 18 copra-cake 0.00 0.00 0.00 1 corn 0.84 0.87 0.86 55 cornglutenfeed 0.00 0.00 0.00 0 cotton 0.92 0.67 0.77 18 cpi 0.86 0.43 0.57 28 cpu 0.00 0.00 0.00 1 crude 0.87 0.93 0.90 189 dfl 0.00 0.00 0.00 1 dlr 0.72 0.64 0.67 44 dmk 0.00 0.00 0.00 4 earn 0.98 0.99 0.98 1087 fishmeal 0.00 0.00 0.00 0 fuel 0.00 0.00 0.00 10 gas 0.80 0.71 0.75 17 gnp 0.79 0.66 0.72 35 gold 0.95 0.67 0.78 30 grain 0.94 0.92 0.93 146 groundnut 0.00 0.00 0.00 4 heat 0.00 0.00 0.00 5 hog 1.00 0.33 0.50 6 housing 0.00 0.00 0.00 4 income 0.00 0.00 0.00 7 instal-debt 0.00 0.00 0.00 1 interest 0.89 0.67 0.77 131 inventories 0.00 0.00 0.00 0 ipi 1.00 0.58 0.74 12 iron-steel 0.90 0.64 0.75 14 jet 0.00 0.00 0.00 1 jobs 0.92 0.57 0.71 21 l-cattle 0.00 0.00 0.00 2 lead 0.00 0.00 0.00 14 lei 0.00 0.00 0.00 3 linseed 0.00 0.00 0.00 0 livestock 0.63 0.79 0.70 24 lumber 0.00 0.00 0.00 6 meal-feed 0.00 0.00 0.00 17 money-fx 0.78 0.81 0.80 177 money-supply 0.80 0.71 0.75 34 naphtha 0.00 0.00 0.00 4 nat-gas 0.82 0.60 0.69 30 nickel 0.00 0.00 0.00 1 nzdlr 0.00 0.00 0.00 2 oat 0.00 0.00 0.00 4 oilseed 0.64 0.61 0.63 44 orange 1.00 0.36 0.53 11 palladium 0.00 0.00 0.00 1 palm-oil 1.00 0.56 0.71 9 palmkernel 0.00 0.00 0.00 1 pet-chem 0.00 0.00 0.00 12 platinum 0.00 0.00 0.00 7 plywood 0.00 0.00 0.00 0 pork-belly 0.00 0.00 0.00 0 potato 0.00 0.00 0.00 3 propane 0.00 0.00 0.00 3 rand 0.00 0.00 0.00 1 rape-oil 0.00 0.00 0.00 1 rapeseed 0.00 0.00 0.00 8 reserves 0.83 0.56 0.67 18 retail 0.00 0.00 0.00 2 rice 1.00 0.57 0.72 23 rubber 0.82 0.75 0.78 12 saudriyal 0.00 0.00 0.00 0 ship 0.95 0.81 0.87 89 silver 1.00 0.12 0.22 8 sorghum 1.00 0.12 0.22 8 soy-meal 0.00 0.00 0.00 12 soy-oil 0.00 0.00 0.00 8 soybean 0.72 0.56 0.63 32 stg 0.00 0.00 0.00 0 strategic-metal 0.00 0.00 0.00 11 sugar 1.00 0.80 0.89 35 sun-oil 0.00 0.00 0.00 0 sunseed 0.00 0.00 0.00 5 tapioca 0.00 0.00 0.00 0 tea 0.00 0.00 0.00 3 tin 1.00 0.42 0.59 12 trade 0.78 0.79 0.79 116 veg-oil 0.91 0.59 0.71 34 wheat 0.83 0.83 0.83 69 wool 0.00 0.00 0.00 0 wpi 0.00 0.00 0.00 10 yen 0.57 0.29 0.38 14 zinc 1.00 0.69 0.82 13 micro avg 0.92 0.81 0.86 3694 macro avg 0.41 0.30 0.33 3694 weighted avg 0.87 0.81 0.84 3694 samples avg 0.81 0.80 0.80 3694 </details> <details> <summary>Scikit-learn Baseline Model Evaluation Result</summary> Classification 
Report: precision recall f1-score support acq 0.98 0.87 0.92 719 alum 1.00 0.00 0.00 23 austdlr 1.00 1.00 1.00 0 barley 1.00 0.00 0.00 12 bop 1.00 0.30 0.46 30 can 1.00 1.00 1.00 0 carcass 1.00 0.06 0.11 18 cocoa 1.00 0.61 0.76 18 coconut 1.00 0.00 0.00 2 coconut-oil 1.00 0.00 0.00 2 coffee 0.94 0.59 0.73 27 copper 1.00 0.22 0.36 18 copra-cake 1.00 0.00 0.00 1 corn 0.97 0.51 0.67 55 cornglutenfeed 1.00 1.00 1.00 0 cotton 1.00 0.06 0.11 18 cpi 1.00 0.14 0.25 28 cpu 1.00 0.00 0.00 1 crude 0.94 0.69 0.80 189 dfl 1.00 0.00 0.00 1 dlr 0.86 0.43 0.58 44 dmk 1.00 0.00 0.00 4 earn 0.99 0.97 0.98 1087 fishmeal 1.00 1.00 1.00 0 fuel 1.00 0.00 0.00 10 gas 1.00 0.00 0.00 17 gnp 1.00 0.31 0.48 35 gold 0.83 0.17 0.28 30 grain 1.00 0.65 0.79 146 groundnut 1.00 0.00 0.00 4 heat 1.00 0.00 0.00 5 hog 1.00 0.00 0.00 6 housing 1.00 0.00 0.00 4 income 1.00 0.00 0.00 7 instal-debt 1.00 0.00 0.00 1 interest 0.88 0.40 0.55 131 inventories 1.00 1.00 1.00 0 ipi 1.00 0.00 0.00 12 iron-steel 1.00 0.00 0.00 14 jet 1.00 0.00 0.00 1 jobs 1.00 0.14 0.25 21 l-cattle 1.00 0.00 0.00 2 lead 1.00 0.00 0.00 14 lei 1.00 0.00 0.00 3 linseed 1.00 1.00 1.00 0 livestock 0.67 0.08 0.15 24 lumber 1.00 0.00 0.00 6 meal-feed 1.00 0.00 0.00 17 money-fx 0.80 0.50 0.62 177 money-supply 0.88 0.41 0.56 34 naphtha 1.00 0.00 0.00 4 nat-gas 1.00 0.27 0.42 30 nickel 1.00 0.00 0.00 1 nzdlr 1.00 0.00 0.00 2 oat 1.00 0.00 0.00 4 oilseed 0.62 0.11 0.19 44 orange 1.00 0.00 0.00 11 palladium 1.00 0.00 0.00 1 palm-oil 1.00 0.22 0.36 9 palmkernel 1.00 0.00 0.00 1 pet-chem 1.00 0.00 0.00 12 platinum 1.00 0.00 0.00 7 plywood 1.00 1.00 1.00 0 pork-belly 1.00 1.00 1.00 0 potato 1.00 0.00 0.00 3 propane 1.00 0.00 0.00 3 rand 1.00 0.00 0.00 1 rape-oil 1.00 0.00 0.00 1 rapeseed 1.00 0.00 0.00 8 reserves 1.00 0.00 0.00 18 retail 1.00 0.00 0.00 2 rice 1.00 0.00 0.00 23 rubber 1.00 0.17 0.29 12 saudriyal 1.00 1.00 1.00 0 ship 0.92 0.26 0.40 89 silver 1.00 0.00 0.00 8 sorghum 1.00 0.00 0.00 8 soy-meal 1.00 0.00 0.00 12 soy-oil 1.00 0.00 0.00 8 soybean 1.00 0.16 0.27 32 stg 1.00 1.00 1.00 0 strategic-metal 1.00 0.00 0.00 11 sugar 1.00 0.60 0.75 35 sun-oil 1.00 1.00 1.00 0 sunseed 1.00 0.00 0.00 5 tapioca 1.00 1.00 1.00 0 tea 1.00 0.00 0.00 3 tin 1.00 0.00 0.00 12 trade 0.92 0.61 0.74 116 veg-oil 1.00 0.12 0.21 34 wheat 0.97 0.55 0.70 69 wool 1.00 1.00 1.00 0 wpi 1.00 0.00 0.00 10 yen 1.00 0.00 0.00 14 zinc 1.00 0.00 0.00 13 micro avg 0.97 0.64 0.77 3694 macro avg 0.98 0.25 0.29 3694 weighted avg 0.96 0.64 0.70 3694 samples avg 0.98 0.74 0.75 3694 </details> ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 2e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 20 ### Training results | Training Loss | Epoch | Step | Validation Loss | F1 | Roc Auc | Accuracy | | :-----------: | :---: | :--: | :-------------: | :----: | :-----: | :------: | | 0.1801 | 1.0 | 300 | 0.0439 | 0.3896 | 0.6210 | 0.3566 | | 0.0345 | 2.0 | 600 | 0.0287 | 0.6289 | 0.7318 | 0.5954 | | 0.0243 | 3.0 | 900 | 0.0219 | 0.6721 | 0.7579 | 0.6084 | | 0.0178 | 4.0 | 1200 | 0.0177 | 0.7505 | 0.8128 | 0.6908 | | 0.014 | 5.0 | 1500 | 0.0151 | 0.7905 | 0.8376 | 0.7278 | | 0.0115 | 6.0 | 1800 | 0.0135 | 0.8132 | 0.8589 | 0.7555 | | 0.0096 | 7.0 | 2100 | 0.0124 | 0.8291 | 0.8727 | 0.7725 | | 0.0082 | 8.0 | 2400 | 0.0124 | 0.8335 | 0.8757 | 0.7822 | | 0.0071 | 9.0 | 2700 | 0.0119 | 0.8392 | 0.8847 | 0.7883 | | 0.0064 | 10.0 | 3000 | 0.0123 | 0.8339 | 
0.8810 | 0.7828 | | 0.0058 | 11.0 | 3300 | 0.0114 | 0.8538 | 0.8999 | 0.8047 | | 0.0053 | 12.0 | 3600 | 0.0113 | 0.8525 | 0.8967 | 0.8044 | | 0.0048 | 13.0 | 3900 | 0.0115 | 0.8520 | 0.8982 | 0.8029 | | 0.0045 | 14.0 | 4200 | 0.0111 | 0.8566 | 0.8962 | 0.8104 | | 0.0042 | 15.0 | 4500 | 0.0110 | 0.8610 | 0.9060 | 0.8165 | | 0.0039 | 16.0 | 4800 | 0.0112 | 0.8583 | 0.9021 | 0.8138 | | 0.0037 | 17.0 | 5100 | 0.0110 | 0.8620 | 0.9055 | 0.8196 | | 0.0035 | 18.0 | 5400 | 0.0110 | 0.8629 | 0.9063 | 0.8196 | | 0.0035 | 19.0 | 5700 | 0.0111 | 0.8624 | 0.9062 | 0.8180 | | 0.0034 | 20.0 | 6000 | 0.0111 | 0.8626 | 0.9055 | 0.8177 | ### Framework versions - Transformers 4.33.0.dev0 - Pytorch 2.0.1+cu117 - Datasets 2.14.3 - Tokenizers 0.13.3
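The hyperparameters listed above correspond roughly to the following Hugging Face `TrainingArguments`. This is an approximate, illustrative reconstruction rather than the exact training script; the output directory name and the per-epoch evaluation strategy are assumptions.

```python
# Approximate reconstruction of the listed hyperparameters; illustrative only,
# not the exact training script used for this model.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="distilbert-finetuned-reuters21578-multilabel",  # assumed name
    learning_rate=2e-5,
    per_device_train_batch_size=32,
    per_device_eval_batch_size=32,
    seed=42,
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    lr_scheduler_type="linear",
    num_train_epochs=20,
    evaluation_strategy="epoch",  # assumption: validation metrics above are reported per epoch
)
```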
[ "TEXT_CLASSIFICATION" ]
[ "CPI" ]
sdadas/mmlw-e5-large
sdadas
sentence-similarity
[ "sentence-transformers", "pytorch", "safetensors", "xlm-roberta", "feature-extraction", "sentence-similarity", "transformers", "mteb", "pl", "arxiv:2402.13350", "license:apache-2.0", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
2023-11-17T18:51:09
2024-11-05T16:53:25
103
0
--- language: pl license: apache-2.0 pipeline_tag: sentence-similarity tags: - sentence-transformers - feature-extraction - sentence-similarity - transformers - mteb widget: - source_sentence: 'query: Jak dożyć 100 lat?' sentences: - 'passage: Trzeba zdrowo się odżywiać i uprawiać sport.' - 'passage: Trzeba pić alkohol, imprezować i jeździć szybkimi autami.' - 'passage: Gdy trwała kampania politycy zapewniali, że rozprawią się z zakazem niedzielnego handlu.' model-index: - name: mmlw-e5-large results: - task: type: Clustering dataset: name: MTEB 8TagsClustering type: PL-MTEB/8tags-clustering config: default split: test revision: None metrics: - type: v_measure value: 30.623921415441725 - task: type: Classification dataset: name: MTEB AllegroReviews type: PL-MTEB/allegro-reviews config: default split: test revision: None metrics: - type: accuracy value: 37.683896620278325 - type: f1 value: 34.19193027014284 - task: type: Retrieval dataset: name: MTEB ArguAna-PL type: arguana-pl config: default split: test revision: None metrics: - type: map_at_1 value: 38.407000000000004 - type: map_at_10 value: 55.147 - type: map_at_100 value: 55.757 - type: map_at_1000 value: 55.761 - type: map_at_3 value: 51.268 - type: map_at_5 value: 53.696999999999996 - type: mrr_at_1 value: 40.043 - type: mrr_at_10 value: 55.840999999999994 - type: mrr_at_100 value: 56.459 - type: mrr_at_1000 value: 56.462999999999994 - type: mrr_at_3 value: 52.074 - type: mrr_at_5 value: 54.364999999999995 - type: ndcg_at_1 value: 38.407000000000004 - type: ndcg_at_10 value: 63.248000000000005 - type: ndcg_at_100 value: 65.717 - type: ndcg_at_1000 value: 65.79 - type: ndcg_at_3 value: 55.403999999999996 - type: ndcg_at_5 value: 59.760000000000005 - type: precision_at_1 value: 38.407000000000004 - type: precision_at_10 value: 8.862 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 22.451 - type: precision_at_5 value: 15.576 - type: recall_at_1 value: 38.407000000000004 - type: recall_at_10 value: 88.62 - type: recall_at_100 value: 99.075 - type: recall_at_1000 value: 99.57300000000001 - type: recall_at_3 value: 67.354 - type: recall_at_5 value: 77.881 - task: type: Classification dataset: name: MTEB CBD type: PL-MTEB/cbd config: default split: test revision: None metrics: - type: accuracy value: 66.14999999999999 - type: ap value: 21.69513674684204 - type: f1 value: 56.48142830893528 - task: type: PairClassification dataset: name: MTEB CDSC-E type: PL-MTEB/cdsce-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 89.4 - type: cos_sim_ap value: 76.83228768203222 - type: cos_sim_f1 value: 65.3658536585366 - type: cos_sim_precision value: 60.909090909090914 - type: cos_sim_recall value: 70.52631578947368 - type: dot_accuracy value: 84.1 - type: dot_ap value: 57.26072201751864 - type: dot_f1 value: 62.75395033860045 - type: dot_precision value: 54.9407114624506 - type: dot_recall value: 73.15789473684211 - type: euclidean_accuracy value: 89.4 - type: euclidean_ap value: 76.59095263388942 - type: euclidean_f1 value: 65.21739130434783 - type: euclidean_precision value: 60.26785714285714 - type: euclidean_recall value: 71.05263157894737 - type: manhattan_accuracy value: 89.4 - type: manhattan_ap value: 76.58825999753456 - type: manhattan_f1 value: 64.72019464720195 - type: manhattan_precision value: 60.18099547511312 - type: manhattan_recall value: 70.0 - type: max_accuracy value: 89.4 - type: max_ap value: 76.83228768203222 - type: 
max_f1 value: 65.3658536585366 - task: type: STS dataset: name: MTEB CDSC-R type: PL-MTEB/cdscr-sts config: default split: test revision: None metrics: - type: cos_sim_pearson value: 93.73949495291659 - type: cos_sim_spearman value: 93.50397366192922 - type: euclidean_pearson value: 92.47498888987636 - type: euclidean_spearman value: 93.39315936230747 - type: manhattan_pearson value: 92.47250250777654 - type: manhattan_spearman value: 93.36739690549109 - task: type: Retrieval dataset: name: MTEB DBPedia-PL type: dbpedia-pl config: default split: test revision: None metrics: - type: map_at_1 value: 8.434 - type: map_at_10 value: 18.424 - type: map_at_100 value: 26.428 - type: map_at_1000 value: 28.002 - type: map_at_3 value: 13.502 - type: map_at_5 value: 15.577 - type: mrr_at_1 value: 63.0 - type: mrr_at_10 value: 72.714 - type: mrr_at_100 value: 73.021 - type: mrr_at_1000 value: 73.028 - type: mrr_at_3 value: 70.75 - type: mrr_at_5 value: 72.3 - type: ndcg_at_1 value: 52.75 - type: ndcg_at_10 value: 39.839999999999996 - type: ndcg_at_100 value: 44.989000000000004 - type: ndcg_at_1000 value: 52.532999999999994 - type: ndcg_at_3 value: 45.198 - type: ndcg_at_5 value: 42.015 - type: precision_at_1 value: 63.0 - type: precision_at_10 value: 31.05 - type: precision_at_100 value: 10.26 - type: precision_at_1000 value: 1.9879999999999998 - type: precision_at_3 value: 48.25 - type: precision_at_5 value: 40.45 - type: recall_at_1 value: 8.434 - type: recall_at_10 value: 24.004 - type: recall_at_100 value: 51.428 - type: recall_at_1000 value: 75.712 - type: recall_at_3 value: 15.015 - type: recall_at_5 value: 18.282999999999998 - task: type: Retrieval dataset: name: MTEB FiQA-PL type: fiqa-pl config: default split: test revision: None metrics: - type: map_at_1 value: 19.088 - type: map_at_10 value: 31.818 - type: map_at_100 value: 33.689 - type: map_at_1000 value: 33.86 - type: map_at_3 value: 27.399 - type: map_at_5 value: 29.945 - type: mrr_at_1 value: 38.117000000000004 - type: mrr_at_10 value: 47.668 - type: mrr_at_100 value: 48.428 - type: mrr_at_1000 value: 48.475 - type: mrr_at_3 value: 45.242 - type: mrr_at_5 value: 46.716 - type: ndcg_at_1 value: 38.272 - type: ndcg_at_10 value: 39.903 - type: ndcg_at_100 value: 46.661 - type: ndcg_at_1000 value: 49.625 - type: ndcg_at_3 value: 35.921 - type: ndcg_at_5 value: 37.558 - type: precision_at_1 value: 38.272 - type: precision_at_10 value: 11.358 - type: precision_at_100 value: 1.8190000000000002 - type: precision_at_1000 value: 0.23500000000000001 - type: precision_at_3 value: 24.434 - type: precision_at_5 value: 18.395 - type: recall_at_1 value: 19.088 - type: recall_at_10 value: 47.355999999999995 - type: recall_at_100 value: 72.451 - type: recall_at_1000 value: 90.257 - type: recall_at_3 value: 32.931 - type: recall_at_5 value: 39.878 - task: type: Retrieval dataset: name: MTEB HotpotQA-PL type: hotpotqa-pl config: default split: test revision: None metrics: - type: map_at_1 value: 39.095 - type: map_at_10 value: 62.529 - type: map_at_100 value: 63.425 - type: map_at_1000 value: 63.483000000000004 - type: map_at_3 value: 58.887 - type: map_at_5 value: 61.18599999999999 - type: mrr_at_1 value: 78.123 - type: mrr_at_10 value: 84.231 - type: mrr_at_100 value: 84.408 - type: mrr_at_1000 value: 84.414 - type: mrr_at_3 value: 83.286 - type: mrr_at_5 value: 83.94 - type: ndcg_at_1 value: 78.19 - type: ndcg_at_10 value: 70.938 - type: ndcg_at_100 value: 73.992 - type: ndcg_at_1000 value: 75.1 - type: ndcg_at_3 value: 65.863 - type: ndcg_at_5 value: 
68.755 - type: precision_at_1 value: 78.19 - type: precision_at_10 value: 14.949000000000002 - type: precision_at_100 value: 1.733 - type: precision_at_1000 value: 0.188 - type: precision_at_3 value: 42.381 - type: precision_at_5 value: 27.711000000000002 - type: recall_at_1 value: 39.095 - type: recall_at_10 value: 74.747 - type: recall_at_100 value: 86.631 - type: recall_at_1000 value: 93.923 - type: recall_at_3 value: 63.571999999999996 - type: recall_at_5 value: 69.27799999999999 - task: type: Retrieval dataset: name: MTEB MSMARCO-PL type: msmarco-pl config: default split: validation revision: None metrics: - type: map_at_1 value: 19.439999999999998 - type: map_at_10 value: 30.264000000000003 - type: map_at_100 value: 31.438 - type: map_at_1000 value: 31.495 - type: map_at_3 value: 26.735 - type: map_at_5 value: 28.716 - type: mrr_at_1 value: 19.914 - type: mrr_at_10 value: 30.753999999999998 - type: mrr_at_100 value: 31.877 - type: mrr_at_1000 value: 31.929000000000002 - type: mrr_at_3 value: 27.299 - type: mrr_at_5 value: 29.254 - type: ndcg_at_1 value: 20.014000000000003 - type: ndcg_at_10 value: 36.472 - type: ndcg_at_100 value: 42.231 - type: ndcg_at_1000 value: 43.744 - type: ndcg_at_3 value: 29.268 - type: ndcg_at_5 value: 32.79 - type: precision_at_1 value: 20.014000000000003 - type: precision_at_10 value: 5.814 - type: precision_at_100 value: 0.8710000000000001 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 12.426 - type: precision_at_5 value: 9.238 - type: recall_at_1 value: 19.439999999999998 - type: recall_at_10 value: 55.535000000000004 - type: recall_at_100 value: 82.44399999999999 - type: recall_at_1000 value: 94.217 - type: recall_at_3 value: 35.963 - type: recall_at_5 value: 44.367000000000004 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.01412239408205 - type: f1 value: 70.04544187503352 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.26899798251513 - type: f1 value: 75.55876166863844 - task: type: Retrieval dataset: name: MTEB NFCorpus-PL type: nfcorpus-pl config: default split: test revision: None metrics: - type: map_at_1 value: 5.772 - type: map_at_10 value: 12.708 - type: map_at_100 value: 16.194 - type: map_at_1000 value: 17.630000000000003 - type: map_at_3 value: 9.34 - type: map_at_5 value: 10.741 - type: mrr_at_1 value: 43.344 - type: mrr_at_10 value: 53.429 - type: mrr_at_100 value: 53.88699999999999 - type: mrr_at_1000 value: 53.925 - type: mrr_at_3 value: 51.342 - type: mrr_at_5 value: 52.456 - type: ndcg_at_1 value: 41.641 - type: ndcg_at_10 value: 34.028000000000006 - type: ndcg_at_100 value: 31.613000000000003 - type: ndcg_at_1000 value: 40.428 - type: ndcg_at_3 value: 38.991 - type: ndcg_at_5 value: 36.704 - type: precision_at_1 value: 43.034 - type: precision_at_10 value: 25.324999999999996 - type: precision_at_100 value: 7.889 - type: precision_at_1000 value: 2.069 - type: precision_at_3 value: 36.739 - type: precision_at_5 value: 32.074000000000005 - type: recall_at_1 value: 5.772 - type: recall_at_10 value: 16.827 - type: recall_at_100 value: 32.346000000000004 - type: recall_at_1000 value: 62.739 - type: recall_at_3 value: 10.56 - type: recall_at_5 value: 12.655 - task: 
type: Retrieval dataset: name: MTEB NQ-PL type: nq-pl config: default split: test revision: None metrics: - type: map_at_1 value: 26.101000000000003 - type: map_at_10 value: 39.912 - type: map_at_100 value: 41.037 - type: map_at_1000 value: 41.077000000000005 - type: map_at_3 value: 35.691 - type: map_at_5 value: 38.155 - type: mrr_at_1 value: 29.403000000000002 - type: mrr_at_10 value: 42.376999999999995 - type: mrr_at_100 value: 43.248999999999995 - type: mrr_at_1000 value: 43.277 - type: mrr_at_3 value: 38.794000000000004 - type: mrr_at_5 value: 40.933 - type: ndcg_at_1 value: 29.519000000000002 - type: ndcg_at_10 value: 47.33 - type: ndcg_at_100 value: 52.171 - type: ndcg_at_1000 value: 53.125 - type: ndcg_at_3 value: 39.316 - type: ndcg_at_5 value: 43.457 - type: precision_at_1 value: 29.519000000000002 - type: precision_at_10 value: 8.03 - type: precision_at_100 value: 1.075 - type: precision_at_1000 value: 0.117 - type: precision_at_3 value: 18.009 - type: precision_at_5 value: 13.221 - type: recall_at_1 value: 26.101000000000003 - type: recall_at_10 value: 67.50399999999999 - type: recall_at_100 value: 88.64699999999999 - type: recall_at_1000 value: 95.771 - type: recall_at_3 value: 46.669 - type: recall_at_5 value: 56.24 - task: type: Classification dataset: name: MTEB PAC type: laugustyniak/abusive-clauses-pl config: default split: test revision: None metrics: - type: accuracy value: 63.76773819866782 - type: ap value: 74.87896817642536 - type: f1 value: 61.420506092721425 - task: type: PairClassification dataset: name: MTEB PPC type: PL-MTEB/ppc-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 82.1 - type: cos_sim_ap value: 91.09417013497443 - type: cos_sim_f1 value: 84.78437754271766 - type: cos_sim_precision value: 83.36 - type: cos_sim_recall value: 86.25827814569537 - type: dot_accuracy value: 75.9 - type: dot_ap value: 86.82680649789796 - type: dot_f1 value: 80.5379746835443 - type: dot_precision value: 77.12121212121212 - type: dot_recall value: 84.27152317880795 - type: euclidean_accuracy value: 81.6 - type: euclidean_ap value: 90.81248760600693 - type: euclidean_f1 value: 84.35374149659863 - type: euclidean_precision value: 86.7132867132867 - type: euclidean_recall value: 82.11920529801324 - type: manhattan_accuracy value: 81.6 - type: manhattan_ap value: 90.81272803548767 - type: manhattan_f1 value: 84.33530906011855 - type: manhattan_precision value: 86.30849220103987 - type: manhattan_recall value: 82.45033112582782 - type: max_accuracy value: 82.1 - type: max_ap value: 91.09417013497443 - type: max_f1 value: 84.78437754271766 - task: type: PairClassification dataset: name: MTEB PSC type: PL-MTEB/psc-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 98.05194805194806 - type: cos_sim_ap value: 99.52709687103496 - type: cos_sim_f1 value: 96.83257918552036 - type: cos_sim_precision value: 95.82089552238806 - type: cos_sim_recall value: 97.86585365853658 - type: dot_accuracy value: 92.30055658627087 - type: dot_ap value: 94.12759311032353 - type: dot_f1 value: 87.00906344410878 - type: dot_precision value: 86.22754491017965 - type: dot_recall value: 87.8048780487805 - type: euclidean_accuracy value: 98.05194805194806 - type: euclidean_ap value: 99.49402675624125 - type: euclidean_f1 value: 96.8133535660091 - type: euclidean_precision value: 96.37462235649546 - type: euclidean_recall value: 97.2560975609756 - type: manhattan_accuracy value: 98.05194805194806 - type: 
manhattan_ap value: 99.50120505935962 - type: manhattan_f1 value: 96.8133535660091 - type: manhattan_precision value: 96.37462235649546 - type: manhattan_recall value: 97.2560975609756 - type: max_accuracy value: 98.05194805194806 - type: max_ap value: 99.52709687103496 - type: max_f1 value: 96.83257918552036 - task: type: Classification dataset: name: MTEB PolEmo2.0-IN type: PL-MTEB/polemo2_in config: default split: test revision: None metrics: - type: accuracy value: 69.45983379501385 - type: f1 value: 68.60917948426784 - task: type: Classification dataset: name: MTEB PolEmo2.0-OUT type: PL-MTEB/polemo2_out config: default split: test revision: None metrics: - type: accuracy value: 43.13765182186235 - type: f1 value: 36.15557441785656 - task: type: Retrieval dataset: name: MTEB Quora-PL type: quora-pl config: default split: test revision: None metrics: - type: map_at_1 value: 67.448 - type: map_at_10 value: 81.566 - type: map_at_100 value: 82.284 - type: map_at_1000 value: 82.301 - type: map_at_3 value: 78.425 - type: map_at_5 value: 80.43400000000001 - type: mrr_at_1 value: 77.61 - type: mrr_at_10 value: 84.467 - type: mrr_at_100 value: 84.63199999999999 - type: mrr_at_1000 value: 84.634 - type: mrr_at_3 value: 83.288 - type: mrr_at_5 value: 84.095 - type: ndcg_at_1 value: 77.66 - type: ndcg_at_10 value: 85.63199999999999 - type: ndcg_at_100 value: 87.166 - type: ndcg_at_1000 value: 87.306 - type: ndcg_at_3 value: 82.32300000000001 - type: ndcg_at_5 value: 84.22 - type: precision_at_1 value: 77.66 - type: precision_at_10 value: 13.136000000000001 - type: precision_at_100 value: 1.522 - type: precision_at_1000 value: 0.156 - type: precision_at_3 value: 36.153 - type: precision_at_5 value: 23.982 - type: recall_at_1 value: 67.448 - type: recall_at_10 value: 93.83200000000001 - type: recall_at_100 value: 99.212 - type: recall_at_1000 value: 99.94 - type: recall_at_3 value: 84.539 - type: recall_at_5 value: 89.71000000000001 - task: type: Retrieval dataset: name: MTEB SCIDOCS-PL type: scidocs-pl config: default split: test revision: None metrics: - type: map_at_1 value: 4.393 - type: map_at_10 value: 11.472 - type: map_at_100 value: 13.584999999999999 - type: map_at_1000 value: 13.918 - type: map_at_3 value: 8.212 - type: map_at_5 value: 9.864 - type: mrr_at_1 value: 21.7 - type: mrr_at_10 value: 32.268 - type: mrr_at_100 value: 33.495000000000005 - type: mrr_at_1000 value: 33.548 - type: mrr_at_3 value: 29.15 - type: mrr_at_5 value: 30.91 - type: ndcg_at_1 value: 21.6 - type: ndcg_at_10 value: 19.126 - type: ndcg_at_100 value: 27.496 - type: ndcg_at_1000 value: 33.274 - type: ndcg_at_3 value: 18.196 - type: ndcg_at_5 value: 15.945 - type: precision_at_1 value: 21.6 - type: precision_at_10 value: 9.94 - type: precision_at_100 value: 2.1999999999999997 - type: precision_at_1000 value: 0.359 - type: precision_at_3 value: 17.2 - type: precision_at_5 value: 14.12 - type: recall_at_1 value: 4.393 - type: recall_at_10 value: 20.166999999999998 - type: recall_at_100 value: 44.678000000000004 - type: recall_at_1000 value: 72.868 - type: recall_at_3 value: 10.473 - type: recall_at_5 value: 14.313 - task: type: PairClassification dataset: name: MTEB SICK-E-PL type: PL-MTEB/sicke-pl-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 82.65389319200979 - type: cos_sim_ap value: 76.13749398520014 - type: cos_sim_f1 value: 66.64355062413314 - type: cos_sim_precision value: 64.93243243243244 - type: cos_sim_recall value: 68.44729344729345 - type: 
dot_accuracy value: 76.0905014268243 - type: dot_ap value: 58.058968583382494 - type: dot_f1 value: 61.181080324657145 - type: dot_precision value: 50.391885661595204 - type: dot_recall value: 77.84900284900284 - type: euclidean_accuracy value: 82.61312678353036 - type: euclidean_ap value: 76.10290283033221 - type: euclidean_f1 value: 66.50782845473111 - type: euclidean_precision value: 63.6897001303781 - type: euclidean_recall value: 69.58689458689459 - type: manhattan_accuracy value: 82.6742763962495 - type: manhattan_ap value: 76.12712309700966 - type: manhattan_f1 value: 66.59700452803902 - type: manhattan_precision value: 65.16700749829583 - type: manhattan_recall value: 68.09116809116809 - type: max_accuracy value: 82.6742763962495 - type: max_ap value: 76.13749398520014 - type: max_f1 value: 66.64355062413314 - task: type: STS dataset: name: MTEB SICK-R-PL type: PL-MTEB/sickr-pl-sts config: default split: test revision: None metrics: - type: cos_sim_pearson value: 81.23898481255246 - type: cos_sim_spearman value: 76.0416957474899 - type: euclidean_pearson value: 78.96475496102107 - type: euclidean_spearman value: 76.07208683063504 - type: manhattan_pearson value: 78.92666424673251 - type: manhattan_spearman value: 76.04968227583831 - task: type: STS dataset: name: MTEB STS22 (pl) type: mteb/sts22-crosslingual-sts config: pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 39.13987124398541 - type: cos_sim_spearman value: 40.40194528288759 - type: euclidean_pearson value: 29.14566247168167 - type: euclidean_spearman value: 39.97389932591777 - type: manhattan_pearson value: 29.172993134388935 - type: manhattan_spearman value: 39.85681935287037 - task: type: Retrieval dataset: name: MTEB SciFact-PL type: scifact-pl config: default split: test revision: None metrics: - type: map_at_1 value: 57.260999999999996 - type: map_at_10 value: 66.92399999999999 - type: map_at_100 value: 67.443 - type: map_at_1000 value: 67.47800000000001 - type: map_at_3 value: 64.859 - type: map_at_5 value: 65.71900000000001 - type: mrr_at_1 value: 60.333000000000006 - type: mrr_at_10 value: 67.95400000000001 - type: mrr_at_100 value: 68.42 - type: mrr_at_1000 value: 68.45 - type: mrr_at_3 value: 66.444 - type: mrr_at_5 value: 67.128 - type: ndcg_at_1 value: 60.333000000000006 - type: ndcg_at_10 value: 71.209 - type: ndcg_at_100 value: 73.37 - type: ndcg_at_1000 value: 74.287 - type: ndcg_at_3 value: 67.66799999999999 - type: ndcg_at_5 value: 68.644 - type: precision_at_1 value: 60.333000000000006 - type: precision_at_10 value: 9.467 - type: precision_at_100 value: 1.053 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 26.778000000000002 - type: precision_at_5 value: 16.933 - type: recall_at_1 value: 57.260999999999996 - type: recall_at_10 value: 83.256 - type: recall_at_100 value: 92.767 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 72.933 - type: recall_at_5 value: 75.744 - task: type: Retrieval dataset: name: MTEB TRECCOVID-PL type: trec-covid-pl config: default split: test revision: None metrics: - type: map_at_1 value: 0.22 - type: map_at_10 value: 1.693 - type: map_at_100 value: 9.281 - type: map_at_1000 value: 21.462999999999997 - type: map_at_3 value: 0.609 - type: map_at_5 value: 0.9570000000000001 - type: mrr_at_1 value: 80.0 - type: mrr_at_10 value: 88.73299999999999 - type: mrr_at_100 value: 88.73299999999999 - type: mrr_at_1000 value: 88.73299999999999 - type: mrr_at_3 value: 88.333 - type: 
mrr_at_5 value: 88.73299999999999 - type: ndcg_at_1 value: 79.0 - type: ndcg_at_10 value: 71.177 - type: ndcg_at_100 value: 52.479 - type: ndcg_at_1000 value: 45.333 - type: ndcg_at_3 value: 77.48 - type: ndcg_at_5 value: 76.137 - type: precision_at_1 value: 82.0 - type: precision_at_10 value: 74.0 - type: precision_at_100 value: 53.68000000000001 - type: precision_at_1000 value: 19.954 - type: precision_at_3 value: 80.667 - type: precision_at_5 value: 80.80000000000001 - type: recall_at_1 value: 0.22 - type: recall_at_10 value: 1.934 - type: recall_at_100 value: 12.728 - type: recall_at_1000 value: 41.869 - type: recall_at_3 value: 0.637 - type: recall_at_5 value: 1.042 --- <h1 align="center">MMLW-e5-large</h1> MMLW (muszę mieć lepszą wiadomość) are neural text encoders for Polish. This is a distilled model that can be used to generate embeddings applicable to many tasks such as semantic similarity, clustering, information retrieval. The model can also serve as a base for further fine-tuning. It transforms texts to 1024 dimensional vectors. The model was initialized with multilingual E5 checkpoint, and then trained with [multilingual knowledge distillation method](https://aclanthology.org/2020.emnlp-main.365/) on a diverse corpus of 60 million Polish-English text pairs. We utilised [English FlagEmbeddings (BGE)](https://huggingface.co/BAAI/bge-base-en) as teacher models for distillation. ## Usage (Sentence-Transformers) ⚠️ Our embedding models require the use of specific prefixes and suffixes when encoding texts. For this model, queries should be prefixed with **"query: "** and passages with **"passage: "** ⚠️ You can use the model like this with [sentence-transformers](https://www.SBERT.net): ```python from sentence_transformers import SentenceTransformer from sentence_transformers.util import cos_sim query_prefix = "query: " answer_prefix = "passage: " queries = [query_prefix + "Jak dożyć 100 lat?"] answers = [ answer_prefix + "Trzeba zdrowo się odżywiać i uprawiać sport.", answer_prefix + "Trzeba pić alkohol, imprezować i jeździć szybkimi autami.", answer_prefix + "Gdy trwała kampania politycy zapewniali, że rozprawią się z zakazem niedzielnego handlu." ] model = SentenceTransformer("sdadas/mmlw-e5-large") queries_emb = model.encode(queries, convert_to_tensor=True, show_progress_bar=False) answers_emb = model.encode(answers, convert_to_tensor=True, show_progress_bar=False) best_answer = cos_sim(queries_emb, answers_emb).argmax().item() print(answers[best_answer]) # Trzeba zdrowo się odżywiać i uprawiać sport. ``` ## Evaluation Results - The model achieves an **Average Score** of **61.17** on the Polish Massive Text Embedding Benchmark (MTEB). See [MTEB Leaderboard](https://huggingface.co/spaces/mteb/leaderboard) for detailed results. - The model achieves **NDCG@10** of **56.09** on the Polish Information Retrieval Benchmark. See [PIRB Leaderboard](https://huggingface.co/spaces/sdadas/pirb) for detailed results. ## Acknowledgements This model was trained with the A100 GPU cluster support delivered by the Gdansk University of Technology within the TASK center initiative. ## Citation ```bibtex @article{dadas2024pirb, title={{PIRB}: A Comprehensive Benchmark of Polish Dense and Hybrid Text Retrieval Methods}, author={Sławomir Dadas and Michał Perełkiewicz and Rafał Poświata}, year={2024}, eprint={2402.13350}, archivePrefix={arXiv}, primaryClass={cs.CL} } ```
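For readers who prefer the plain transformers API over sentence-transformers, the sketch below exercises the same "query: "/"passage: " prefix convention described in the usage section above with manual pooling. This is an illustrative sketch, not part of the official card: it assumes the `sdadas/mmlw-e5-large` checkpoint named above and E5-style mean pooling over non-padding tokens, which should be verified against the upstream documentation before relying on it.

```python
import torch
import torch.nn.functional as F
from transformers import AutoTokenizer, AutoModel

# Assumption: same checkpoint as in the sentence-transformers snippet above.
model_name = "sdadas/mmlw-e5-large"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModel.from_pretrained(model_name)
model.eval()

texts = [
    "query: Jak dożyć 100 lat?",
    "passage: Trzeba zdrowo się odżywiać i uprawiać sport.",
    "passage: Trzeba pić alkohol, imprezować i jeździć szybkimi autami.",
]
batch = tokenizer(texts, padding=True, truncation=True, return_tensors="pt")

with torch.no_grad():
    hidden = model(**batch).last_hidden_state  # (batch, seq_len, 1024)

# Mean-pool over real tokens only (mask out padding), then L2-normalize.
mask = batch["attention_mask"].unsqueeze(-1).float()
emb = (hidden * mask).sum(dim=1) / mask.sum(dim=1).clamp(min=1e-9)
emb = F.normalize(emb, p=2, dim=1)

# Cosine similarity of the query against each passage.
print(emb[0] @ emb[1], emb[0] @ emb[2])
```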
[ "SEMANTIC_SIMILARITY" ]
[ "SCIFACT" ]
RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf
RichardErkhov
null
[ "gguf", "arxiv:2101.00027", "arxiv:2201.07311", "endpoints_compatible", "region:us" ]
2024-11-07T01:19:19
2024-11-07T01:30:18
103
0
--- {} --- Quantization made by Richard Erkhov. [Github](https://github.com/RichardErkhov) [Discord](https://discord.gg/pvy7H8DZMG) [Request more models](https://github.com/RichardErkhov/quant_request) pythia-160m-v0 - GGUF - Model creator: https://huggingface.co/EleutherAI/ - Original model: https://huggingface.co/EleutherAI/pythia-160m-v0/ | Name | Quant method | Size | | ---- | ---- | ---- | | [pythia-160m-v0.Q2_K.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf/blob/main/pythia-160m-v0.Q2_K.gguf) | Q2_K | 0.07GB | | [pythia-160m-v0.Q3_K_S.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf/blob/main/pythia-160m-v0.Q3_K_S.gguf) | Q3_K_S | 0.08GB | | [pythia-160m-v0.Q3_K.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf/blob/main/pythia-160m-v0.Q3_K.gguf) | Q3_K | 0.09GB | | [pythia-160m-v0.Q3_K_M.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf/blob/main/pythia-160m-v0.Q3_K_M.gguf) | Q3_K_M | 0.09GB | | [pythia-160m-v0.Q3_K_L.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf/blob/main/pythia-160m-v0.Q3_K_L.gguf) | Q3_K_L | 0.09GB | | [pythia-160m-v0.IQ4_XS.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf/blob/main/pythia-160m-v0.IQ4_XS.gguf) | IQ4_XS | 0.09GB | | [pythia-160m-v0.Q4_0.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf/blob/main/pythia-160m-v0.Q4_0.gguf) | Q4_0 | 0.1GB | | [pythia-160m-v0.IQ4_NL.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf/blob/main/pythia-160m-v0.IQ4_NL.gguf) | IQ4_NL | 0.1GB | | [pythia-160m-v0.Q4_K_S.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf/blob/main/pythia-160m-v0.Q4_K_S.gguf) | Q4_K_S | 0.1GB | | [pythia-160m-v0.Q4_K.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf/blob/main/pythia-160m-v0.Q4_K.gguf) | Q4_K | 0.1GB | | [pythia-160m-v0.Q4_K_M.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf/blob/main/pythia-160m-v0.Q4_K_M.gguf) | Q4_K_M | 0.1GB | | [pythia-160m-v0.Q4_1.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf/blob/main/pythia-160m-v0.Q4_1.gguf) | Q4_1 | 0.1GB | | [pythia-160m-v0.Q5_0.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf/blob/main/pythia-160m-v0.Q5_0.gguf) | Q5_0 | 0.11GB | | [pythia-160m-v0.Q5_K_S.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf/blob/main/pythia-160m-v0.Q5_K_S.gguf) | Q5_K_S | 0.11GB | | [pythia-160m-v0.Q5_K.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf/blob/main/pythia-160m-v0.Q5_K.gguf) | Q5_K | 0.12GB | | [pythia-160m-v0.Q5_K_M.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf/blob/main/pythia-160m-v0.Q5_K_M.gguf) | Q5_K_M | 0.12GB | | [pythia-160m-v0.Q5_1.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf/blob/main/pythia-160m-v0.Q5_1.gguf) | Q5_1 | 0.12GB | | [pythia-160m-v0.Q6_K.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf/blob/main/pythia-160m-v0.Q6_K.gguf) | Q6_K | 0.13GB | | [pythia-160m-v0.Q8_0.gguf](https://huggingface.co/RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf/blob/main/pythia-160m-v0.Q8_0.gguf) | Q8_0 | 0.16GB | Original model description: --- language: - en tags: - pytorch - causal-lm - pythia - pythia_v0 license: apache-2.0 datasets: - the_pile --- The *Pythia Scaling Suite* is a collection of models 
developed to facilitate interpretability research. It contains two sets of eight models of sizes 70M, 160M, 410M, 1B, 1.4B, 2.8B, 6.9B, and 12B. For each size, there are two models: one trained on the Pile, and one trained on the Pile after the dataset has been globally deduplicated. All 8 model sizes are trained on the exact same data, in the exact same order. All Pythia models are available [on Hugging Face](https://huggingface.co/models?other=pythia). The Pythia model suite was deliberately designed to promote scientific research on large language models, especially interpretability research. Despite not centering downstream performance as a design goal, we find the models <a href="#evaluations">match or exceed</a> the performance of similar and same-sized models, such as those in the OPT and GPT-Neo suites. Please note that all models in the *Pythia* suite were renamed in January 2023. For clarity, a <a href="#naming-convention-and-parameter-count">table comparing the old and new names</a> is provided in this model card, together with exact parameter counts. ## Pythia-160M ### Model Details - Developed by: [EleutherAI](http://eleuther.ai) - Model type: Transformer-based Language Model - Language: English - Learn more: [Pythia's GitHub repository](https://github.com/EleutherAI/pythia) for training procedure, config files, and details on how to use. - Library: [GPT-NeoX](https://github.com/EleutherAI/gpt-neox) - License: Apache 2.0 - Contact: to ask questions about this model, join the [EleutherAI Discord](https://discord.gg/zBGx3azzUn), and post them in `#release-discussion`. Please read the existing *Pythia* documentation before asking about it in the EleutherAI Discord. For general correspondence: [contact@eleuther. ai](mailto:[email protected]). <figure> | Pythia model | Non-Embedding Params | Layers | Model Dim | Heads | Batch Size | Learning Rate | Equivalent Models | | -----------: | -------------------: | :----: | :-------: | :---: | :--------: | :-------------------: | :--------------------: | | 70M | 18,915,328 | 6 | 512 | 8 | 2M | 1.0 x 10<sup>-3</sup> | — | | 160M | 85,056,000 | 12 | 768 | 12 | 4M | 6.0 x 10<sup>-4</sup> | GPT-Neo 125M, OPT-125M | | 410M | 302,311,424 | 24 | 1024 | 16 | 4M | 3.0 x 10<sup>-4</sup> | OPT-350M | | 1.0B | 805,736,448 | 16 | 2048 | 8 | 2M | 3.0 x 10<sup>-4</sup> | — | | 1.4B | 1,208,602,624 | 24 | 2048 | 16 | 4M | 2.0 x 10<sup>-4</sup> | GPT-Neo 1.3B, OPT-1.3B | | 2.8B | 2,517,652,480 | 32 | 2560 | 32 | 2M | 1.6 x 10<sup>-4</sup> | GPT-Neo 2.7B, OPT-2.7B | | 6.9B | 6,444,163,072 | 32 | 4096 | 32 | 2M | 1.2 x 10<sup>-4</sup> | OPT-6.7B | | 12B | 11,327,027,200 | 36 | 5120 | 40 | 2M | 1.2 x 10<sup>-4</sup> | — | <figcaption>Engineering details for the <i>Pythia Suite</i>. Deduped and non-deduped models of a given size have the same hyperparameters. “Equivalent” models have <b>exactly</b> the same architecture, and the same number of non-embedding parameters.</figcaption> </figure> ### Uses and Limitations #### Intended Use The primary intended use of Pythia is research on the behavior, functionality, and limitations of large language models. This suite is intended to provide a controlled setting for performing scientific experiments. To enable the study of how language models change over the course of training, we provide 143 evenly spaced intermediate checkpoints per model. These checkpoints are hosted on Hugging Face as branches. Note that branch `143000` corresponds exactly to the model checkpoint on the `main` branch of each model. 
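Because the intermediate checkpoints are exposed as git branches, they can be enumerated programmatically. The sketch below is illustrative only: it queries the original `EleutherAI/pythia-160m-v0` repository with `huggingface_hub.list_repo_refs` and assumes the branches follow the `step<N>` naming described above.

```python
from huggingface_hub import list_repo_refs

# Assumption: the original EleutherAI repository exposes its intermediate
# checkpoints as branches named step0, step1000, ..., step143000.
refs = list_repo_refs("EleutherAI/pythia-160m-v0")
steps = sorted(
    (b.name for b in refs.branches if b.name.startswith("step")),
    key=lambda name: int(name.removeprefix("step")),
)
print(f"{len(steps)} checkpoint branches, e.g. {steps[:3]} ... {steps[-1]}")
```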
You may also further fine-tune and adapt Pythia-160M for deployment, as long as your use is in accordance with the Apache 2.0 license. Pythia models work with the Hugging Face [Transformers Library](https://huggingface.co/docs/transformers/index). If you decide to use pre-trained Pythia-160M as a basis for your fine-tuned model, please conduct your own risk and bias assessment.

#### Out-of-scope use

The Pythia Suite is **not** intended for deployment. It is not in itself a product and cannot be used for human-facing interactions.

Pythia models are English-language only, and are not suitable for translation or generating text in other languages.

Pythia-160M has not been fine-tuned for downstream contexts in which language models are commonly deployed, such as writing genre prose or commercial chatbots. This means Pythia-160M will **not** respond to a given prompt the way a product like ChatGPT does. This is because, unlike this model, ChatGPT was fine-tuned using methods such as Reinforcement Learning from Human Feedback (RLHF) to better “understand” human instructions.

#### Limitations and biases

The core functionality of a large language model is to take a string of text and predict the next token. The token deemed statistically most likely by the model need not produce the most “accurate” text. Never rely on Pythia-160M to produce factually accurate output.

This model was trained on [the Pile](https://pile.eleuther.ai/), a dataset known to contain profanity and texts that are lewd or otherwise offensive. See [Section 6 of the Pile paper](https://arxiv.org/abs/2101.00027) for a discussion of documented biases with regards to gender, religion, and race. Pythia-160M may produce socially unacceptable or undesirable text, *even if* the prompt itself does not include anything explicitly offensive.

If you plan on using text generated through, for example, the Hosted Inference API, we recommend having a human curate the outputs of this language model before presenting it to other people. Please inform your audience that the text was generated by Pythia-160M.

### Quickstart

Pythia models can be loaded and used via the following code, demonstrated here for the third `pythia-70m-deduped` checkpoint:

```python
from transformers import GPTNeoXForCausalLM, AutoTokenizer

# Load the model and tokenizer from a specific checkpoint branch (step3000).
model = GPTNeoXForCausalLM.from_pretrained(
    "EleutherAI/pythia-70m-deduped",
    revision="step3000",
    cache_dir="./pythia-70m-deduped/step3000",
)

tokenizer = AutoTokenizer.from_pretrained(
    "EleutherAI/pythia-70m-deduped",
    revision="step3000",
    cache_dir="./pythia-70m-deduped/step3000",
)

inputs = tokenizer("Hello, I am", return_tensors="pt")
tokens = model.generate(**inputs)
print(tokenizer.decode(tokens[0]))
```

Revision/branch `step143000` corresponds exactly to the model checkpoint on the `main` branch of each model.<br> For more information on how to use all Pythia models, see [documentation on GitHub](https://github.com/EleutherAI/pythia).

### Training

#### Training data

[The Pile](https://pile.eleuther.ai/) is an 825GiB general-purpose dataset in English. It was created by EleutherAI specifically for training large language models. It contains texts from 22 diverse sources, roughly broken down into five categories: academic writing (e.g. arXiv), internet (e.g. CommonCrawl), prose (e.g. Project Gutenberg), dialogue (e.g. YouTube subtitles), and miscellaneous (e.g. GitHub, Enron Emails). See [the Pile paper](https://arxiv.org/abs/2101.00027) for a breakdown of all data sources, methodology, and a discussion of ethical implications.
Consult [the datasheet](https://arxiv.org/abs/2201.07311) for more detailed documentation about the Pile and its component datasets. The Pile can be downloaded from the [official website](https://pile.eleuther.ai/), or from a [community mirror](https://the-eye.eu/public/AI/pile/).<br> The Pile was **not** deduplicated before being used to train Pythia-160M. #### Training procedure All models were trained on the exact same data, in the exact same order. Each model saw 299,892,736,000 tokens during training, and 143 checkpoints for each model are saved every 2,097,152,000 tokens, spaced evenly throughout training. This corresponds to training for just under 1 epoch on the Pile for non-deduplicated models, and about 1.5 epochs on the deduplicated Pile. All *Pythia* models trained for the equivalent of 143000 steps at a batch size of 2,097,152 tokens. Two batch sizes were used: 2M and 4M. Models with a batch size of 4M tokens listed were originally trained for 71500 steps instead, with checkpoints every 500 steps. The checkpoints on Hugging Face are renamed for consistency with all 2M batch models, so `step1000` is the first checkpoint for `pythia-1.4b` that was saved (corresponding to step 500 in training), and `step1000` is likewise the first `pythia-6.9b` checkpoint that was saved (corresponding to 1000 “actual” steps).<br> See [GitHub](https://github.com/EleutherAI/pythia) for more details on training procedure, including [how to reproduce it](https://github.com/EleutherAI/pythia/blob/main/README.md#reproducing-training).<br> Pythia uses the same tokenizer as [GPT-NeoX- 20B](https://huggingface.co/EleutherAI/gpt-neox-20b). ### Evaluations All 16 *Pythia* models were evaluated using the [LM Evaluation Harness](https://github.com/EleutherAI/lm-evaluation-harness). You can access the results by model and step at `results/json/*` in the [GitHub repository](https://github.com/EleutherAI/pythia/tree/main/results/json).<br> Expand the sections below to see plots of evaluation results for all Pythia and Pythia-deduped models compared with OPT and BLOOM. <details> <summary>LAMBADA – OpenAI</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/lambada_openai.png" style="width:auto"/> </details> <details> <summary>Physical Interaction: Question Answering (PIQA)</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/piqa.png" style="width:auto"/> </details> <details> <summary>WinoGrande</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/winogrande.png" style="width:auto"/> </details> <details> <summary>AI2 Reasoning Challenge—Challenge Set</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/arc_challenge.png" style="width:auto"/> </details> <details> <summary>SciQ</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/sciq.png" style="width:auto"/> </details> ### Naming convention and parameter count *Pythia* models were renamed in January 2023. It is possible that the old naming convention still persists in some documentation by accident. The current naming convention (70M, 160M, etc.) is based on total parameter count. 
<figure style="width:32em">

| current Pythia suffix | old suffix | total params   | non-embedding params |
| --------------------: | ---------: | -------------: | -------------------: |
| 70M                   | 19M        | 70,426,624     | 18,915,328           |
| 160M                  | 125M       | 162,322,944    | 85,056,000           |
| 410M                  | 350M       | 405,334,016    | 302,311,424          |
| 1B                    | 800M       | 1,011,781,632  | 805,736,448          |
| 1.4B                  | 1.3B       | 1,414,647,808  | 1,208,602,624        |
| 2.8B                  | 2.7B       | 2,775,208,960  | 2,517,652,480        |
| 6.9B                  | 6.7B       | 6,857,302,016  | 6,444,163,072        |
| 12B                   | 13B        | 11,846,072,320 | 11,327,027,200       |
</figure>
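Since this repository ships the model as GGUF quantizations (see the file table at the top of this card), a natural way to try one is through the llama-cpp-python bindings. The snippet below is a hedged sketch rather than an official recipe: it assumes the `Q4_K_M` file listed above and that your installed llama.cpp build supports the GPT-NeoX architecture used by Pythia.

```python
from huggingface_hub import hf_hub_download
from llama_cpp import Llama  # pip install llama-cpp-python

# Fetch one quantized file from the table at the top of this card.
gguf_path = hf_hub_download(
    repo_id="RichardErkhov/EleutherAI_-_pythia-160m-v0-gguf",
    filename="pythia-160m-v0.Q4_K_M.gguf",
)

# Load it and run a short completion (Pythia is a base model, not a chatbot).
llm = Llama(model_path=gguf_path, n_ctx=512)
out = llm("Hello, I am", max_tokens=32)
print(out["choices"][0]["text"])
```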
[ "QUESTION_ANSWERING", "TRANSLATION" ]
[ "SCIQ" ]
ngtoanrob/vien-translation
ngtoanrob
translation
[ "transformers", "pytorch", "tensorboard", "t5", "text2text-generation", "translation", "vi", "en", "dataset:ngtoanrob/vi-en-v1-dataset", "license:openrail", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
2023-02-12T16:19:33
2023-02-24T20:37:46
102
1
--- datasets: - ngtoanrob/vi-en-v1-dataset language: - vi - en license: openrail metrics: - bleu tags: - translation widget: - text: 'vi: Anh yêu em nhiều lắm' --- # EnViT5 Translation [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/mtet-multi-domain-translation-for-english/machine-translation-on-iwslt2015-english-1)](https://paperswithcode.com/sota/machine-translation-on-iwslt2015-english-1?p=mtet-multi-domain-translation-for-english) [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/mtet-multi-domain-translation-for-english-and/on-phomt)](https://paperswithcode.com/sota/on-phomt?p=mtet-multi-domain-translation-for-english-and) State-of-the-art English-Vietnamese and Vietnamese-English Translation models trained on [MTet](https://research.vietai.org/mtet/), [PhoMT](https://github.com/VinAIResearch/PhoMT). ```python from transformers import AutoTokenizer, AutoModelForSeq2SeqLM model_name = "ngtoanrob/vien-translation" tokenizer = AutoTokenizer.from_pretrained(model_name) model = AutoModelForSeq2SeqLM.from_pretrained(model_name) inputs = [ "vi: VietAI là tổ chức phi lợi nhuận với sứ mệnh ươm mầm tài năng về trí tuệ nhân tạo và xây dựng một cộng đồng các chuyên gia trong lĩnh vực trí tuệ nhân tạo đẳng cấp quốc tế tại Việt Nam.", "vi: Theo báo cáo mới nhất của Linkedin về danh sách việc làm triển vọng với mức lương hấp dẫn năm 2020, các chức danh công việc liên quan đến AI như Chuyên gia AI (Artificial Intelligence Specialist), Kỹ sư ML (Machine Learning Engineer) đều xếp thứ hạng cao.", "en: Our teams aspire to make discoveries that impact everyone, and core to our approach is sharing our research and tools to fuel progress in the field.", "en: We're on a journey to advance and democratize artificial intelligence through open source and open science." ] outputs = model.generate(tokenizer(inputs, return_tensors="pt", padding=True).input_ids.to('cuda'), max_length=512) print(tokenizer.batch_decode(outputs, skip_special_tokens=True)) # ['en: VietAI is a non-profit organization with the mission of nurturing artificial intelligence talents and building an international - class community of artificial intelligence experts in Vietnam.', # 'en: According to the latest LinkedIn report on the 2020 list of attractive and promising jobs, AI - related job titles such as AI Specialist, ML Engineer and ML Engineer all rank high.', # 'vi: Nhóm chúng tôi khao khát tạo ra những khám phá có ảnh hưởng đến mọi người, và cốt lõi trong cách tiếp cận của chúng tôi là chia sẻ nghiên cứu và công cụ để thúc đẩy sự tiến bộ trong lĩnh vực này.', # 'vi: Chúng ta đang trên hành trình tiến bộ và dân chủ hoá trí tuệ nhân tạo thông qua mã nguồn mở và khoa học mở.'] ``` ## Results ![image](https://user-images.githubusercontent.com/44376091/195998681-5860e443-2071-4048-8a2b-873dcee14a72.png) ## Citation ``` @misc{https://doi.org/10.48550/arxiv.2210.05610, doi = {10.48550/ARXIV.2210.05610}, author = {Ngo, Chinh and Trinh, Trieu H. and Phan, Long and Tran, Hieu and Dang, Tai and Nguyen, Hieu and Nguyen, Minh and Luong, Minh-Thang}, title = {MTet: Multi-domain Translation for English and Vietnamese}, publisher = {arXiv}, year = {2022}, } ```
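Note that the generation snippet above sends the tokenized inputs to CUDA without moving the model itself, so it assumes a GPU and an explicit `model.to('cuda')`. A device-agnostic sketch using the same "vi:"/"en:" prefix convention, offered here only as an illustration and not part of the original card, is:

```python
from transformers import pipeline

# Hedged sketch: relies on the "vi:" / "en:" prefix convention shown above;
# the pipeline handles tokenization, generation, and device placement.
translator = pipeline("text2text-generation", model="ngtoanrob/vien-translation")

print(translator("vi: Anh yêu em nhiều lắm", max_length=128)[0]["generated_text"])
print(translator(
    "en: We're on a journey to advance and democratize artificial intelligence "
    "through open source and open science.",
    max_length=128,
)[0]["generated_text"])
```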
[ "TRANSLATION" ]
[ "CHIA" ]