Dataset Viewer (auto-converted to Parquet)

Columns:
average_CPS (float64): overall score of a run; values in this split range from 30.2 to 54.7
config (dict): submission metadata (model name, few-shot setting, revision, parameter count, languages)
tasks (dict): per-task results, with per-prompt scores, their average, the best prompt, and the task-level CPS
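Because the split is auto-converted to Parquet, the rows below can be pulled programmatically with the `datasets` library. A minimal loading sketch; the repository id and split name are placeholders, not taken from this page.

```python
# Minimal loading sketch. The repository id and split name are placeholders
# (hypothetical); substitute the actual leaderboard dataset repo.
from datasets import load_dataset

ds = load_dataset("ORG/leaderboard-results", split="train")  # hypothetical id/split

row = ds[0]
print(row["average_CPS"])            # e.g. 42.69061
print(row["config"]["model_name"])   # e.g. "Almawave/Velvet-14B"
print(sorted(row["tasks"].keys()))   # task names: "admission-test", "faq", ...
```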
42.69061
{ "model_name": "Almawave/Velvet-14B", "num_fewshot": "5", "batch_size": 1, "model": "Almawave/Velvet-14B", "base_model": "MistralForCausalLM", "revision": "8b284698ed73862e5b70c66e1fc07d47b70118a5", "submitted_time": "2025-01-31T00:53:58", "num_params_billion": 14.08027136, "language": "en_fr_de_es_it_pt" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 43, "stderr": 0.0222 }, { "prompt": "prompt-2", "metric": "acc", "value": 43, "stderr": 0.0222 }, { "prompt": "prompt-3", "metric": "acc", "value": 60.199999999999996, "stderr": 0.0219 }, { "prompt": "prompt-4", "metric": "acc", "value": 61.199999999999996, "stderr": 0.0218 }, { "prompt": "prompt-5", "metric": "acc", "value": 46, "stderr": 0.0223 }, { "prompt": "prompt-6", "metric": "acc", "value": 45, "stderr": 0.0223 } ], "average_accuracy": 49.73333333333333, "best_prompt": 61.199999999999996, "prompt_id": "prompt-4", "CPS": 54.182399999999994 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 28.18, "stderr": 0.0225 }, { "prompt": "prompt-2", "metric": "acc", "value": 28.68, "stderr": 0.0226 }, { "prompt": "prompt-3", "metric": "acc", "value": 55.86, "stderr": 0.0248 }, { "prompt": "prompt-4", "metric": "acc", "value": 80.55, "stderr": 0.0198 }, { "prompt": "prompt-5", "metric": "acc", "value": 28.18, "stderr": 0.0225 }, { "prompt": "prompt-6", "metric": "acc", "value": 28.93, "stderr": 0.0227 } ], "average_accuracy": 41.73, "best_prompt": 80.55, "prompt_id": "prompt-4", "CPS": 49.28049 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 62.9, "stderr": 0.0147 }, { "prompt": "prompt-2", "metric": "f1", "value": 64.42, "stderr": 0.0142 }, { "prompt": "prompt-3", "metric": "f1", "value": 67.52, "stderr": 0.0125 }, { "prompt": "prompt-4", "metric": "f1", "value": 68.97, "stderr": 0.0126 }, { "prompt": "prompt-5", "metric": "f1", "value": 66.27, "stderr": 0.0133 }, { "prompt": "prompt-6", "metric": "f1", "value": 68.04, "stderr": 0.013 } ], "average_accuracy": 66.35333333333332, "best_prompt": 68.97, "prompt_id": "prompt-4", "CPS": 67.165285 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 9.45, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 8.97, "stderr": null } ], "average_accuracy": 9.21, "best_prompt": 9.45, "prompt_id": "prompt-1", "CPS": 9.42732 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 24.00793262239457, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 24.94484003877848, "stderr": null } ], "average_accuracy": 24.476386330586525, "best_prompt": 24.94484003877848, "prompt_id": "prompt-2", "CPS": 24.82798501061427 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 15.1, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 11.05, "stderr": null } ], "average_accuracy": 13.075, "best_prompt": 15.1, "prompt_id": "prompt-1", "CPS": 14.794224999999999 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 68.93, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 68.77, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 66.78, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 66.79, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 68.61, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 68.07, "stderr": null } ], "average_accuracy": 67.99166666666666, "best_prompt": 68.93, "prompt_id": "prompt-1", "CPS": 68.28320683333332 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 33.629999999999995, "stderr": 0.013 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 34.88, "stderr": 0.0121 } ], "average_accuracy": 
34.254999999999995, "best_prompt": 34.88, "prompt_id": "prompt-2", "CPS": 34.662 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 76.5, "stderr": 0.0212 }, { "prompt": "prompt-2", "metric": "acc", "value": 76.75, "stderr": 0.0211 }, { "prompt": "prompt-3", "metric": "acc", "value": 75, "stderr": 0.0217 }, { "prompt": "prompt-4", "metric": "acc", "value": 74.75, "stderr": 0.0217 }, { "prompt": "prompt-5", "metric": "acc", "value": 71.25, "stderr": 0.0227 }, { "prompt": "prompt-6", "metric": "acc", "value": 72.5, "stderr": 0.0224 } ], "average_accuracy": 74.45833333333333, "best_prompt": 76.75, "prompt_id": "prompt-2", "CPS": 74.99114583333333 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 3.09, "stderr": 0.015 }, { "prompt": "prompt-2", "metric": "f1", "value": 2.34, "stderr": 0.0132 }, { "prompt": "prompt-3", "metric": "f1", "value": 11.110000000000001, "stderr": 0.0262 }, { "prompt": "prompt-4", "metric": "f1", "value": 11.03, "stderr": 0.0261 }, { "prompt": "prompt-5", "metric": "f1", "value": 27.189999999999998, "stderr": 0.0322 }, { "prompt": "prompt-6", "metric": "f1", "value": 37.69, "stderr": 0.031 } ], "average_accuracy": 15.408333333333331, "best_prompt": 37.69, "prompt_id": "prompt-6", "CPS": 29.29203983333333 } }
39.476415
{ "model_name": "Almawave/Velvet-14B", "num_fewshot": "None", "batch_size": 1, "model": "Almawave/Velvet-14B", "base_model": "MistralForCausalLM", "revision": "8b284698ed73862e5b70c66e1fc07d47b70118a5", "submitted_time": "2025-01-31T00:53:58", "num_params_billion": 14.08027136, "language": "en_fr_de_es_it_pt" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 31.4, "stderr": 0.0208 }, { "prompt": "prompt-2", "metric": "acc", "value": 31.8, "stderr": 0.0208 }, { "prompt": "prompt-3", "metric": "acc", "value": 54.400000000000006, "stderr": 0.0223 }, { "prompt": "prompt-4", "metric": "acc", "value": 57.4, "stderr": 0.0221 }, { "prompt": "prompt-5", "metric": "acc", "value": 41, "stderr": 0.022 }, { "prompt": "prompt-6", "metric": "acc", "value": 38.4, "stderr": 0.0218 } ], "average_accuracy": 42.4, "best_prompt": 57.4, "prompt_id": "prompt-4", "CPS": 48.79 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 29.18, "stderr": 0.0227 }, { "prompt": "prompt-2", "metric": "acc", "value": 28.43, "stderr": 0.0226 }, { "prompt": "prompt-3", "metric": "acc", "value": 67.83, "stderr": 0.0234 }, { "prompt": "prompt-4", "metric": "acc", "value": 89.53, "stderr": 0.0153 }, { "prompt": "prompt-5", "metric": "acc", "value": 28.43, "stderr": 0.0226 }, { "prompt": "prompt-6", "metric": "acc", "value": 29.43, "stderr": 0.0228 } ], "average_accuracy": 45.471666666666664, "best_prompt": 89.53, "prompt_id": "prompt-4", "CPS": 50.08457416666667 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 31.91, "stderr": 0.0181 }, { "prompt": "prompt-2", "metric": "f1", "value": 59.38, "stderr": 0.0137 }, { "prompt": "prompt-3", "metric": "f1", "value": 65.64, "stderr": 0.0114 }, { "prompt": "prompt-4", "metric": "f1", "value": 63.080000000000005, "stderr": 0.0113 }, { "prompt": "prompt-5", "metric": "f1", "value": 50.839999999999996, "stderr": 0.0149 }, { "prompt": "prompt-6", "metric": "f1", "value": 64.73, "stderr": 0.0121 } ], "average_accuracy": 55.93000000000001, "best_prompt": 65.64, "prompt_id": "prompt-3", "CPS": 59.266356 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 0.13, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 0, "stderr": null } ], "average_accuracy": 0.065, "best_prompt": 0.13, "prompt_id": "prompt-1", "CPS": 0.1299155 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 15.57877847794474, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 15.913740911294232, "stderr": null } ], "average_accuracy": 15.746259694619486, "best_prompt": 15.913740911294232, "prompt_id": "prompt-2", "CPS": 15.88708838439753 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 9.98, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 3.93, "stderr": null } ], "average_accuracy": 6.955, "best_prompt": 9.98, "prompt_id": "prompt-1", "CPS": 9.678105 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 58.089999999999996, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 48.06, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 61.970000000000006, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 61.970000000000006, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 66.59, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 66.25, "stderr": null } ], "average_accuracy": 60.48833333333334, "best_prompt": 66.59, "prompt_id": "prompt-5", "CPS": 62.526900166666664 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 31.069999999999997, "stderr": 0.0112 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 31.11, "stderr": 
0.0115 } ], "average_accuracy": 31.089999999999996, "best_prompt": 31.11, "prompt_id": "prompt-2", "CPS": 31.103778 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 78.5, "stderr": 0.0206 }, { "prompt": "prompt-2", "metric": "acc", "value": 75.5, "stderr": 0.0215 }, { "prompt": "prompt-3", "metric": "acc", "value": 66.25, "stderr": 0.0237 }, { "prompt": "prompt-4", "metric": "acc", "value": 60.25, "stderr": 0.0245 }, { "prompt": "prompt-5", "metric": "acc", "value": 60.25, "stderr": 0.0245 }, { "prompt": "prompt-6", "metric": "acc", "value": 65, "stderr": 0.0239 } ], "average_accuracy": 67.625, "best_prompt": 78.5, "prompt_id": "prompt-1", "CPS": 69.963125 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 0, "stderr": 0 }, { "prompt": "prompt-2", "metric": "f1", "value": 0, "stderr": 0 }, { "prompt": "prompt-3", "metric": "f1", "value": 39.300000000000004, "stderr": 0.0308 }, { "prompt": "prompt-4", "metric": "f1", "value": 63.81, "stderr": 0.0212 }, { "prompt": "prompt-5", "metric": "f1", "value": 64.31, "stderr": 0.0215 }, { "prompt": "prompt-6", "metric": "f1", "value": 60.06, "stderr": 0.0234 } ], "average_accuracy": 37.913333333333334, "best_prompt": 64.31, "prompt_id": "prompt-5", "CPS": 47.33430366666667 } }
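Each task entry also stores aggregates that can be re-derived from its prompts list: average_accuracy is the mean of the per-prompt values, best_prompt is the maximum, and prompt_id names the prompt that achieves it. A sketch using the zero-shot Velvet-14B "admission-test" entry above; the helper function is illustrative only:

```python
# Re-deriving a task's aggregate fields from its "prompts" list.
# Values copied from the zero-shot Velvet-14B "admission-test" entry above;
# the helper name is illustrative, not part of the dataset.
prompts = [
    {"prompt": "prompt-1", "metric": "acc", "value": 31.4},
    {"prompt": "prompt-2", "metric": "acc", "value": 31.8},
    {"prompt": "prompt-3", "metric": "acc", "value": 54.4},
    {"prompt": "prompt-4", "metric": "acc", "value": 57.4},
    {"prompt": "prompt-5", "metric": "acc", "value": 41.0},
    {"prompt": "prompt-6", "metric": "acc", "value": 38.4},
]

def summarize(prompts):
    values = [p["value"] for p in prompts]
    best = max(prompts, key=lambda p: p["value"])
    return {
        "average_accuracy": sum(values) / len(values),  # 42.4
        "best_prompt": best["value"],                   # 57.4
        "prompt_id": best["prompt"],                    # "prompt-4"
    }

print(summarize(prompts))
```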
49.301524
{ "model_name": "CohereForAI/aya-expanse-8b", "num_fewshot": "5", "batch_size": 1, "model": "CohereForAI/aya-expanse-8b", "base_model": "CohereForCausalLM", "revision": "0f7167f3c60d3530b415b4d4f0aac862e74ba4df", "submitted_time": "2024-10-23T06:34:13", "num_params_billion": null, "language": "en_fr_de_es_it_pt_ja_ko_zh_ar_el_fa_pl_id_cs_he_hi_nl_ro_ru_tr_uk_vi" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 37.4, "stderr": 0.0217 }, { "prompt": "prompt-2", "metric": "acc", "value": 37.6, "stderr": 0.0217 }, { "prompt": "prompt-3", "metric": "acc", "value": 62.8, "stderr": 0.0216 }, { "prompt": "prompt-4", "metric": "acc", "value": 63.4, "stderr": 0.0216 }, { "prompt": "prompt-5", "metric": "acc", "value": 38.6, "stderr": 0.0218 }, { "prompt": "prompt-6", "metric": "acc", "value": 37.8, "stderr": 0.0217 } ], "average_accuracy": 46.26666666666667, "best_prompt": 63.4, "prompt_id": "prompt-4", "CPS": 52.537466666666674 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 31.669999999999998, "stderr": 0.0233 }, { "prompt": "prompt-2", "metric": "acc", "value": 31.169999999999998, "stderr": 0.0232 }, { "prompt": "prompt-3", "metric": "acc", "value": 92.02, "stderr": 0.0135 }, { "prompt": "prompt-4", "metric": "acc", "value": 94.01, "stderr": 0.0119 }, { "prompt": "prompt-5", "metric": "acc", "value": 31.169999999999998, "stderr": 0.0232 }, { "prompt": "prompt-6", "metric": "acc", "value": 31.169999999999998, "stderr": 0.0232 } ], "average_accuracy": 51.86833333333334, "best_prompt": 94.01, "prompt_id": "prompt-4", "CPS": 54.39261916666667 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 61.919999999999995, "stderr": 0.0136 }, { "prompt": "prompt-2", "metric": "f1", "value": 66.52, "stderr": 0.0126 }, { "prompt": "prompt-3", "metric": "f1", "value": 70.95, "stderr": 0.0116 }, { "prompt": "prompt-4", "metric": "f1", "value": 72.71, "stderr": 0.0115 }, { "prompt": "prompt-5", "metric": "f1", "value": 61.83, "stderr": 0.0136 }, { "prompt": "prompt-6", "metric": "f1", "value": 61.06, "stderr": 0.0136 } ], "average_accuracy": 65.83166666666666, "best_prompt": 72.71, "prompt_id": "prompt-4", "CPS": 67.70876383333334 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 20.14, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 20.599999999999998, "stderr": null } ], "average_accuracy": 20.369999999999997, "best_prompt": 20.599999999999998, "prompt_id": "prompt-2", "CPS": 20.552619999999997 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 35.57788293746971, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 35.76877120698013, "stderr": null } ], "average_accuracy": 35.67332707222492, "best_prompt": 35.76877120698013, "prompt_id": "prompt-2", "CPS": 35.734632012789056 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 36.559999999999995, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 36.559999999999995, "stderr": null } ], "average_accuracy": 36.559999999999995, "best_prompt": 36.559999999999995, "prompt_id": "prompt-1", "CPS": 36.559999999999995 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 71.67999999999999, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 70.12, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 71.36, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 72.36, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 73.3, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 72.59, "stderr": null } ], "average_accuracy": 71.90166666666669, "best_prompt": 73.3, "prompt_id": "prompt-5", "CPS": 72.27502166666667 }, "summarization-fanpage": { "prompts": [ { "prompt": 
"prompt-1", "metric": "rouge1", "value": 18.33, "stderr": 0.0063 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 18.22, "stderr": 0.0061 } ], "average_accuracy": 18.275, "best_prompt": 18.33, "prompt_id": "prompt-1", "CPS": 18.319918499999996 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 75.25, "stderr": 0.0216 }, { "prompt": "prompt-2", "metric": "acc", "value": 76.25, "stderr": 0.0213 }, { "prompt": "prompt-3", "metric": "acc", "value": 74.25, "stderr": 0.0219 }, { "prompt": "prompt-4", "metric": "acc", "value": 72.5, "stderr": 0.0224 }, { "prompt": "prompt-5", "metric": "acc", "value": 67.75, "stderr": 0.0234 }, { "prompt": "prompt-6", "metric": "acc", "value": 65.25, "stderr": 0.0238 } ], "average_accuracy": 71.875, "best_prompt": 76.25, "prompt_id": "prompt-2", "CPS": 72.9140625 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 58.75, "stderr": 0.0272 }, { "prompt": "prompt-2", "metric": "f1", "value": 58.46, "stderr": 0.0269 }, { "prompt": "prompt-3", "metric": "f1", "value": 66.97999999999999, "stderr": 0.0213 }, { "prompt": "prompt-4", "metric": "f1", "value": 55.16, "stderr": 0.0276 }, { "prompt": "prompt-5", "metric": "f1", "value": 57.589999999999996, "stderr": 0.0262 }, { "prompt": "prompt-6", "metric": "f1", "value": 60.51, "stderr": 0.0245 } ], "average_accuracy": 59.574999999999996, "best_prompt": 66.97999999999999, "prompt_id": "prompt-3", "CPS": 62.02013099999999 } }
39.848225
{ "model_name": "CohereForAI/aya-expanse-8b", "num_fewshot": "None", "batch_size": 1, "model": "CohereForAI/aya-expanse-8b", "base_model": "CohereForCausalLM", "revision": "0f7167f3c60d3530b415b4d4f0aac862e74ba4df", "submitted_time": "2024-10-23T06:34:13", "num_params_billion": null, "language": "en_fr_de_es_it_pt_ja_ko_zh_ar_el_fa_pl_id_cs_he_hi_nl_ro_ru_tr_uk_vi" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 27.200000000000003, "stderr": 0.0199 }, { "prompt": "prompt-2", "metric": "acc", "value": 28.799999999999997, "stderr": 0.0203 }, { "prompt": "prompt-3", "metric": "acc", "value": 57.4, "stderr": 0.0221 }, { "prompt": "prompt-4", "metric": "acc", "value": 57.8, "stderr": 0.0221 }, { "prompt": "prompt-5", "metric": "acc", "value": 30.4, "stderr": 0.0206 }, { "prompt": "prompt-6", "metric": "acc", "value": 29.599999999999998, "stderr": 0.0204 } ], "average_accuracy": 38.53333333333333, "best_prompt": 57.8, "prompt_id": "prompt-4", "CPS": 46.663866666666664 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 29.68, "stderr": 0.0228 }, { "prompt": "prompt-2", "metric": "acc", "value": 29.93, "stderr": 0.0229 }, { "prompt": "prompt-3", "metric": "acc", "value": 41.4, "stderr": 0.0246 }, { "prompt": "prompt-4", "metric": "acc", "value": 66.33, "stderr": 0.0236 }, { "prompt": "prompt-5", "metric": "acc", "value": 29.93, "stderr": 0.0229 }, { "prompt": "prompt-6", "metric": "acc", "value": 30.919999999999998, "stderr": 0.0231 } ], "average_accuracy": 38.03166666666666, "best_prompt": 66.33, "prompt_id": "prompt-4", "CPS": 47.559715499999996 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 59.489999999999995, "stderr": 0.0131 }, { "prompt": "prompt-2", "metric": "f1", "value": 60.92999999999999, "stderr": 0.0122 }, { "prompt": "prompt-3", "metric": "f1", "value": 54.690000000000005, "stderr": 0.0133 }, { "prompt": "prompt-4", "metric": "f1", "value": 62.970000000000006, "stderr": 0.0112 }, { "prompt": "prompt-5", "metric": "f1", "value": 62.92, "stderr": 0.0114 }, { "prompt": "prompt-6", "metric": "f1", "value": 61.89, "stderr": 0.0115 } ], "average_accuracy": 60.48166666666666, "best_prompt": 62.970000000000006, "prompt_id": "prompt-4", "CPS": 61.4030965 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 7.33, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 15.920000000000002, "stderr": null } ], "average_accuracy": 11.625, "best_prompt": 15.920000000000002, "prompt_id": "prompt-2", "CPS": 15.236236000000002 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 15.216677169171108, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 19.49798473097431, "stderr": null } ], "average_accuracy": 17.357330950072708, "best_prompt": 19.49798473097431, "prompt_id": "prompt-2", "CPS": 19.08060038363109 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 5.47, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 9.569999999999999, "stderr": null } ], "average_accuracy": 7.52, "best_prompt": 9.569999999999999, "prompt_id": "prompt-2", "CPS": 9.373814999999999 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 45.09, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 47.010000000000005, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 66.27, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 70.62, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 64.92999999999999, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 67.02, "stderr": null } ], "average_accuracy": 60.156666666666666, "best_prompt": 70.62, "prompt_id": "prompt-4", "CPS": 63.230794 }, "summarization-fanpage": { "prompts": [ { 
"prompt": "prompt-1", "metric": "rouge1", "value": 18.48, "stderr": 0.0073 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 19.21, "stderr": 0.0075 } ], "average_accuracy": 18.845, "best_prompt": 19.21, "prompt_id": "prompt-2", "CPS": 19.1398835 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 75, "stderr": 0.0217 }, { "prompt": "prompt-2", "metric": "acc", "value": 65.75, "stderr": 0.0238 }, { "prompt": "prompt-3", "metric": "acc", "value": 64.75, "stderr": 0.0239 }, { "prompt": "prompt-4", "metric": "acc", "value": 60.25, "stderr": 0.0245 }, { "prompt": "prompt-5", "metric": "acc", "value": 52.5, "stderr": 0.025 }, { "prompt": "prompt-6", "metric": "acc", "value": 43.75, "stderr": 0.0248 } ], "average_accuracy": 60.333333333333336, "best_prompt": 75, "prompt_id": "prompt-1", "CPS": 64 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 9.02, "stderr": 0.0242 }, { "prompt": "prompt-2", "metric": "f1", "value": 5.34, "stderr": 0.0194 }, { "prompt": "prompt-3", "metric": "f1", "value": 66.19, "stderr": 0.0208 }, { "prompt": "prompt-4", "metric": "f1", "value": 63.21, "stderr": 0.0221 }, { "prompt": "prompt-5", "metric": "f1", "value": 65.82000000000001, "stderr": 0.0206 }, { "prompt": "prompt-6", "metric": "f1", "value": 66.13, "stderr": 0.0201 } ], "average_accuracy": 45.95166666666666, "best_prompt": 66.19, "prompt_id": "prompt-3", "CPS": 52.794247166666665 } }
46.9323
{ "model_name": "DeepMount00/Lexora-Medium-7B", "num_fewshot": "None", "batch_size": 1, "model": "DeepMount00/Lexora-Medium-7B", "base_model": "Qwen2ForCausalLM", "revision": "bd14fecfe27f5823fbd6e299984c6196ff6d5d3b", "submitted_time": "2024-09-24T07:06:28", "num_params_billion": 7.615616512, "language": "it_en" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 26, "stderr": 0.0196 }, { "prompt": "prompt-2", "metric": "acc", "value": 26, "stderr": 0.0196 }, { "prompt": "prompt-3", "metric": "acc", "value": 74, "stderr": 0.0196 }, { "prompt": "prompt-4", "metric": "acc", "value": 72.39999999999999, "stderr": 0.02 }, { "prompt": "prompt-5", "metric": "acc", "value": 26.8, "stderr": 0.0198 }, { "prompt": "prompt-6", "metric": "acc", "value": 28.999999999999996, "stderr": 0.0203 } ], "average_accuracy": 42.36666666666667, "best_prompt": 74, "prompt_id": "prompt-3", "CPS": 50.59133333333333 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 29.93, "stderr": 0.0229 }, { "prompt": "prompt-2", "metric": "acc", "value": 30.42, "stderr": 0.023 }, { "prompt": "prompt-3", "metric": "acc", "value": 97.76, "stderr": 0.0074 }, { "prompt": "prompt-4", "metric": "acc", "value": 97.50999999999999, "stderr": 0.0078 }, { "prompt": "prompt-5", "metric": "acc", "value": 30.17, "stderr": 0.023 }, { "prompt": "prompt-6", "metric": "acc", "value": 29.43, "stderr": 0.0228 } ], "average_accuracy": 52.53666666666667, "best_prompt": 97.76, "prompt_id": "prompt-3", "CPS": 53.549669333333334 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 63.51, "stderr": 0.0114 }, { "prompt": "prompt-2", "metric": "f1", "value": 62.529999999999994, "stderr": 0.0114 }, { "prompt": "prompt-3", "metric": "f1", "value": 67.16, "stderr": 0.0117 }, { "prompt": "prompt-4", "metric": "f1", "value": 65.33, "stderr": 0.0112 }, { "prompt": "prompt-5", "metric": "f1", "value": 59.36, "stderr": 0.0125 }, { "prompt": "prompt-6", "metric": "f1", "value": 59.730000000000004, "stderr": 0.0124 } ], "average_accuracy": 62.93666666666667, "best_prompt": 67.16, "prompt_id": "prompt-3", "CPS": 64.32360933333332 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 39.75, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 41.17, "stderr": null } ], "average_accuracy": 40.46, "best_prompt": 41.17, "prompt_id": "prompt-2", "CPS": 40.877693 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 26.95614881240911, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 27.612395782840522, "stderr": null } ], "average_accuracy": 27.284272297624817, "best_prompt": 27.612395782840522, "prompt_id": "prompt-2", "CPS": 27.52179302744631 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 12.790000000000001, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 7.59, "stderr": null } ], "average_accuracy": 10.190000000000001, "best_prompt": 12.790000000000001, "prompt_id": "prompt-1", "CPS": 12.457460000000001 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 55.54, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 38.66, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 68.83, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 70.24000000000001, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 50.92, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 27.35, "stderr": null } ], "average_accuracy": 51.92333333333334, "best_prompt": 70.24000000000001, "prompt_id": "prompt-4", "CPS": 57.37437333333334 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 26.93, "stderr": 
0.0107 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 28.599999999999998, "stderr": 0.0092 } ], "average_accuracy": 27.765, "best_prompt": 28.599999999999998, "prompt_id": "prompt-2", "CPS": 28.361189999999997 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 73.5, "stderr": 0.0221 }, { "prompt": "prompt-2", "metric": "acc", "value": 64, "stderr": 0.024 }, { "prompt": "prompt-3", "metric": "acc", "value": 81.75, "stderr": 0.0193 }, { "prompt": "prompt-4", "metric": "acc", "value": 83.75, "stderr": 0.0185 }, { "prompt": "prompt-5", "metric": "acc", "value": 57.99999999999999, "stderr": 0.0247 }, { "prompt": "prompt-6", "metric": "acc", "value": 56.25, "stderr": 0.0248 } ], "average_accuracy": 69.54166666666667, "best_prompt": 83.75, "prompt_id": "prompt-4", "CPS": 71.85052083333333 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 66.67, "stderr": 0.0199 }, { "prompt": "prompt-2", "metric": "f1", "value": 67.04, "stderr": 0.0204 }, { "prompt": "prompt-3", "metric": "f1", "value": 56.26, "stderr": 0.029 }, { "prompt": "prompt-4", "metric": "f1", "value": 47.85, "stderr": 0.0321 }, { "prompt": "prompt-5", "metric": "f1", "value": 66.75999999999999, "stderr": 0.0199 }, { "prompt": "prompt-6", "metric": "f1", "value": 56.269999999999996, "stderr": 0.0238 } ], "average_accuracy": 60.14166666666666, "best_prompt": 67.04, "prompt_id": "prompt-2", "CPS": 62.415357333333326 } }
49.406014
{ "model_name": "DeepMount00/Llama-3-8b-Ita", "num_fewshot": "5", "batch_size": 1, "model": "DeepMount00/Llama-3-8b-Ita", "base_model": "LlamaForCausalLM", "revision": "9e5753beda3704492ae340e0321ad516a58b9ffd", "submitted_time": "2024-05-01T12:43:40", "num_params_billion": 8.030261248, "language": "it_en" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 36.4, "stderr": 0.0215 }, { "prompt": "prompt-2", "metric": "acc", "value": 36.6, "stderr": 0.0216 }, { "prompt": "prompt-3", "metric": "acc", "value": 65.60000000000001, "stderr": 0.0213 }, { "prompt": "prompt-4", "metric": "acc", "value": 65.60000000000001, "stderr": 0.0213 }, { "prompt": "prompt-5", "metric": "acc", "value": 37.4, "stderr": 0.0217 }, { "prompt": "prompt-6", "metric": "acc", "value": 36.4, "stderr": 0.0215 } ], "average_accuracy": 46.33333333333334, "best_prompt": 65.60000000000001, "prompt_id": "prompt-3", "CPS": 52.961066666666675 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 29.93, "stderr": 0.0229 }, { "prompt": "prompt-2", "metric": "acc", "value": 30.42, "stderr": 0.023 }, { "prompt": "prompt-3", "metric": "acc", "value": 94.01, "stderr": 0.0119 }, { "prompt": "prompt-4", "metric": "acc", "value": 94.76, "stderr": 0.0111 }, { "prompt": "prompt-5", "metric": "acc", "value": 29.18, "stderr": 0.0227 }, { "prompt": "prompt-6", "metric": "acc", "value": 30.17, "stderr": 0.023 } ], "average_accuracy": 51.41166666666667, "best_prompt": 94.76, "prompt_id": "prompt-4", "CPS": 53.68311933333332 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 61.19, "stderr": 0.0143 }, { "prompt": "prompt-2", "metric": "f1", "value": 66.4, "stderr": 0.013 }, { "prompt": "prompt-3", "metric": "f1", "value": 67.66, "stderr": 0.0115 }, { "prompt": "prompt-4", "metric": "f1", "value": 72.86, "stderr": 0.0111 }, { "prompt": "prompt-5", "metric": "f1", "value": 68.51, "stderr": 0.0119 }, { "prompt": "prompt-6", "metric": "f1", "value": 68.31, "stderr": 0.0121 } ], "average_accuracy": 67.48833333333333, "best_prompt": 72.86, "prompt_id": "prompt-4", "CPS": 68.94620366666666 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 25.16, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 24.560000000000002, "stderr": null } ], "average_accuracy": 24.86, "best_prompt": 25.16, "prompt_id": "prompt-1", "CPS": 25.08452 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 29.234854580707708, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 31.30808894813379, "stderr": null } ], "average_accuracy": 30.27147176442075, "best_prompt": 31.30808894813379, "prompt_id": "prompt-2", "CPS": 30.98354391820527 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 36.559999999999995, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 36.559999999999995, "stderr": null } ], "average_accuracy": 36.559999999999995, "best_prompt": 36.559999999999995, "prompt_id": "prompt-1", "CPS": 36.559999999999995 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 66.75, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 68.78, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 74.88, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 76.59, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 66.03, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 69.73, "stderr": null } ], "average_accuracy": 70.46, "best_prompt": 76.59, "prompt_id": "prompt-4", "CPS": 71.895033 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 24.21, "stderr": 0.0086 }, { "prompt": "prompt-2", 
"metric": "rouge1", "value": 24.310000000000002, "stderr": 0.0087 } ], "average_accuracy": 24.26, "best_prompt": 24.310000000000002, "prompt_id": "prompt-2", "CPS": 24.297845000000002 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 76, "stderr": 0.0214 }, { "prompt": "prompt-2", "metric": "acc", "value": 76, "stderr": 0.0214 }, { "prompt": "prompt-3", "metric": "acc", "value": 72, "stderr": 0.0225 }, { "prompt": "prompt-4", "metric": "acc", "value": 69.5, "stderr": 0.023 }, { "prompt": "prompt-5", "metric": "acc", "value": 67.75, "stderr": 0.0234 }, { "prompt": "prompt-6", "metric": "acc", "value": 68, "stderr": 0.0234 } ], "average_accuracy": 71.54166666666667, "best_prompt": 76, "prompt_id": "prompt-1", "CPS": 72.61166666666666 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 61.260000000000005, "stderr": 0.0256 }, { "prompt": "prompt-2", "metric": "f1", "value": 56.489999999999995, "stderr": 0.0273 }, { "prompt": "prompt-3", "metric": "f1", "value": 59.260000000000005, "stderr": 0.0264 }, { "prompt": "prompt-4", "metric": "f1", "value": 48.730000000000004, "stderr": 0.031 }, { "prompt": "prompt-5", "metric": "f1", "value": 49.89, "stderr": 0.0291 }, { "prompt": "prompt-6", "metric": "f1", "value": 50.57000000000001, "stderr": 0.0291 } ], "average_accuracy": 54.36666666666667, "best_prompt": 61.260000000000005, "prompt_id": "prompt-1", "CPS": 57.037144 } }
41.022118
{ "model_name": "DeepMount00/Llama-3-8b-Ita", "num_fewshot": "None", "batch_size": 1, "model": "DeepMount00/Llama-3-8b-Ita", "base_model": "LlamaForCausalLM", "revision": "9e5753beda3704492ae340e0321ad516a58b9ffd", "submitted_time": "2024-05-01T12:43:40", "num_params_billion": 8.030261248, "language": "it_en" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 27.400000000000002, "stderr": 0.02 }, { "prompt": "prompt-2", "metric": "acc", "value": 26.8, "stderr": 0.0198 }, { "prompt": "prompt-3", "metric": "acc", "value": 62.2, "stderr": 0.0217 }, { "prompt": "prompt-4", "metric": "acc", "value": 63, "stderr": 0.0216 }, { "prompt": "prompt-5", "metric": "acc", "value": 33.6, "stderr": 0.0211 }, { "prompt": "prompt-6", "metric": "acc", "value": 32, "stderr": 0.0209 } ], "average_accuracy": 40.833333333333336, "best_prompt": 63, "prompt_id": "prompt-4", "CPS": 49.035 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 29.43, "stderr": 0.0228 }, { "prompt": "prompt-2", "metric": "acc", "value": 30.17, "stderr": 0.023 }, { "prompt": "prompt-3", "metric": "acc", "value": 68.58, "stderr": 0.0232 }, { "prompt": "prompt-4", "metric": "acc", "value": 88.03, "stderr": 0.0162 }, { "prompt": "prompt-5", "metric": "acc", "value": 30.17, "stderr": 0.023 }, { "prompt": "prompt-6", "metric": "acc", "value": 30.919999999999998, "stderr": 0.0231 } ], "average_accuracy": 46.21666666666667, "best_prompt": 88.03, "prompt_id": "prompt-4", "CPS": 51.22172266666667 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 0, "stderr": 0 }, { "prompt": "prompt-2", "metric": "f1", "value": 0.25, "stderr": 0.0025 }, { "prompt": "prompt-3", "metric": "f1", "value": 55.25, "stderr": 0.013 }, { "prompt": "prompt-4", "metric": "f1", "value": 62.56, "stderr": 0.0112 }, { "prompt": "prompt-5", "metric": "f1", "value": 62.78, "stderr": 0.0113 }, { "prompt": "prompt-6", "metric": "f1", "value": 63.85999999999999, "stderr": 0.0113 } ], "average_accuracy": 40.78333333333333, "best_prompt": 63.85999999999999, "prompt_id": "prompt-6", "CPS": 49.12324066666667 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 20, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 22.93, "stderr": null } ], "average_accuracy": 21.465, "best_prompt": 22.93, "prompt_id": "prompt-2", "CPS": 22.5940755 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 26.621103974793986, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 15.058277993213768, "stderr": null } ], "average_accuracy": 20.839690984003877, "best_prompt": 26.621103974793986, "prompt_id": "prompt-1", "CPS": 25.082028011303503 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 10.11, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 14.06, "stderr": null } ], "average_accuracy": 12.085, "best_prompt": 14.06, "prompt_id": "prompt-2", "CPS": 13.782315 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 47.61, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 52.38, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 62.6, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 65.58, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 67.21000000000001, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 66.60000000000001, "stderr": null } ], "average_accuracy": 60.330000000000005, "best_prompt": 67.21000000000001, "prompt_id": "prompt-5", "CPS": 62.585952000000006 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 22.84, "stderr": 0.0086 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 22.17, 
"stderr": 0.0078 } ], "average_accuracy": 22.505000000000003, "best_prompt": 22.84, "prompt_id": "prompt-1", "CPS": 22.763486 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 60, "stderr": 0.0245 }, { "prompt": "prompt-2", "metric": "acc", "value": 63, "stderr": 0.0242 }, { "prompt": "prompt-3", "metric": "acc", "value": 58.25, "stderr": 0.0247 }, { "prompt": "prompt-4", "metric": "acc", "value": 58.5, "stderr": 0.0247 }, { "prompt": "prompt-5", "metric": "acc", "value": 46.5, "stderr": 0.025 }, { "prompt": "prompt-6", "metric": "acc", "value": 49.75, "stderr": 0.025 } ], "average_accuracy": 56, "best_prompt": 63, "prompt_id": "prompt-2", "CPS": 58.589999999999996 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 32.85, "stderr": 0.0325 }, { "prompt": "prompt-2", "metric": "f1", "value": 5.319999999999999, "stderr": 0.0193 }, { "prompt": "prompt-3", "metric": "f1", "value": 66.4, "stderr": 0.02 }, { "prompt": "prompt-4", "metric": "f1", "value": 66.03, "stderr": 0.0202 }, { "prompt": "prompt-5", "metric": "f1", "value": 60.62, "stderr": 0.024 }, { "prompt": "prompt-6", "metric": "f1", "value": 67.12, "stderr": 0.02 } ], "average_accuracy": 49.723333333333336, "best_prompt": 67.12, "prompt_id": "prompt-6", "CPS": 55.44335733333333 } }
44.932195
{ "model_name": "FairMind/Llama-3-8B-4bit-UltraChat-Ita", "num_fewshot": "5", "batch_size": 1, "model": "FairMind/Llama-3-8B-4bit-UltraChat-Ita", "base_model": "LlamaForCausalLM", "revision": "473a2ee0296e1471ae635fd8977dd3b44404f0e8", "submitted_time": "2024-05-03T05:18:26", "num_params_billion": 8.030261248, "language": "it" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 31.6, "stderr": 0.0208 }, { "prompt": "prompt-2", "metric": "acc", "value": 31.4, "stderr": 0.0208 }, { "prompt": "prompt-3", "metric": "acc", "value": 56.8, "stderr": 0.0222 }, { "prompt": "prompt-4", "metric": "acc", "value": 55.00000000000001, "stderr": 0.0223 }, { "prompt": "prompt-5", "metric": "acc", "value": 31, "stderr": 0.0207 }, { "prompt": "prompt-6", "metric": "acc", "value": 31.6, "stderr": 0.0208 } ], "average_accuracy": 39.56666666666667, "best_prompt": 56.8, "prompt_id": "prompt-3", "CPS": 47.01146666666667 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 29.68, "stderr": 0.0228 }, { "prompt": "prompt-2", "metric": "acc", "value": 28.93, "stderr": 0.0227 }, { "prompt": "prompt-3", "metric": "acc", "value": 84.28999999999999, "stderr": 0.0182 }, { "prompt": "prompt-4", "metric": "acc", "value": 88.53, "stderr": 0.0159 }, { "prompt": "prompt-5", "metric": "acc", "value": 29.18, "stderr": 0.0227 }, { "prompt": "prompt-6", "metric": "acc", "value": 28.93, "stderr": 0.0227 } ], "average_accuracy": 48.25666666666666, "best_prompt": 88.53, "prompt_id": "prompt-4", "CPS": 52.876017999999995 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 54.92, "stderr": 0.0141 }, { "prompt": "prompt-2", "metric": "f1", "value": 57.63, "stderr": 0.0136 }, { "prompt": "prompt-3", "metric": "f1", "value": 63.44, "stderr": 0.0122 }, { "prompt": "prompt-4", "metric": "f1", "value": 64.16, "stderr": 0.012 }, { "prompt": "prompt-5", "metric": "f1", "value": 60.24, "stderr": 0.0132 }, { "prompt": "prompt-6", "metric": "f1", "value": 59.8, "stderr": 0.0129 } ], "average_accuracy": 60.031666666666666, "best_prompt": 64.16, "prompt_id": "prompt-4", "CPS": 61.51126133333333 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 20.94, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 22.67, "stderr": null } ], "average_accuracy": 21.805, "best_prompt": 22.67, "prompt_id": "prompt-2", "CPS": 22.4739045 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 28.31347188560349, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 30.022985942801743, "stderr": null } ], "average_accuracy": 29.168228914202615, "best_prompt": 30.022985942801743, "prompt_id": "prompt-2", "CPS": 29.766362360260317 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 25.27, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 27.27, "stderr": null } ], "average_accuracy": 26.27, "best_prompt": 27.27, "prompt_id": "prompt-2", "CPS": 26.9973 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 68.32000000000001, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 67.63, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 69.25, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 70.56, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 68.16, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 69.88, "stderr": null } ], "average_accuracy": 68.96666666666665, "best_prompt": 70.56, "prompt_id": "prompt-4", "CPS": 69.435744 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 21.45, "stderr": 0.0089 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 21.05, "stderr": 0.0087 } ], 
"average_accuracy": 21.25, "best_prompt": 21.45, "prompt_id": "prompt-1", "CPS": 21.4071 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 71, "stderr": 0.0227 }, { "prompt": "prompt-2", "metric": "acc", "value": 70.25, "stderr": 0.0229 }, { "prompt": "prompt-3", "metric": "acc", "value": 63.24999999999999, "stderr": 0.0241 }, { "prompt": "prompt-4", "metric": "acc", "value": 64, "stderr": 0.024 }, { "prompt": "prompt-5", "metric": "acc", "value": 52, "stderr": 0.025 }, { "prompt": "prompt-6", "metric": "acc", "value": 55.25, "stderr": 0.0249 } ], "average_accuracy": 62.625, "best_prompt": 71, "prompt_id": "prompt-1", "CPS": 65.05375 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 40.39, "stderr": 0.0308 }, { "prompt": "prompt-2", "metric": "f1", "value": 42.38, "stderr": 0.0304 }, { "prompt": "prompt-3", "metric": "f1", "value": 60.88, "stderr": 0.0226 }, { "prompt": "prompt-4", "metric": "f1", "value": 60.41, "stderr": 0.024 }, { "prompt": "prompt-5", "metric": "f1", "value": 35.08, "stderr": 0.0313 }, { "prompt": "prompt-6", "metric": "f1", "value": 46.400000000000006, "stderr": 0.0297 } ], "average_accuracy": 47.589999999999996, "best_prompt": 60.88, "prompt_id": "prompt-3", "CPS": 52.789048 } }
36.282895
{ "model_name": "FairMind/Llama-3-8B-4bit-UltraChat-Ita", "num_fewshot": "None", "batch_size": 1, "model": "FairMind/Llama-3-8B-4bit-UltraChat-Ita", "base_model": "LlamaForCausalLM", "revision": "473a2ee0296e1471ae635fd8977dd3b44404f0e8", "submitted_time": "2024-05-03T05:18:26", "num_params_billion": 8.030261248, "language": "it" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 24.4, "stderr": 0.0192 }, { "prompt": "prompt-2", "metric": "acc", "value": 23.799999999999997, "stderr": 0.0191 }, { "prompt": "prompt-3", "metric": "acc", "value": 49.2, "stderr": 0.0224 }, { "prompt": "prompt-4", "metric": "acc", "value": 47.4, "stderr": 0.0224 }, { "prompt": "prompt-5", "metric": "acc", "value": 27.800000000000004, "stderr": 0.0201 }, { "prompt": "prompt-6", "metric": "acc", "value": 27.6, "stderr": 0.02 } ], "average_accuracy": 33.36666666666667, "best_prompt": 49.2, "prompt_id": "prompt-3", "CPS": 41.410000000000004 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 29.43, "stderr": 0.0228 }, { "prompt": "prompt-2", "metric": "acc", "value": 29.43, "stderr": 0.0228 }, { "prompt": "prompt-3", "metric": "acc", "value": 34.660000000000004, "stderr": 0.0238 }, { "prompt": "prompt-4", "metric": "acc", "value": 55.61000000000001, "stderr": 0.0248 }, { "prompt": "prompt-5", "metric": "acc", "value": 29.18, "stderr": 0.0227 }, { "prompt": "prompt-6", "metric": "acc", "value": 28.93, "stderr": 0.0227 } ], "average_accuracy": 34.540000000000006, "best_prompt": 55.61000000000001, "prompt_id": "prompt-4", "CPS": 43.892973000000005 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 41.27, "stderr": 0.0162 }, { "prompt": "prompt-2", "metric": "f1", "value": 21.21, "stderr": 0.0166 }, { "prompt": "prompt-3", "metric": "f1", "value": 55.410000000000004, "stderr": 0.0128 }, { "prompt": "prompt-4", "metric": "f1", "value": 50.849999999999994, "stderr": 0.0143 }, { "prompt": "prompt-5", "metric": "f1", "value": 63.38, "stderr": 0.0113 }, { "prompt": "prompt-6", "metric": "f1", "value": 60.22, "stderr": 0.0118 } ], "average_accuracy": 48.723333333333336, "best_prompt": 63.38, "prompt_id": "prompt-5", "CPS": 54.09060466666667 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 0, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 0, "stderr": null } ], "average_accuracy": 0, "best_prompt": 0, "prompt_id": "prompt-1", "CPS": 0 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 11.490581677169171, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 15.94688317983519, "stderr": null } ], "average_accuracy": 13.71873242850218, "best_prompt": 15.94688317983519, "prompt_id": "prompt-2", "CPS": 15.591562582449495 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 0.97, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 4.2700000000000005, "stderr": null } ], "average_accuracy": 2.62, "best_prompt": 4.2700000000000005, "prompt_id": "prompt-2", "CPS": 4.1995450000000005 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 37.1, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 39.5, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 29.26, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 41.05, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 67.30000000000001, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 61.85000000000001, "stderr": null } ], "average_accuracy": 46.01, "best_prompt": 67.30000000000001, "prompt_id": "prompt-5", "CPS": 52.971830000000004 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 22.85, "stderr": 
0.0098 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 24.349999999999998, "stderr": 0.0101 } ], "average_accuracy": 23.6, "best_prompt": 24.349999999999998, "prompt_id": "prompt-2", "CPS": 24.167375 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 56.25, "stderr": 0.0248 }, { "prompt": "prompt-2", "metric": "acc", "value": 55.00000000000001, "stderr": 0.0249 }, { "prompt": "prompt-3", "metric": "acc", "value": 55.50000000000001, "stderr": 0.0249 }, { "prompt": "prompt-4", "metric": "acc", "value": 55.00000000000001, "stderr": 0.0249 }, { "prompt": "prompt-5", "metric": "acc", "value": 62.5, "stderr": 0.0242 }, { "prompt": "prompt-6", "metric": "acc", "value": 64.25, "stderr": 0.024 } ], "average_accuracy": 58.083333333333336, "best_prompt": 64.25, "prompt_id": "prompt-6", "CPS": 60.28791666666667 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 65.13, "stderr": 0.0204 }, { "prompt": "prompt-2", "metric": "f1", "value": 65.41, "stderr": 0.0203 }, { "prompt": "prompt-3", "metric": "f1", "value": 66.21000000000001, "stderr": 0.0202 }, { "prompt": "prompt-4", "metric": "f1", "value": 66.58, "stderr": 0.0199 }, { "prompt": "prompt-5", "metric": "f1", "value": 66.3, "stderr": 0.0202 }, { "prompt": "prompt-6", "metric": "f1", "value": 66.58, "stderr": 0.0199 } ], "average_accuracy": 66.035, "best_prompt": 66.58, "prompt_id": "prompt-4", "CPS": 66.21713899999999 } }
43.730353
{ "model_name": "MoxoffSpA/Volare", "num_fewshot": "5", "batch_size": 1, "model": "MoxoffSpA/Volare", "base_model": "GemmaForCausalLM", "revision": "41c187a3af2c932a41fa5b6ef1f9ff3abd56c9e7", "submitted_time": "2024-04-15T13:54:16", "num_params_billion": null, "language": "it_en" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 32.800000000000004, "stderr": 0.021 }, { "prompt": "prompt-2", "metric": "acc", "value": 33, "stderr": 0.021 }, { "prompt": "prompt-3", "metric": "acc", "value": 59.199999999999996, "stderr": 0.022 }, { "prompt": "prompt-4", "metric": "acc", "value": 59.199999999999996, "stderr": 0.022 }, { "prompt": "prompt-5", "metric": "acc", "value": 34, "stderr": 0.0212 }, { "prompt": "prompt-6", "metric": "acc", "value": 32.800000000000004, "stderr": 0.021 } ], "average_accuracy": 41.833333333333336, "best_prompt": 59.199999999999996, "prompt_id": "prompt-3", "CPS": 48.918933333333335 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 28.18, "stderr": 0.0225 }, { "prompt": "prompt-2", "metric": "acc", "value": 27.93, "stderr": 0.0224 }, { "prompt": "prompt-3", "metric": "acc", "value": 61.35, "stderr": 0.0243 }, { "prompt": "prompt-4", "metric": "acc", "value": 75.06, "stderr": 0.0216 }, { "prompt": "prompt-5", "metric": "acc", "value": 28.43, "stderr": 0.0226 }, { "prompt": "prompt-6", "metric": "acc", "value": 27.18, "stderr": 0.0222 } ], "average_accuracy": 41.355000000000004, "best_prompt": 75.06, "prompt_id": "prompt-4", "CPS": 49.761027000000006 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 36.730000000000004, "stderr": 0.0171 }, { "prompt": "prompt-2", "metric": "f1", "value": 37.55, "stderr": 0.0176 }, { "prompt": "prompt-3", "metric": "f1", "value": 36.53, "stderr": 0.0178 }, { "prompt": "prompt-4", "metric": "f1", "value": 54.730000000000004, "stderr": 0.0167 }, { "prompt": "prompt-5", "metric": "f1", "value": 54.120000000000005, "stderr": 0.0154 }, { "prompt": "prompt-6", "metric": "f1", "value": 52.370000000000005, "stderr": 0.0161 } ], "average_accuracy": 45.33833333333334, "best_prompt": 54.730000000000004, "prompt_id": "prompt-4", "CPS": 49.58994083333334 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 28.29, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 26.21, "stderr": null } ], "average_accuracy": 27.25, "best_prompt": 28.29, "prompt_id": "prompt-1", "CPS": 27.995784 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 31.398051381483274, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 31.574670382937477, "stderr": null } ], "average_accuracy": 31.486360882210377, "best_prompt": 31.574670382937477, "prompt_id": "prompt-2", "CPS": 31.546786949166076 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 32.85, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 29.759999999999998, "stderr": null } ], "average_accuracy": 31.305, "best_prompt": 32.85, "prompt_id": "prompt-1", "CPS": 32.342467500000005 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 66.75999999999999, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 66.36, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 72.15, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 72.33000000000001, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 62.28, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 61.419999999999995, "stderr": null } ], "average_accuracy": 66.88333333333334, "best_prompt": 72.33000000000001, "prompt_id": "prompt-4", "CPS": 68.390426 }, "summarization-fanpage": { "prompts": [ { 
"prompt": "prompt-1", "metric": "rouge1", "value": 17.23, "stderr": 0.0074 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 17.5, "stderr": 0.0078 } ], "average_accuracy": 17.365000000000002, "best_prompt": 17.5, "prompt_id": "prompt-2", "CPS": 17.476375 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 69, "stderr": 0.0232 }, { "prompt": "prompt-2", "metric": "acc", "value": 71, "stderr": 0.0227 }, { "prompt": "prompt-3", "metric": "acc", "value": 68.5, "stderr": 0.0233 }, { "prompt": "prompt-4", "metric": "acc", "value": 66.5, "stderr": 0.0236 }, { "prompt": "prompt-5", "metric": "acc", "value": 59, "stderr": 0.0246 }, { "prompt": "prompt-6", "metric": "acc", "value": 56.99999999999999, "stderr": 0.0248 } ], "average_accuracy": 65.16666666666667, "best_prompt": 71, "prompt_id": "prompt-2", "CPS": 66.85833333333333 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 53.1, "stderr": 0.0285 }, { "prompt": "prompt-2", "metric": "f1", "value": 41.870000000000005, "stderr": 0.0326 }, { "prompt": "prompt-3", "metric": "f1", "value": 27.589999999999996, "stderr": 0.0329 }, { "prompt": "prompt-4", "metric": "f1", "value": 13.43, "stderr": 0.0277 }, { "prompt": "prompt-5", "metric": "f1", "value": 32.690000000000005, "stderr": 0.032 }, { "prompt": "prompt-6", "metric": "f1", "value": 51.88, "stderr": 0.0271 } ], "average_accuracy": 36.76, "best_prompt": 53.1, "prompt_id": "prompt-1", "CPS": 44.42346 } }
30.234882
{ "model_name": "MoxoffSpA/Volare", "num_fewshot": "None", "batch_size": 1, "model": "MoxoffSpA/Volare", "base_model": "GemmaForCausalLM", "revision": "41c187a3af2c932a41fa5b6ef1f9ff3abd56c9e7", "submitted_time": "2024-04-15T13:54:16", "num_params_billion": null, "language": "it_en" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 21.2, "stderr": 0.0183 }, { "prompt": "prompt-2", "metric": "acc", "value": 19.8, "stderr": 0.0178 }, { "prompt": "prompt-3", "metric": "acc", "value": 27.400000000000002, "stderr": 0.02 }, { "prompt": "prompt-4", "metric": "acc", "value": 26.6, "stderr": 0.0198 }, { "prompt": "prompt-5", "metric": "acc", "value": 21.6, "stderr": 0.0184 }, { "prompt": "prompt-6", "metric": "acc", "value": 20, "stderr": 0.0179 } ], "average_accuracy": 22.766666666666666, "best_prompt": 27.400000000000002, "prompt_id": "prompt-3", "CPS": 26.130466666666667 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 28.18, "stderr": 0.0225 }, { "prompt": "prompt-2", "metric": "acc", "value": 27.43, "stderr": 0.0223 }, { "prompt": "prompt-3", "metric": "acc", "value": 26.179999999999996, "stderr": 0.022 }, { "prompt": "prompt-4", "metric": "acc", "value": 26.43, "stderr": 0.022 }, { "prompt": "prompt-5", "metric": "acc", "value": 28.18, "stderr": 0.0225 }, { "prompt": "prompt-6", "metric": "acc", "value": 27.68, "stderr": 0.0224 } ], "average_accuracy": 27.346666666666668, "best_prompt": 28.18, "prompt_id": "prompt-1", "CPS": 27.94516666666667 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 62.53999999999999, "stderr": 0.0124 }, { "prompt": "prompt-2", "metric": "f1", "value": 62.7, "stderr": 0.0113 }, { "prompt": "prompt-3", "metric": "f1", "value": 32.79, "stderr": 0.0161 }, { "prompt": "prompt-4", "metric": "f1", "value": 15.73, "stderr": 0.0154 }, { "prompt": "prompt-5", "metric": "f1", "value": 55.57, "stderr": 0.0141 }, { "prompt": "prompt-6", "metric": "f1", "value": 32.07, "stderr": 0.0167 } ], "average_accuracy": 43.56666666666666, "best_prompt": 62.7, "prompt_id": "prompt-2", "CPS": 50.7034 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 0.04, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 0, "stderr": null } ], "average_accuracy": 0.02, "best_prompt": 0.04, "prompt_id": "prompt-1", "CPS": 0.039992 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 10.476448133785748, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 10.781715947649054, "stderr": null } ], "average_accuracy": 10.629082040717401, "best_prompt": 10.781715947649054, "prompt_id": "prompt-2", "CPS": 10.765259393363884 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 20.369999999999997, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 11.540000000000001, "stderr": null } ], "average_accuracy": 15.954999999999998, "best_prompt": 20.369999999999997, "prompt_id": "prompt-1", "CPS": 19.470664499999998 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 44.86, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 49.38, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 42.44, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 41.94, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 27.779999999999998, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 58.489999999999995, "stderr": null } ], "average_accuracy": 44.14833333333333, "best_prompt": 58.489999999999995, "prompt_id": "prompt-6", "CPS": 50.10155916666666 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 23.32, 
"stderr": 0.0097 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 22.98, "stderr": 0.0095 } ], "average_accuracy": 23.15, "best_prompt": 23.32, "prompt_id": "prompt-1", "CPS": 23.280356 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 45.25, "stderr": 0.0249 }, { "prompt": "prompt-2", "metric": "acc", "value": 46.75, "stderr": 0.025 }, { "prompt": "prompt-3", "metric": "acc", "value": 50, "stderr": 0.025 }, { "prompt": "prompt-4", "metric": "acc", "value": 54.75, "stderr": 0.0249 }, { "prompt": "prompt-5", "metric": "acc", "value": 55.25, "stderr": 0.0249 }, { "prompt": "prompt-6", "metric": "acc", "value": 55.50000000000001, "stderr": 0.0249 } ], "average_accuracy": 51.25, "best_prompt": 55.50000000000001, "prompt_id": "prompt-6", "CPS": 53.14125 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 0, "stderr": 0 }, { "prompt": "prompt-2", "metric": "f1", "value": 0, "stderr": 0 }, { "prompt": "prompt-3", "metric": "f1", "value": 11.15, "stderr": 0.0264 }, { "prompt": "prompt-4", "metric": "f1", "value": 27.88, "stderr": 0.0324 }, { "prompt": "prompt-5", "metric": "f1", "value": 66.4, "stderr": 0.0201 }, { "prompt": "prompt-6", "metric": "f1", "value": 61.38, "stderr": 0.0218 } ], "average_accuracy": 27.801666666666666, "best_prompt": 66.4, "prompt_id": "prompt-5", "CPS": 40.77070666666666 } }
54.719077
{ "model_name": "Qwen/Qwen2.5-14B-Instruct-1M", "num_fewshot": "5", "batch_size": 1, "model": "Qwen/Qwen2.5-14B-Instruct-1M", "base_model": "Qwen2ForCausalLM", "revision": "620fad32de7bdd2293b3d99b39eba2fe63e97438", "submitted_time": "2025-01-23T13:23:24", "num_params_billion": 14.770033664, "language": "en" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 40, "stderr": 0.0219 }, { "prompt": "prompt-2", "metric": "acc", "value": 40.2, "stderr": 0.0219 }, { "prompt": "prompt-3", "metric": "acc", "value": 85.6, "stderr": 0.0157 }, { "prompt": "prompt-4", "metric": "acc", "value": 85.39999999999999, "stderr": 0.0158 }, { "prompt": "prompt-5", "metric": "acc", "value": 40.6, "stderr": 0.022 }, { "prompt": "prompt-6", "metric": "acc", "value": 40.6, "stderr": 0.022 } ], "average_accuracy": 55.400000000000006, "best_prompt": 85.6, "prompt_id": "prompt-3", "CPS": 59.74880000000001 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 28.68, "stderr": 0.0226 }, { "prompt": "prompt-2", "metric": "acc", "value": 28.93, "stderr": 0.0227 }, { "prompt": "prompt-3", "metric": "acc", "value": 99.25, "stderr": 0.0043 }, { "prompt": "prompt-4", "metric": "acc", "value": 99.5, "stderr": 0.0035 }, { "prompt": "prompt-5", "metric": "acc", "value": 28.93, "stderr": 0.0227 }, { "prompt": "prompt-6", "metric": "acc", "value": 28.93, "stderr": 0.0227 } ], "average_accuracy": 52.370000000000005, "best_prompt": 99.5, "prompt_id": "prompt-4", "CPS": 52.605650000000004 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 64.05, "stderr": 0.0154 }, { "prompt": "prompt-2", "metric": "f1", "value": 65.62, "stderr": 0.0151 }, { "prompt": "prompt-3", "metric": "f1", "value": 75.91, "stderr": 0.0119 }, { "prompt": "prompt-4", "metric": "f1", "value": 75.96000000000001, "stderr": 0.0118 }, { "prompt": "prompt-5", "metric": "f1", "value": 74.07000000000001, "stderr": 0.0115 }, { "prompt": "prompt-6", "metric": "f1", "value": 75.17, "stderr": 0.0112 } ], "average_accuracy": 71.79666666666667, "best_prompt": 75.96000000000001, "prompt_id": "prompt-4", "CPS": 72.797532 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 35.24, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 34.050000000000004, "stderr": null } ], "average_accuracy": 34.645, "best_prompt": 35.24, "prompt_id": "prompt-1", "CPS": 35.030322 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 35.141913475521086, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 34.90836403296171, "stderr": null } ], "average_accuracy": 35.0251387542414, "best_prompt": 35.141913475521086, "prompt_id": "prompt-1", "CPS": 35.100876604007695 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 45.56, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 39.7, "stderr": null } ], "average_accuracy": 42.63, "best_prompt": 45.56, "prompt_id": "prompt-1", "CPS": 44.225092000000004 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 70.8, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 71.3, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 78.25999999999999, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 77.98, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 65.93, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 66.25, "stderr": null } ], "average_accuracy": 71.75333333333333, "best_prompt": 78.25999999999999, "prompt_id": "prompt-3", "CPS": 73.16788266666666 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 24.98, "stderr": 0.0096 }, { "prompt": "prompt-2", "metric": 
"rouge1", "value": 26.040000000000003, "stderr": 0.0096 } ], "average_accuracy": 25.51, "best_prompt": 26.040000000000003, "prompt_id": "prompt-2", "CPS": 25.901988000000003 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 84.75, "stderr": 0.018 }, { "prompt": "prompt-2", "metric": "acc", "value": 86, "stderr": 0.0174 }, { "prompt": "prompt-3", "metric": "acc", "value": 86, "stderr": 0.0174 }, { "prompt": "prompt-4", "metric": "acc", "value": 86.5, "stderr": 0.0171 }, { "prompt": "prompt-5", "metric": "acc", "value": 83.75, "stderr": 0.0185 }, { "prompt": "prompt-6", "metric": "acc", "value": 81.75, "stderr": 0.0193 } ], "average_accuracy": 84.79166666666667, "best_prompt": 86.5, "prompt_id": "prompt-4", "CPS": 85.02229166666666 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 55.00000000000001, "stderr": 0.0316 }, { "prompt": "prompt-2", "metric": "f1", "value": 62.3, "stderr": 0.0292 }, { "prompt": "prompt-3", "metric": "f1", "value": 56.82000000000001, "stderr": 0.0317 }, { "prompt": "prompt-4", "metric": "f1", "value": 63.27, "stderr": 0.0293 }, { "prompt": "prompt-5", "metric": "f1", "value": 65.29, "stderr": 0.023 }, { "prompt": "prompt-6", "metric": "f1", "value": 67.52, "stderr": 0.0217 } ], "average_accuracy": 61.699999999999996, "best_prompt": 67.52, "prompt_id": "prompt-6", "CPS": 63.59033599999999 } }
44.361212
{ "model_name": "Qwen/Qwen2.5-14B-Instruct-1M", "num_fewshot": "None", "batch_size": 1, "model": "Qwen/Qwen2.5-14B-Instruct-1M", "base_model": "Qwen2ForCausalLM", "revision": "620fad32de7bdd2293b3d99b39eba2fe63e97438", "submitted_time": "2025-01-23T13:23:24", "num_params_billion": 14.770033664, "language": "en" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 26.6, "stderr": 0.0198 }, { "prompt": "prompt-2", "metric": "acc", "value": 25.6, "stderr": 0.0195 }, { "prompt": "prompt-3", "metric": "acc", "value": 83.39999999999999, "stderr": 0.0167 }, { "prompt": "prompt-4", "metric": "acc", "value": 82.8, "stderr": 0.0169 }, { "prompt": "prompt-5", "metric": "acc", "value": 33.6, "stderr": 0.0211 }, { "prompt": "prompt-6", "metric": "acc", "value": 31.8, "stderr": 0.0208 } ], "average_accuracy": 47.29999999999999, "best_prompt": 83.39999999999999, "prompt_id": "prompt-3", "CPS": 53.29259999999999 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 31.669999999999998, "stderr": 0.0233 }, { "prompt": "prompt-2", "metric": "acc", "value": 31.419999999999998, "stderr": 0.0232 }, { "prompt": "prompt-3", "metric": "acc", "value": 98.5, "stderr": 0.0061 }, { "prompt": "prompt-4", "metric": "acc", "value": 98.5, "stderr": 0.0061 }, { "prompt": "prompt-5", "metric": "acc", "value": 30.42, "stderr": 0.023 }, { "prompt": "prompt-6", "metric": "acc", "value": 31.919999999999998, "stderr": 0.0233 } ], "average_accuracy": 53.738333333333344, "best_prompt": 98.5, "prompt_id": "prompt-3", "CPS": 54.40975833333334 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 19.33, "stderr": 0.0169 }, { "prompt": "prompt-2", "metric": "f1", "value": 48.22, "stderr": 0.0149 }, { "prompt": "prompt-3", "metric": "f1", "value": 71.09, "stderr": 0.0132 }, { "prompt": "prompt-4", "metric": "f1", "value": 75.11, "stderr": 0.0115 }, { "prompt": "prompt-5", "metric": "f1", "value": 53.54, "stderr": 0.0129 }, { "prompt": "prompt-6", "metric": "f1", "value": 62.41, "stderr": 0.0113 } ], "average_accuracy": 54.95000000000001, "best_prompt": 75.11, "prompt_id": "prompt-4", "CPS": 59.96782400000001 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 34.72, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 37.230000000000004, "stderr": null } ], "average_accuracy": 35.975, "best_prompt": 37.230000000000004, "prompt_id": "prompt-2", "CPS": 36.7627635 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 4.9778259815802235, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 8.284698254968493, "stderr": null } ], "average_accuracy": 6.631262118274359, "best_prompt": 8.284698254968493, "prompt_id": "prompt-2", "CPS": 8.147716060204775 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 33.45, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 32.68, "stderr": null } ], "average_accuracy": 33.065, "best_prompt": 33.45, "prompt_id": "prompt-1", "CPS": 33.3212175 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 27.61, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 48.53, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 74.98, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 72.96000000000001, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 31.94, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 26.52, "stderr": null } ], "average_accuracy": 47.09, "best_prompt": 74.98, "prompt_id": "prompt-3", "CPS": 54.06807800000001 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 25.009999999999998, "stderr": 0.0086 }, { "prompt": 
"prompt-2", "metric": "rouge1", "value": 25.19, "stderr": 0.0081 } ], "average_accuracy": 25.1, "best_prompt": 25.19, "prompt_id": "prompt-2", "CPS": 25.167329000000002 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 60.25, "stderr": 0.0245 }, { "prompt": "prompt-2", "metric": "acc", "value": 63.74999999999999, "stderr": 0.0241 }, { "prompt": "prompt-3", "metric": "acc", "value": 83, "stderr": 0.0188 }, { "prompt": "prompt-4", "metric": "acc", "value": 86.75, "stderr": 0.017 }, { "prompt": "prompt-5", "metric": "acc", "value": 52.5, "stderr": 0.025 }, { "prompt": "prompt-6", "metric": "acc", "value": 55.00000000000001, "stderr": 0.0249 } ], "average_accuracy": 66.875, "best_prompt": 86.75, "prompt_id": "prompt-4", "CPS": 69.5084375 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 33.660000000000004, "stderr": 0.035 }, { "prompt": "prompt-2", "metric": "f1", "value": 59.8, "stderr": 0.0287 }, { "prompt": "prompt-3", "metric": "f1", "value": 10.61, "stderr": 0.0261 }, { "prompt": "prompt-4", "metric": "f1", "value": 2.37, "stderr": 0.0134 }, { "prompt": "prompt-5", "metric": "f1", "value": 66.75999999999999, "stderr": 0.0199 }, { "prompt": "prompt-6", "metric": "f1", "value": 67.2, "stderr": 0.0199 } ], "average_accuracy": 40.06666666666666, "best_prompt": 67.2, "prompt_id": "prompt-6", "CPS": 48.96639999999999 } }
52.621802
{ "model_name": "Qwen/Qwen2.5-7B-Instruct", "num_fewshot": "5", "batch_size": 1, "model": "Qwen/Qwen2.5-7B-Instruct", "base_model": "Qwen2ForCausalLM", "revision": "a09a35458c702b33eeacc393d103063234e8bc28", "submitted_time": "2024-09-16T11:55:40", "num_params_billion": 7.615616512, "language": "en" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 32.4, "stderr": 0.021 }, { "prompt": "prompt-2", "metric": "acc", "value": 32.800000000000004, "stderr": 0.021 }, { "prompt": "prompt-3", "metric": "acc", "value": 74.4, "stderr": 0.0195 }, { "prompt": "prompt-4", "metric": "acc", "value": 75.8, "stderr": 0.0192 }, { "prompt": "prompt-5", "metric": "acc", "value": 33, "stderr": 0.021 }, { "prompt": "prompt-6", "metric": "acc", "value": 33.2, "stderr": 0.0211 } ], "average_accuracy": 46.93333333333334, "best_prompt": 75.8, "prompt_id": "prompt-4", "CPS": 53.919066666666666 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 27.68, "stderr": 0.0224 }, { "prompt": "prompt-2", "metric": "acc", "value": 28.68, "stderr": 0.0226 }, { "prompt": "prompt-3", "metric": "acc", "value": 98.5, "stderr": 0.0061 }, { "prompt": "prompt-4", "metric": "acc", "value": 98.75, "stderr": 0.0055 }, { "prompt": "prompt-5", "metric": "acc", "value": 28.68, "stderr": 0.0226 }, { "prompt": "prompt-6", "metric": "acc", "value": 28.93, "stderr": 0.0227 } ], "average_accuracy": 51.870000000000005, "best_prompt": 98.75, "prompt_id": "prompt-4", "CPS": 52.45600000000001 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 62.39, "stderr": 0.0151 }, { "prompt": "prompt-2", "metric": "f1", "value": 57.93000000000001, "stderr": 0.016 }, { "prompt": "prompt-3", "metric": "f1", "value": 72, "stderr": 0.0126 }, { "prompt": "prompt-4", "metric": "f1", "value": 72.04, "stderr": 0.0126 }, { "prompt": "prompt-5", "metric": "f1", "value": 70.67, "stderr": 0.0125 }, { "prompt": "prompt-6", "metric": "f1", "value": 67.80000000000001, "stderr": 0.0128 } ], "average_accuracy": 67.13833333333334, "best_prompt": 72.04, "prompt_id": "prompt-4", "CPS": 68.50883933333334 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 27.41, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 28.29, "stderr": null } ], "average_accuracy": 27.85, "best_prompt": 28.29, "prompt_id": "prompt-2", "CPS": 28.165524 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 35.579737033446435, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 33.31246243334949, "stderr": null } ], "average_accuracy": 34.44609973339796, "best_prompt": 35.579737033446435, "prompt_id": "prompt-1", "CPS": 35.17639186317613 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 36.559999999999995, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 36.720000000000006, "stderr": null } ], "average_accuracy": 36.64, "best_prompt": 36.720000000000006, "prompt_id": "prompt-2", "CPS": 36.69062400000001 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 72.00999999999999, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 73.39, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 74.45, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 75.35, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 73.6, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 73.85000000000001, "stderr": null } ], "average_accuracy": 73.77499999999999, "best_prompt": 75.35, "prompt_id": "prompt-4", "CPS": 74.1632375 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 29.74, "stderr": 0.0104 }, { "prompt": "prompt-2", 
"metric": "rouge1", "value": 30.98, "stderr": 0.0104 } ], "average_accuracy": 30.36, "best_prompt": 30.98, "prompt_id": "prompt-2", "CPS": 30.787924 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 83, "stderr": 0.0188 }, { "prompt": "prompt-2", "metric": "acc", "value": 85, "stderr": 0.0179 }, { "prompt": "prompt-3", "metric": "acc", "value": 83.5, "stderr": 0.0186 }, { "prompt": "prompt-4", "metric": "acc", "value": 82.75, "stderr": 0.0189 }, { "prompt": "prompt-5", "metric": "acc", "value": 83, "stderr": 0.0188 }, { "prompt": "prompt-6", "metric": "acc", "value": 83.25, "stderr": 0.0187 } ], "average_accuracy": 83.41666666666667, "best_prompt": 85, "prompt_id": "prompt-2", "CPS": 83.65416666666667 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 64.02, "stderr": 0.0264 }, { "prompt": "prompt-2", "metric": "f1", "value": 60.589999999999996, "stderr": 0.0287 }, { "prompt": "prompt-3", "metric": "f1", "value": 52.35999999999999, "stderr": 0.0311 }, { "prompt": "prompt-4", "metric": "f1", "value": 56.46, "stderr": 0.0305 }, { "prompt": "prompt-5", "metric": "f1", "value": 61.370000000000005, "stderr": 0.027 }, { "prompt": "prompt-6", "metric": "f1", "value": 67.5, "stderr": 0.0228 } ], "average_accuracy": 60.383333333333326, "best_prompt": 67.5, "prompt_id": "prompt-6", "CPS": 62.69625 } }
45.497945
{ "model_name": "Qwen/Qwen2.5-7B-Instruct", "num_fewshot": "None", "batch_size": 1, "model": "Qwen/Qwen2.5-7B-Instruct", "base_model": "Qwen2ForCausalLM", "revision": "a09a35458c702b33eeacc393d103063234e8bc28", "submitted_time": "2024-09-16T11:55:40", "num_params_billion": 7.615616512, "language": "en" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 24.8, "stderr": 0.0193 }, { "prompt": "prompt-2", "metric": "acc", "value": 24.4, "stderr": 0.0192 }, { "prompt": "prompt-3", "metric": "acc", "value": 70.39999999999999, "stderr": 0.0204 }, { "prompt": "prompt-4", "metric": "acc", "value": 70, "stderr": 0.0205 }, { "prompt": "prompt-5", "metric": "acc", "value": 28.000000000000004, "stderr": 0.0201 }, { "prompt": "prompt-6", "metric": "acc", "value": 28.599999999999998, "stderr": 0.0202 } ], "average_accuracy": 41.03333333333333, "best_prompt": 70.39999999999999, "prompt_id": "prompt-3", "CPS": 49.72586666666667 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 29.93, "stderr": 0.0229 }, { "prompt": "prompt-2", "metric": "acc", "value": 30.17, "stderr": 0.023 }, { "prompt": "prompt-3", "metric": "acc", "value": 93.02, "stderr": 0.0127 }, { "prompt": "prompt-4", "metric": "acc", "value": 97.26, "stderr": 0.0082 }, { "prompt": "prompt-5", "metric": "acc", "value": 29.18, "stderr": 0.0227 }, { "prompt": "prompt-6", "metric": "acc", "value": 29.93, "stderr": 0.0229 } ], "average_accuracy": 51.58166666666667, "best_prompt": 97.26, "prompt_id": "prompt-4", "CPS": 52.833253000000006 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 64.67, "stderr": 0.0121 }, { "prompt": "prompt-2", "metric": "f1", "value": 61.46, "stderr": 0.0116 }, { "prompt": "prompt-3", "metric": "f1", "value": 48.63, "stderr": 0.0169 }, { "prompt": "prompt-4", "metric": "f1", "value": 72.54, "stderr": 0.0115 }, { "prompt": "prompt-5", "metric": "f1", "value": 63.22, "stderr": 0.0119 }, { "prompt": "prompt-6", "metric": "f1", "value": 58.74, "stderr": 0.0128 } ], "average_accuracy": 61.54333333333333, "best_prompt": 72.54, "prompt_id": "prompt-4", "CPS": 64.563018 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 31.630000000000003, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 33.22, "stderr": null } ], "average_accuracy": 32.425, "best_prompt": 33.22, "prompt_id": "prompt-2", "CPS": 32.955901 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 4.627015269025691, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 15.85720673776054, "stderr": null } ], "average_accuracy": 10.242111003393116, "best_prompt": 15.85720673776054, "prompt_id": "prompt-2", "CPS": 14.966809398638723 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 36.35, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 35.86, "stderr": null } ], "average_accuracy": 36.105000000000004, "best_prompt": 36.35, "prompt_id": "prompt-1", "CPS": 36.260942500000006 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 43.15, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 41.42, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 61.35, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 70.43, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 57.28, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 36.09, "stderr": null } ], "average_accuracy": 51.620000000000005, "best_prompt": 70.43, "prompt_id": "prompt-4", "CPS": 57.182117000000005 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 24.25, "stderr": 0.0083 }, { "prompt": "prompt-2", "metric": 
"rouge1", "value": 25.56, "stderr": 0.0089 } ], "average_accuracy": 24.905, "best_prompt": 25.56, "prompt_id": "prompt-2", "CPS": 25.392582 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 82.25, "stderr": 0.0191 }, { "prompt": "prompt-2", "metric": "acc", "value": 82.25, "stderr": 0.0191 }, { "prompt": "prompt-3", "metric": "acc", "value": 81.75, "stderr": 0.0193 }, { "prompt": "prompt-4", "metric": "acc", "value": 82.5, "stderr": 0.019 }, { "prompt": "prompt-5", "metric": "acc", "value": 57.75, "stderr": 0.0247 }, { "prompt": "prompt-6", "metric": "acc", "value": 55.50000000000001, "stderr": 0.0249 } ], "average_accuracy": 73.66666666666667, "best_prompt": 82.5, "prompt_id": "prompt-4", "CPS": 75.2125 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 47.31, "stderr": 0.032 }, { "prompt": "prompt-2", "metric": "f1", "value": 31.019999999999996, "stderr": 0.0345 }, { "prompt": "prompt-3", "metric": "f1", "value": 6.950000000000001, "stderr": 0.022 }, { "prompt": "prompt-4", "metric": "f1", "value": 7.6899999999999995, "stderr": 0.0229 }, { "prompt": "prompt-5", "metric": "f1", "value": 67.11, "stderr": 0.0199 }, { "prompt": "prompt-6", "metric": "f1", "value": 52.83, "stderr": 0.0262 } ], "average_accuracy": 35.48499999999999, "best_prompt": 67.11, "prompt_id": "prompt-5", "CPS": 45.88646249999999 } }
51.712406
{ "model_name": "arcee-ai/Llama-3.1-SuperNova-Lite", "num_fewshot": "5", "batch_size": 1, "model": "arcee-ai/Llama-3.1-SuperNova-Lite", "base_model": "LlamaForCausalLM", "revision": "2469efed9632c061cb4082755ead1109cd2a90cd", "submitted_time": "2024-09-10T14:56:49", "num_params_billion": 8.030261248, "language": "en" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 38.4, "stderr": 0.0218 }, { "prompt": "prompt-2", "metric": "acc", "value": 39.2, "stderr": 0.0219 }, { "prompt": "prompt-3", "metric": "acc", "value": 68.60000000000001, "stderr": 0.0208 }, { "prompt": "prompt-4", "metric": "acc", "value": 69.19999999999999, "stderr": 0.0207 }, { "prompt": "prompt-5", "metric": "acc", "value": 37.2, "stderr": 0.0216 }, { "prompt": "prompt-6", "metric": "acc", "value": 38, "stderr": 0.0217 } ], "average_accuracy": 48.43333333333333, "best_prompt": 69.19999999999999, "prompt_id": "prompt-4", "CPS": 54.82946666666667 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 30.919999999999998, "stderr": 0.0231 }, { "prompt": "prompt-2", "metric": "acc", "value": 31.169999999999998, "stderr": 0.0232 }, { "prompt": "prompt-3", "metric": "acc", "value": 91.52, "stderr": 0.0139 }, { "prompt": "prompt-4", "metric": "acc", "value": 91.02, "stderr": 0.0143 }, { "prompt": "prompt-5", "metric": "acc", "value": 30.669999999999998, "stderr": 0.0231 }, { "prompt": "prompt-6", "metric": "acc", "value": 30.669999999999998, "stderr": 0.0231 } ], "average_accuracy": 50.995000000000005, "best_prompt": 91.52, "prompt_id": "prompt-3", "CPS": 54.431520000000006 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 59.650000000000006, "stderr": 0.0152 }, { "prompt": "prompt-2", "metric": "f1", "value": 64.34, "stderr": 0.0146 }, { "prompt": "prompt-3", "metric": "f1", "value": 71.7, "stderr": 0.012 }, { "prompt": "prompt-4", "metric": "f1", "value": 73.92999999999999, "stderr": 0.012 }, { "prompt": "prompt-5", "metric": "f1", "value": 68.26, "stderr": 0.0122 }, { "prompt": "prompt-6", "metric": "f1", "value": 67.63, "stderr": 0.0127 } ], "average_accuracy": 67.585, "best_prompt": 73.92999999999999, "prompt_id": "prompt-4", "CPS": 69.23914149999999 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 28.98, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 29.59, "stderr": null } ], "average_accuracy": 29.285, "best_prompt": 29.59, "prompt_id": "prompt-2", "CPS": 29.4997505 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 36.14765996122152, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 40.33784900630151, "stderr": null } ], "average_accuracy": 38.242754483761516, "best_prompt": 40.33784900630151, "prompt_id": "prompt-2", "CPS": 39.49273294126003 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 36.21, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 35.14, "stderr": null } ], "average_accuracy": 35.675, "best_prompt": 36.21, "prompt_id": "prompt-1", "CPS": 36.0162765 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 70.27, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 71.17999999999999, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 78, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 79.03, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 72.26, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 74.63, "stderr": null } ], "average_accuracy": 74.22833333333334, "best_prompt": 79.03, "prompt_id": "prompt-4", "CPS": 75.23524283333334 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 21.11, "stderr": 
0.0083 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 21.66, "stderr": 0.0079 } ], "average_accuracy": 21.384999999999998, "best_prompt": 21.66, "prompt_id": "prompt-2", "CPS": 21.600435 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 74.25, "stderr": 0.0219 }, { "prompt": "prompt-2", "metric": "acc", "value": 75.5, "stderr": 0.0215 }, { "prompt": "prompt-3", "metric": "acc", "value": 76, "stderr": 0.0214 }, { "prompt": "prompt-4", "metric": "acc", "value": 75.25, "stderr": 0.0216 }, { "prompt": "prompt-5", "metric": "acc", "value": 69.5, "stderr": 0.023 }, { "prompt": "prompt-6", "metric": "acc", "value": 71.25, "stderr": 0.0227 } ], "average_accuracy": 73.625, "best_prompt": 76, "prompt_id": "prompt-3", "CPS": 74.195 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 60.84, "stderr": 0.0242 }, { "prompt": "prompt-2", "metric": "f1", "value": 61.59, "stderr": 0.0236 }, { "prompt": "prompt-3", "metric": "f1", "value": 58.81999999999999, "stderr": 0.0255 }, { "prompt": "prompt-4", "metric": "f1", "value": 60.11, "stderr": 0.0246 }, { "prompt": "prompt-5", "metric": "f1", "value": 61.89, "stderr": 0.0239 }, { "prompt": "prompt-6", "metric": "f1", "value": 64.86, "stderr": 0.0229 } ], "average_accuracy": 61.35166666666667, "best_prompt": 64.86, "prompt_id": "prompt-6", "CPS": 62.584495 } }
42.659156
{ "model_name": "arcee-ai/Llama-3.1-SuperNova-Lite", "num_fewshot": "None", "batch_size": 1, "model": "arcee-ai/Llama-3.1-SuperNova-Lite", "base_model": "LlamaForCausalLM", "revision": "2469efed9632c061cb4082755ead1109cd2a90cd", "submitted_time": "2024-09-10T14:56:49", "num_params_billion": 8.030261248, "language": "en" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 27, "stderr": 0.0199 }, { "prompt": "prompt-2", "metric": "acc", "value": 26.200000000000003, "stderr": 0.0197 }, { "prompt": "prompt-3", "metric": "acc", "value": 68.60000000000001, "stderr": 0.0208 }, { "prompt": "prompt-4", "metric": "acc", "value": 67.4, "stderr": 0.021 }, { "prompt": "prompt-5", "metric": "acc", "value": 36.8, "stderr": 0.0216 }, { "prompt": "prompt-6", "metric": "acc", "value": 35.8, "stderr": 0.0215 } ], "average_accuracy": 43.63333333333333, "best_prompt": 68.60000000000001, "prompt_id": "prompt-3", "CPS": 51.47286666666667 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 31.669999999999998, "stderr": 0.0233 }, { "prompt": "prompt-2", "metric": "acc", "value": 31.169999999999998, "stderr": 0.0232 }, { "prompt": "prompt-3", "metric": "acc", "value": 89.78, "stderr": 0.0151 }, { "prompt": "prompt-4", "metric": "acc", "value": 90.52, "stderr": 0.0146 }, { "prompt": "prompt-5", "metric": "acc", "value": 31.669999999999998, "stderr": 0.0233 }, { "prompt": "prompt-6", "metric": "acc", "value": 32.17, "stderr": 0.0234 } ], "average_accuracy": 51.163333333333334, "best_prompt": 90.52, "prompt_id": "prompt-4", "CPS": 54.894345333333334 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 30.73, "stderr": 0.0177 }, { "prompt": "prompt-2", "metric": "f1", "value": 48.1, "stderr": 0.0159 }, { "prompt": "prompt-3", "metric": "f1", "value": 49.89, "stderr": 0.0143 }, { "prompt": "prompt-4", "metric": "f1", "value": 57.769999999999996, "stderr": 0.0149 }, { "prompt": "prompt-5", "metric": "f1", "value": 63.800000000000004, "stderr": 0.0114 }, { "prompt": "prompt-6", "metric": "f1", "value": 62.970000000000006, "stderr": 0.0112 } ], "average_accuracy": 52.21000000000001, "best_prompt": 63.800000000000004, "prompt_id": "prompt-5", "CPS": 56.40558 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 19.8, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 24.29, "stderr": null } ], "average_accuracy": 22.045, "best_prompt": 24.29, "prompt_id": "prompt-2", "CPS": 23.7446895 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 7.615676199709162, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 10.105801017935047, "stderr": null } ], "average_accuracy": 8.860738608822103, "best_prompt": 10.105801017935047, "prompt_id": "prompt-2", "CPS": 9.979977488320985 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 15.58, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 20.06, "stderr": null } ], "average_accuracy": 17.82, "best_prompt": 20.06, "prompt_id": "prompt-2", "CPS": 19.610656 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 50.36000000000001, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 46.96, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 69.5, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 77.12, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 72.34, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 66.52, "stderr": null } ], "average_accuracy": 63.79999999999999, "best_prompt": 77.12, "prompt_id": "prompt-4", "CPS": 66.84761599999999 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 22.71, 
"stderr": 0.0083 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 22.78, "stderr": 0.0079 } ], "average_accuracy": 22.745, "best_prompt": 22.78, "prompt_id": "prompt-2", "CPS": 22.772027 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 72.25, "stderr": 0.0224 }, { "prompt": "prompt-2", "metric": "acc", "value": 69.75, "stderr": 0.023 }, { "prompt": "prompt-3", "metric": "acc", "value": 74, "stderr": 0.022 }, { "prompt": "prompt-4", "metric": "acc", "value": 74.5, "stderr": 0.0218 }, { "prompt": "prompt-5", "metric": "acc", "value": 58.75, "stderr": 0.0246 }, { "prompt": "prompt-6", "metric": "acc", "value": 62, "stderr": 0.0243 } ], "average_accuracy": 68.54166666666667, "best_prompt": 74.5, "prompt_id": "prompt-4", "CPS": 70.06104166666667 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 43.059999999999995, "stderr": 0.0298 }, { "prompt": "prompt-2", "metric": "f1", "value": 23.23, "stderr": 0.0318 }, { "prompt": "prompt-3", "metric": "f1", "value": 54.339999999999996, "stderr": 0.0265 }, { "prompt": "prompt-4", "metric": "f1", "value": 4.6, "stderr": 0.018 }, { "prompt": "prompt-5", "metric": "f1", "value": 65.49000000000001, "stderr": 0.0204 }, { "prompt": "prompt-6", "metric": "f1", "value": 66.58, "stderr": 0.02 } ], "average_accuracy": 42.88333333333333, "best_prompt": 66.58, "prompt_id": "prompt-6", "CPS": 50.802759333333334 } }
53.639798
{ "model_name": "google/gemma-2-9b-it", "num_fewshot": "5", "batch_size": 1, "model": "google/gemma-2-9b-it", "base_model": "Gemma2ForCausalLM", "revision": "11c9b309abf73637e4b6f9a3fa1e92e615547819", "submitted_time": "2024-06-24T08:05:41", "num_params_billion": 9.241705984, "language": "" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 44.4, "stderr": 0.0222 }, { "prompt": "prompt-2", "metric": "acc", "value": 44.2, "stderr": 0.0222 }, { "prompt": "prompt-3", "metric": "acc", "value": 75.8, "stderr": 0.0192 }, { "prompt": "prompt-4", "metric": "acc", "value": 75.8, "stderr": 0.0192 }, { "prompt": "prompt-5", "metric": "acc", "value": 44.800000000000004, "stderr": 0.0223 }, { "prompt": "prompt-6", "metric": "acc", "value": 44.4, "stderr": 0.0222 } ], "average_accuracy": 54.9, "best_prompt": 75.8, "prompt_id": "prompt-3", "CPS": 59.9578 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 27.18, "stderr": 0.0222 }, { "prompt": "prompt-2", "metric": "acc", "value": 27.43, "stderr": 0.0223 }, { "prompt": "prompt-3", "metric": "acc", "value": 98.75, "stderr": 0.0055 }, { "prompt": "prompt-4", "metric": "acc", "value": 98.25, "stderr": 0.0065 }, { "prompt": "prompt-5", "metric": "acc", "value": 27.43, "stderr": 0.0223 }, { "prompt": "prompt-6", "metric": "acc", "value": 27.93, "stderr": 0.0224 } ], "average_accuracy": 51.16166666666667, "best_prompt": 98.75, "prompt_id": "prompt-3", "CPS": 51.75652083333333 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 71.12, "stderr": 0.0126 }, { "prompt": "prompt-2", "metric": "f1", "value": 70.48, "stderr": 0.0127 }, { "prompt": "prompt-3", "metric": "f1", "value": 76.03, "stderr": 0.0107 }, { "prompt": "prompt-4", "metric": "f1", "value": 76, "stderr": 0.0107 }, { "prompt": "prompt-5", "metric": "f1", "value": 65.47, "stderr": 0.0131 }, { "prompt": "prompt-6", "metric": "f1", "value": 65.18, "stderr": 0.0128 } ], "average_accuracy": 70.71333333333334, "best_prompt": 76.03, "prompt_id": "prompt-3", "CPS": 71.98773833333334 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 21.46, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 22.470000000000002, "stderr": null } ], "average_accuracy": 21.965000000000003, "best_prompt": 22.470000000000002, "prompt_id": "prompt-2", "CPS": 22.3565265 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 38.09163717886573, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 37.75306713523993, "stderr": null } ], "average_accuracy": 37.92235215705283, "best_prompt": 38.09163717886573, "prompt_id": "prompt-1", "CPS": 38.027153742558596 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 51.559999999999995, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 50.41, "stderr": null } ], "average_accuracy": 50.985, "best_prompt": 51.559999999999995, "prompt_id": "prompt-1", "CPS": 51.263529999999996 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 73.89, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 73.69, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 71.39999999999999, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 71.61, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 72.11, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 72.23, "stderr": null } ], "average_accuracy": 72.48833333333333, "best_prompt": 73.89, "prompt_id": "prompt-1", "CPS": 72.8543085 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 31.019999999999996, "stderr": 0.0135 }, { "prompt": "prompt-2", "metric": 
"rouge1", "value": 27.229999999999997, "stderr": 0.0119 } ], "average_accuracy": 29.124999999999996, "best_prompt": 31.019999999999996, "prompt_id": "prompt-1", "CPS": 30.432170999999997 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 80.75, "stderr": 0.0197 }, { "prompt": "prompt-2", "metric": "acc", "value": 81.5, "stderr": 0.0194 }, { "prompt": "prompt-3", "metric": "acc", "value": 80.75, "stderr": 0.0197 }, { "prompt": "prompt-4", "metric": "acc", "value": 81.5, "stderr": 0.0194 }, { "prompt": "prompt-5", "metric": "acc", "value": 76.5, "stderr": 0.0212 }, { "prompt": "prompt-6", "metric": "acc", "value": 76.75, "stderr": 0.0211 } ], "average_accuracy": 79.625, "best_prompt": 81.5, "prompt_id": "prompt-2", "CPS": 79.971875 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 55.269999999999996, "stderr": 0.029 }, { "prompt": "prompt-2", "metric": "f1", "value": 49.74, "stderr": 0.0315 }, { "prompt": "prompt-3", "metric": "f1", "value": 61.17, "stderr": 0.0268 }, { "prompt": "prompt-4", "metric": "f1", "value": 48.75, "stderr": 0.0323 }, { "prompt": "prompt-5", "metric": "f1", "value": 59.870000000000005, "stderr": 0.0236 }, { "prompt": "prompt-6", "metric": "f1", "value": 59.07, "stderr": 0.0235 } ], "average_accuracy": 55.645, "best_prompt": 61.17, "prompt_id": "prompt-3", "CPS": 57.7903575 } }
47.540894
{ "model_name": "google/gemma-2-9b-it", "num_fewshot": "None", "batch_size": 1, "model": "google/gemma-2-9b-it", "base_model": "Gemma2ForCausalLM", "revision": "11c9b309abf73637e4b6f9a3fa1e92e615547819", "submitted_time": "2024-06-24T08:05:41", "num_params_billion": 9.241705984, "language": "" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 38.6, "stderr": 0.0218 }, { "prompt": "prompt-2", "metric": "acc", "value": 37.2, "stderr": 0.0216 }, { "prompt": "prompt-3", "metric": "acc", "value": 77.60000000000001, "stderr": 0.0187 }, { "prompt": "prompt-4", "metric": "acc", "value": 77.8, "stderr": 0.0186 }, { "prompt": "prompt-5", "metric": "acc", "value": 41.6, "stderr": 0.0221 }, { "prompt": "prompt-6", "metric": "acc", "value": 40.8, "stderr": 0.022 } ], "average_accuracy": 52.26666666666668, "best_prompt": 77.8, "prompt_id": "prompt-4", "CPS": 57.93506666666668 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 28.93, "stderr": 0.0227 }, { "prompt": "prompt-2", "metric": "acc", "value": 28.43, "stderr": 0.0226 }, { "prompt": "prompt-3", "metric": "acc", "value": 91.27, "stderr": 0.0141 }, { "prompt": "prompt-4", "metric": "acc", "value": 98, "stderr": 0.007 }, { "prompt": "prompt-5", "metric": "acc", "value": 28.18, "stderr": 0.0225 }, { "prompt": "prompt-6", "metric": "acc", "value": 28.68, "stderr": 0.0226 } ], "average_accuracy": 50.58166666666667, "best_prompt": 98, "prompt_id": "prompt-4", "CPS": 51.530033333333336 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 58.02, "stderr": 0.0128 }, { "prompt": "prompt-2", "metric": "f1", "value": 49.51, "stderr": 0.0142 }, { "prompt": "prompt-3", "metric": "f1", "value": 68.69, "stderr": 0.0114 }, { "prompt": "prompt-4", "metric": "f1", "value": 69.42, "stderr": 0.0111 }, { "prompt": "prompt-5", "metric": "f1", "value": 63.349999999999994, "stderr": 0.0112 }, { "prompt": "prompt-6", "metric": "f1", "value": 62.56, "stderr": 0.0114 } ], "average_accuracy": 61.925000000000004, "best_prompt": 69.42, "prompt_id": "prompt-4", "CPS": 64.216971 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 25.080000000000002, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 25.5, "stderr": null } ], "average_accuracy": 25.29, "best_prompt": 25.5, "prompt_id": "prompt-2", "CPS": 25.44645 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 32.195804653417355, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 31.070626514784294, "stderr": null } ], "average_accuracy": 31.633215584100824, "best_prompt": 32.195804653417355, "prompt_id": "prompt-1", "CPS": 32.014674575658724 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 35.19, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 33.92, "stderr": null } ], "average_accuracy": 34.555, "best_prompt": 35.19, "prompt_id": "prompt-1", "CPS": 34.9665435 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 40.489999999999995, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 59.24, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 65.64999999999999, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 70.63000000000001, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 65.82000000000001, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 68.61, "stderr": null } ], "average_accuracy": 61.74, "best_prompt": 70.63000000000001, "prompt_id": "prompt-4", "CPS": 64.350993 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 29.54, "stderr": 0.0101 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 
30.65, "stderr": 0.0096 } ], "average_accuracy": 30.095, "best_prompt": 30.65, "prompt_id": "prompt-2", "CPS": 30.479892500000002 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 75.5, "stderr": 0.0215 }, { "prompt": "prompt-2", "metric": "acc", "value": 79, "stderr": 0.0204 }, { "prompt": "prompt-3", "metric": "acc", "value": 73.5, "stderr": 0.0221 }, { "prompt": "prompt-4", "metric": "acc", "value": 74.5, "stderr": 0.0218 }, { "prompt": "prompt-5", "metric": "acc", "value": 56.75, "stderr": 0.0248 }, { "prompt": "prompt-6", "metric": "acc", "value": 61.5, "stderr": 0.0244 } ], "average_accuracy": 70.125, "best_prompt": 79, "prompt_id": "prompt-2", "CPS": 71.98875 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 3.92, "stderr": 0.017 }, { "prompt": "prompt-2", "metric": "f1", "value": 9.16, "stderr": 0.0246 }, { "prompt": "prompt-3", "metric": "f1", "value": 40.239999999999995, "stderr": 0.034 }, { "prompt": "prompt-4", "metric": "f1", "value": 2.37, "stderr": 0.0134 }, { "prompt": "prompt-5", "metric": "f1", "value": 64.67, "stderr": 0.021 }, { "prompt": "prompt-6", "metric": "f1", "value": 61.78, "stderr": 0.0229 } ], "average_accuracy": 30.356666666666666, "best_prompt": 64.67, "prompt_id": "prompt-5", "CPS": 42.479567333333335 } }
53.916448
{ "model_name": "google/gemma-3-12b-it", "num_fewshot": "5", "batch_size": 1, "model": "google/gemma-3-12b-it", "base_model": "Gemma3ForConditionalGeneration", "revision": "96b6f1eccf38110c56df3a15bffe176da04bfd80", "submitted_time": "2025-03-01T19:11:34", "num_params_billion": 12.18732504, "language": "" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 51.800000000000004, "stderr": 0.0224 }, { "prompt": "prompt-2", "metric": "acc", "value": 51.6, "stderr": 0.0224 }, { "prompt": "prompt-3", "metric": "acc", "value": 84.2, "stderr": 0.0163 }, { "prompt": "prompt-4", "metric": "acc", "value": 84.6, "stderr": 0.0162 }, { "prompt": "prompt-5", "metric": "acc", "value": 51, "stderr": 0.0224 }, { "prompt": "prompt-6", "metric": "acc", "value": 51.800000000000004, "stderr": 0.0224 } ], "average_accuracy": 62.50000000000001, "best_prompt": 84.6, "prompt_id": "prompt-4", "CPS": 65.9034 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 28.43, "stderr": 0.0226 }, { "prompt": "prompt-2", "metric": "acc", "value": 28.68, "stderr": 0.0226 }, { "prompt": "prompt-3", "metric": "acc", "value": 99.25, "stderr": 0.0043 }, { "prompt": "prompt-4", "metric": "acc", "value": 98.75, "stderr": 0.0055 }, { "prompt": "prompt-5", "metric": "acc", "value": 28.43, "stderr": 0.0226 }, { "prompt": "prompt-6", "metric": "acc", "value": 28.43, "stderr": 0.0226 } ], "average_accuracy": 51.995000000000005, "best_prompt": 99.25, "prompt_id": "prompt-3", "CPS": 52.3494125 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 67.61, "stderr": 0.014 }, { "prompt": "prompt-2", "metric": "f1", "value": 70.33, "stderr": 0.0132 }, { "prompt": "prompt-3", "metric": "f1", "value": 74.61, "stderr": 0.0122 }, { "prompt": "prompt-4", "metric": "f1", "value": 75.24, "stderr": 0.0118 }, { "prompt": "prompt-5", "metric": "f1", "value": 62.79, "stderr": 0.0153 }, { "prompt": "prompt-6", "metric": "f1", "value": 70.27, "stderr": 0.0128 } ], "average_accuracy": 70.14166666666667, "best_prompt": 75.24, "prompt_id": "prompt-4", "CPS": 71.404014 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 26.479999999999997, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 26.810000000000002, "stderr": null } ], "average_accuracy": 26.645, "best_prompt": 26.810000000000002, "prompt_id": "prompt-2", "CPS": 26.765763500000002 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 37.88587372758119, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 37.7511754726127, "stderr": null } ], "average_accuracy": 37.81852460009695, "best_prompt": 37.88587372758119, "prompt_id": "prompt-1", "CPS": 37.860357922185884 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 41.69, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 41.22, "stderr": null } ], "average_accuracy": 41.455, "best_prompt": 41.69, "prompt_id": "prompt-1", "CPS": 41.5920285 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 77.96, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 78.67, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 77.22, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 77.77, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 78.39, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 78.97999999999999, "stderr": null } ], "average_accuracy": 78.165, "best_prompt": 78.97999999999999, "prompt_id": "prompt-6", "CPS": 78.336313 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 18.5, "stderr": 0.006 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 18.98, 
"stderr": 0.0062 } ], "average_accuracy": 18.740000000000002, "best_prompt": 18.98, "prompt_id": "prompt-2", "CPS": 18.934448 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 81.25, "stderr": 0.0195 }, { "prompt": "prompt-2", "metric": "acc", "value": 80.5, "stderr": 0.0198 }, { "prompt": "prompt-3", "metric": "acc", "value": 78.5, "stderr": 0.0206 }, { "prompt": "prompt-4", "metric": "acc", "value": 78.25, "stderr": 0.0207 }, { "prompt": "prompt-5", "metric": "acc", "value": 77.75, "stderr": 0.0208 }, { "prompt": "prompt-6", "metric": "acc", "value": 78.5, "stderr": 0.0206 } ], "average_accuracy": 79.125, "best_prompt": 81.25, "prompt_id": "prompt-1", "CPS": 79.5234375 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 67.47999999999999, "stderr": 0.0226 }, { "prompt": "prompt-2", "metric": "f1", "value": 67.97, "stderr": 0.0238 }, { "prompt": "prompt-3", "metric": "f1", "value": 70.16, "stderr": 0.0219 }, { "prompt": "prompt-4", "metric": "f1", "value": 69.57, "stderr": 0.023 }, { "prompt": "prompt-5", "metric": "f1", "value": 55.67999999999999, "stderr": 0.026 }, { "prompt": "prompt-6", "metric": "f1", "value": 58.76, "stderr": 0.0239 } ], "average_accuracy": 64.93666666666665, "best_prompt": 70.16, "prompt_id": "prompt-3", "CPS": 66.49530933333332 } }
47.345847
{ "model_name": "google/gemma-3-12b-it", "num_fewshot": "None", "batch_size": 1, "model": "google/gemma-3-12b-it", "base_model": "Gemma3ForConditionalGeneration", "revision": "96b6f1eccf38110c56df3a15bffe176da04bfd80", "submitted_time": "2025-03-01T19:11:34", "num_params_billion": 12.18732504, "language": "" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 34.2, "stderr": 0.0212 }, { "prompt": "prompt-2", "metric": "acc", "value": 32.2, "stderr": 0.0209 }, { "prompt": "prompt-3", "metric": "acc", "value": 82.8, "stderr": 0.0169 }, { "prompt": "prompt-4", "metric": "acc", "value": 83.39999999999999, "stderr": 0.0167 }, { "prompt": "prompt-5", "metric": "acc", "value": 45, "stderr": 0.0223 }, { "prompt": "prompt-6", "metric": "acc", "value": 44.2, "stderr": 0.0222 } ], "average_accuracy": 53.633333333333326, "best_prompt": 83.39999999999999, "prompt_id": "prompt-4", "CPS": 58.5746 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 28.93, "stderr": 0.0227 }, { "prompt": "prompt-2", "metric": "acc", "value": 29.18, "stderr": 0.0227 }, { "prompt": "prompt-3", "metric": "acc", "value": 98.75, "stderr": 0.0055 }, { "prompt": "prompt-4", "metric": "acc", "value": 98.25, "stderr": 0.0065 }, { "prompt": "prompt-5", "metric": "acc", "value": 29.43, "stderr": 0.0228 }, { "prompt": "prompt-6", "metric": "acc", "value": 29.43, "stderr": 0.0228 } ], "average_accuracy": 52.32833333333334, "best_prompt": 98.75, "prompt_id": "prompt-3", "CPS": 52.90860416666667 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 62.55, "stderr": 0.0114 }, { "prompt": "prompt-2", "metric": "f1", "value": 62.050000000000004, "stderr": 0.0115 }, { "prompt": "prompt-3", "metric": "f1", "value": 71.39999999999999, "stderr": 0.0114 }, { "prompt": "prompt-4", "metric": "f1", "value": 70.48, "stderr": 0.0112 }, { "prompt": "prompt-5", "metric": "f1", "value": 64.12, "stderr": 0.0113 }, { "prompt": "prompt-6", "metric": "f1", "value": 62.43, "stderr": 0.0113 } ], "average_accuracy": 65.50500000000001, "best_prompt": 71.39999999999999, "prompt_id": "prompt-3", "CPS": 67.19097000000001 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 18.02, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 22.33, "stderr": null } ], "average_accuracy": 20.174999999999997, "best_prompt": 22.33, "prompt_id": "prompt-2", "CPS": 21.848788499999998 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 32.46615244789142, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 26.504057198254966, "stderr": null } ], "average_accuracy": 29.485104823073193, "best_prompt": 32.46615244789142, "prompt_id": "prompt-1", "CPS": 31.498320981473686 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 32.269999999999996, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 32.04, "stderr": null } ], "average_accuracy": 32.155, "best_prompt": 32.269999999999996, "prompt_id": "prompt-1", "CPS": 32.2328895 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 52.800000000000004, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 57.489999999999995, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 69.17999999999999, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 74.6, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 64.66, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 26.840000000000003, "stderr": null } ], "average_accuracy": 57.59499999999999, "best_prompt": 74.6, "prompt_id": "prompt-4", "CPS": 61.914269999999995 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", 
"value": 28.79, "stderr": 0.0094 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 30.130000000000003, "stderr": 0.0092 } ], "average_accuracy": 29.46, "best_prompt": 30.130000000000003, "prompt_id": "prompt-2", "CPS": 29.928129000000002 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 78, "stderr": 0.0207 }, { "prompt": "prompt-2", "metric": "acc", "value": 77.25, "stderr": 0.021 }, { "prompt": "prompt-3", "metric": "acc", "value": 77.5, "stderr": 0.0209 }, { "prompt": "prompt-4", "metric": "acc", "value": 79, "stderr": 0.0204 }, { "prompt": "prompt-5", "metric": "acc", "value": 48.5, "stderr": 0.025 }, { "prompt": "prompt-6", "metric": "acc", "value": 55.25, "stderr": 0.0249 } ], "average_accuracy": 69.25, "best_prompt": 79, "prompt_id": "prompt-4", "CPS": 71.2975 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 14.39, "stderr": 0.0287 }, { "prompt": "prompt-2", "metric": "f1", "value": 15.329999999999998, "stderr": 0.0296 }, { "prompt": "prompt-3", "metric": "f1", "value": 27.74, "stderr": 0.0333 }, { "prompt": "prompt-4", "metric": "f1", "value": 28.57, "stderr": 0.034 }, { "prompt": "prompt-5", "metric": "f1", "value": 66.75999999999999, "stderr": 0.0199 }, { "prompt": "prompt-6", "metric": "f1", "value": 61.77, "stderr": 0.0221 } ], "average_accuracy": 35.76, "best_prompt": 66.75999999999999, "prompt_id": "prompt-5", "CPS": 46.0644 } }
46.572493
{ "model_name": "google/gemma-3-4b-it", "num_fewshot": "5", "batch_size": 1, "model": "google/gemma-3-4b-it", "base_model": "Gemma3ForConditionalGeneration", "revision": "093f9f388b31de276ce2de164bdc2081324b9767", "submitted_time": "2025-02-20T21:20:07", "num_params_billion": null, "language": "" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 40.2, "stderr": 0.0219 }, { "prompt": "prompt-2", "metric": "acc", "value": 40.2, "stderr": 0.0219 }, { "prompt": "prompt-3", "metric": "acc", "value": 59.8, "stderr": 0.0219 }, { "prompt": "prompt-4", "metric": "acc", "value": 60, "stderr": 0.0219 }, { "prompt": "prompt-5", "metric": "acc", "value": 39.6, "stderr": 0.0219 }, { "prompt": "prompt-6", "metric": "acc", "value": 39.4, "stderr": 0.0219 } ], "average_accuracy": 46.53333333333333, "best_prompt": 60, "prompt_id": "prompt-4", "CPS": 51.919999999999995 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 26.93, "stderr": 0.0222 }, { "prompt": "prompt-2", "metric": "acc", "value": 26.68, "stderr": 0.0221 }, { "prompt": "prompt-3", "metric": "acc", "value": 96.50999999999999, "stderr": 0.0092 }, { "prompt": "prompt-4", "metric": "acc", "value": 95.76, "stderr": 0.0101 }, { "prompt": "prompt-5", "metric": "acc", "value": 27.18, "stderr": 0.0222 }, { "prompt": "prompt-6", "metric": "acc", "value": 27.18, "stderr": 0.0222 } ], "average_accuracy": 50.04, "best_prompt": 96.50999999999999, "prompt_id": "prompt-3", "CPS": 51.661803000000006 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 59.650000000000006, "stderr": 0.0144 }, { "prompt": "prompt-2", "metric": "f1", "value": 60.78, "stderr": 0.0143 }, { "prompt": "prompt-3", "metric": "f1", "value": 71.08, "stderr": 0.0118 }, { "prompt": "prompt-4", "metric": "f1", "value": 71.53, "stderr": 0.0118 }, { "prompt": "prompt-5", "metric": "f1", "value": 56.21000000000001, "stderr": 0.0157 }, { "prompt": "prompt-6", "metric": "f1", "value": 55.35, "stderr": 0.0158 } ], "average_accuracy": 62.43333333333334, "best_prompt": 71.53, "prompt_id": "prompt-4", "CPS": 65.02315433333334 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 14.64, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 13.469999999999999, "stderr": null } ], "average_accuracy": 14.055, "best_prompt": 14.64, "prompt_id": "prompt-1", "CPS": 14.554356 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 36.116496606883175, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 35.45803320407174, "stderr": null } ], "average_accuracy": 35.78726490547746, "best_prompt": 36.116496606883175, "prompt_id": "prompt-1", "CPS": 35.99758965061619 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 36.04, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 35.92, "stderr": null } ], "average_accuracy": 35.980000000000004, "best_prompt": 36.04, "prompt_id": "prompt-1", "CPS": 36.018376 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 73.91, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 73.2, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 75.94, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 76.37, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 74.11, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 76.25, "stderr": null } ], "average_accuracy": 74.96333333333334, "best_prompt": 76.37, "prompt_id": "prompt-4", "CPS": 75.29572866666668 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 17.96, "stderr": 0.0063 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 17.94, 
"stderr": 0.0064 } ], "average_accuracy": 17.950000000000003, "best_prompt": 17.96, "prompt_id": "prompt-1", "CPS": 17.958204000000002 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 71, "stderr": 0.0227 }, { "prompt": "prompt-2", "metric": "acc", "value": 73.25, "stderr": 0.0222 }, { "prompt": "prompt-3", "metric": "acc", "value": 64.75, "stderr": 0.0239 }, { "prompt": "prompt-4", "metric": "acc", "value": 70.25, "stderr": 0.0229 }, { "prompt": "prompt-5", "metric": "acc", "value": 60.5, "stderr": 0.0245 }, { "prompt": "prompt-6", "metric": "acc", "value": 59, "stderr": 0.0246 } ], "average_accuracy": 66.45833333333333, "best_prompt": 73.25, "prompt_id": "prompt-2", "CPS": 68.27510416666667 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 44.440000000000005, "stderr": 0.0308 }, { "prompt": "prompt-2", "metric": "f1", "value": 36.97, "stderr": 0.0327 }, { "prompt": "prompt-3", "metric": "f1", "value": 29.18, "stderr": 0.0328 }, { "prompt": "prompt-4", "metric": "f1", "value": 17.86, "stderr": 0.0309 }, { "prompt": "prompt-5", "metric": "f1", "value": 58.230000000000004, "stderr": 0.0245 }, { "prompt": "prompt-6", "metric": "f1", "value": 61.17, "stderr": 0.0231 } ], "average_accuracy": 41.30833333333334, "best_prompt": 61.17, "prompt_id": "prompt-6", "CPS": 49.020618500000005 } }
44.586033
{ "model_name": "google/gemma-3-4b-it", "num_fewshot": "None", "batch_size": 1, "model": "google/gemma-3-4b-it", "base_model": "Gemma3ForConditionalGeneration", "revision": "093f9f388b31de276ce2de164bdc2081324b9767", "submitted_time": "2025-02-20T21:20:07", "num_params_billion": null, "language": "" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 28.199999999999996, "stderr": 0.0201 }, { "prompt": "prompt-2", "metric": "acc", "value": 25.6, "stderr": 0.0195 }, { "prompt": "prompt-3", "metric": "acc", "value": 54.6, "stderr": 0.0223 }, { "prompt": "prompt-4", "metric": "acc", "value": 54, "stderr": 0.0223 }, { "prompt": "prompt-5", "metric": "acc", "value": 35.4, "stderr": 0.0214 }, { "prompt": "prompt-6", "metric": "acc", "value": 35.4, "stderr": 0.0214 } ], "average_accuracy": 38.86666666666667, "best_prompt": 54.6, "prompt_id": "prompt-3", "CPS": 46.0096 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 26.93, "stderr": 0.0222 }, { "prompt": "prompt-2", "metric": "acc", "value": 27.68, "stderr": 0.0224 }, { "prompt": "prompt-3", "metric": "acc", "value": 90.77, "stderr": 0.0145 }, { "prompt": "prompt-4", "metric": "acc", "value": 90.77, "stderr": 0.0145 }, { "prompt": "prompt-5", "metric": "acc", "value": 28.18, "stderr": 0.0225 }, { "prompt": "prompt-6", "metric": "acc", "value": 27.68, "stderr": 0.0224 } ], "average_accuracy": 48.66833333333333, "best_prompt": 90.77, "prompt_id": "prompt-3", "CPS": 52.55431716666667 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 37.72, "stderr": 0.0156 }, { "prompt": "prompt-2", "metric": "f1", "value": 54.92, "stderr": 0.0129 }, { "prompt": "prompt-3", "metric": "f1", "value": 62.45, "stderr": 0.0117 }, { "prompt": "prompt-4", "metric": "f1", "value": 62.660000000000004, "stderr": 0.0112 }, { "prompt": "prompt-5", "metric": "f1", "value": 64.25, "stderr": 0.0112 }, { "prompt": "prompt-6", "metric": "f1", "value": 61.95, "stderr": 0.0114 } ], "average_accuracy": 57.324999999999996, "best_prompt": 64.25, "prompt_id": "prompt-5", "CPS": 59.800687499999995 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 33.18, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 28.27, "stderr": null } ], "average_accuracy": 30.725, "best_prompt": 33.18, "prompt_id": "prompt-1", "CPS": 32.365431 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 27.7042886572952, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 30.127498788172563, "stderr": null } ], "average_accuracy": 28.915893722733884, "best_prompt": 30.127498788172563, "prompt_id": "prompt-2", "CPS": 29.762472486765088 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 17.9, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 18.78, "stderr": null } ], "average_accuracy": 18.34, "best_prompt": 18.78, "prompt_id": "prompt-2", "CPS": 18.697368 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 48.35, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 52.16, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 65.60000000000001, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 63.629999999999995, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 65.64999999999999, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 50.580000000000005, "stderr": null } ], "average_accuracy": 57.66166666666666, "best_prompt": 65.64999999999999, "prompt_id": "prompt-5", "CPS": 60.40565916666666 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 28.49, "stderr": 0.009 }, { "prompt": "prompt-2", "metric": 
"rouge1", "value": 29.37, "stderr": 0.0104 } ], "average_accuracy": 28.93, "best_prompt": 29.37, "prompt_id": "prompt-2", "CPS": 29.240772000000003 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 59.25, "stderr": 0.0246 }, { "prompt": "prompt-2", "metric": "acc", "value": 68.75, "stderr": 0.0232 }, { "prompt": "prompt-3", "metric": "acc", "value": 69.25, "stderr": 0.0231 }, { "prompt": "prompt-4", "metric": "acc", "value": 69.25, "stderr": 0.0231 }, { "prompt": "prompt-5", "metric": "acc", "value": 65, "stderr": 0.0239 }, { "prompt": "prompt-6", "metric": "acc", "value": 57.75, "stderr": 0.0247 } ], "average_accuracy": 64.875, "best_prompt": 69.25, "prompt_id": "prompt-3", "CPS": 66.2203125 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 35.77, "stderr": 0.032 }, { "prompt": "prompt-2", "metric": "f1", "value": 3.82, "stderr": 0.0165 }, { "prompt": "prompt-3", "metric": "f1", "value": 61.019999999999996, "stderr": 0.023 }, { "prompt": "prompt-4", "metric": "f1", "value": 52.33, "stderr": 0.0274 }, { "prompt": "prompt-5", "metric": "f1", "value": 61.44, "stderr": 0.0227 }, { "prompt": "prompt-6", "metric": "f1", "value": 50.39, "stderr": 0.027 } ], "average_accuracy": 44.12833333333333, "best_prompt": 61.44, "prompt_id": "prompt-5", "CPS": 50.803712 } }
51.695772
{ "model_name": "ibm-granite/granite-3.1-8b-instruct", "num_fewshot": "5", "batch_size": 1, "model": "ibm-granite/granite-3.1-8b-instruct", "base_model": "GraniteForCausalLM", "revision": "3f05a1d007b2484bbf17593efe110bd5b9d67655", "submitted_time": "2024-12-06T19:56:54", "num_params_billion": 8.170848256, "language": "" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 39.4, "stderr": 0.0219 }, { "prompt": "prompt-2", "metric": "acc", "value": 38.800000000000004, "stderr": 0.0218 }, { "prompt": "prompt-3", "metric": "acc", "value": 64.60000000000001, "stderr": 0.0214 }, { "prompt": "prompt-4", "metric": "acc", "value": 66, "stderr": 0.0212 }, { "prompt": "prompt-5", "metric": "acc", "value": 39.800000000000004, "stderr": 0.0219 }, { "prompt": "prompt-6", "metric": "acc", "value": 40.400000000000006, "stderr": 0.022 } ], "average_accuracy": 48.166666666666664, "best_prompt": 66, "prompt_id": "prompt-4", "CPS": 54.23 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 30.42, "stderr": 0.023 }, { "prompt": "prompt-2", "metric": "acc", "value": 29.93, "stderr": 0.0229 }, { "prompt": "prompt-3", "metric": "acc", "value": 98, "stderr": 0.007 }, { "prompt": "prompt-4", "metric": "acc", "value": 97.76, "stderr": 0.0074 }, { "prompt": "prompt-5", "metric": "acc", "value": 29.43, "stderr": 0.0228 }, { "prompt": "prompt-6", "metric": "acc", "value": 29.93, "stderr": 0.0229 } ], "average_accuracy": 52.57833333333334, "best_prompt": 98, "prompt_id": "prompt-3", "CPS": 53.486766666666675 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 57.86, "stderr": 0.0155 }, { "prompt": "prompt-2", "metric": "f1", "value": 63.480000000000004, "stderr": 0.0145 }, { "prompt": "prompt-3", "metric": "f1", "value": 74.03, "stderr": 0.0111 }, { "prompt": "prompt-4", "metric": "f1", "value": 75.68, "stderr": 0.0111 }, { "prompt": "prompt-5", "metric": "f1", "value": 57.589999999999996, "stderr": 0.0157 }, { "prompt": "prompt-6", "metric": "f1", "value": 55.669999999999995, "stderr": 0.0159 } ], "average_accuracy": 64.05166666666666, "best_prompt": 75.68, "prompt_id": "prompt-4", "CPS": 66.87967733333333 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 25.790000000000003, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 25.419999999999998, "stderr": null } ], "average_accuracy": 25.605, "best_prompt": 25.790000000000003, "prompt_id": "prompt-1", "CPS": 25.7422885 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 28.799121425109067, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 32.376099127484245, "stderr": null } ], "average_accuracy": 30.587610276296658, "best_prompt": 32.376099127484245, "prompt_id": "prompt-2", "CPS": 31.79705620413975 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 40.06, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 39.33, "stderr": null } ], "average_accuracy": 39.695, "best_prompt": 40.06, "prompt_id": "prompt-1", "CPS": 39.913781 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 75.67, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 75.02, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 74.19, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 74.68, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 73.3, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 73.96000000000001, "stderr": null } ], "average_accuracy": 74.47000000000001, "best_prompt": 75.67, "prompt_id": "prompt-1", "CPS": 74.76196000000002 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 33.14, "stderr": 
0.011 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 31.59, "stderr": 0.0111 } ], "average_accuracy": 32.365, "best_prompt": 33.14, "prompt_id": "prompt-1", "CPS": 32.883165 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 74.5, "stderr": 0.0218 }, { "prompt": "prompt-2", "metric": "acc", "value": 73.5, "stderr": 0.0221 }, { "prompt": "prompt-3", "metric": "acc", "value": 74.25, "stderr": 0.0219 }, { "prompt": "prompt-4", "metric": "acc", "value": 76.5, "stderr": 0.0212 }, { "prompt": "prompt-5", "metric": "acc", "value": 64.5, "stderr": 0.024 }, { "prompt": "prompt-6", "metric": "acc", "value": 62.25000000000001, "stderr": 0.0243 } ], "average_accuracy": 70.91666666666667, "best_prompt": 76.5, "prompt_id": "prompt-4", "CPS": 72.22875 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 65.5, "stderr": 0.0231 }, { "prompt": "prompt-2", "metric": "f1", "value": 66.55, "stderr": 0.0224 }, { "prompt": "prompt-3", "metric": "f1", "value": 67.34, "stderr": 0.0205 }, { "prompt": "prompt-4", "metric": "f1", "value": 67.83, "stderr": 0.0225 }, { "prompt": "prompt-5", "metric": "f1", "value": 54.33, "stderr": 0.0269 }, { "prompt": "prompt-6", "metric": "f1", "value": 60.699999999999996, "stderr": 0.0242 } ], "average_accuracy": 63.708333333333336, "best_prompt": 67.83, "prompt_id": "prompt-4", "CPS": 65.0342735 } }
37.264778
{ "model_name": "ibm-granite/granite-3.1-8b-instruct", "num_fewshot": "None", "batch_size": 1, "model": "ibm-granite/granite-3.1-8b-instruct", "base_model": "GraniteForCausalLM", "revision": "3f05a1d007b2484bbf17593efe110bd5b9d67655", "submitted_time": "2024-12-06T19:56:54", "num_params_billion": 8.170848256, "language": "" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 32.4, "stderr": 0.021 }, { "prompt": "prompt-2", "metric": "acc", "value": 28.799999999999997, "stderr": 0.0203 }, { "prompt": "prompt-3", "metric": "acc", "value": 59.4, "stderr": 0.022 }, { "prompt": "prompt-4", "metric": "acc", "value": 55.800000000000004, "stderr": 0.0222 }, { "prompt": "prompt-5", "metric": "acc", "value": 37.2, "stderr": 0.0216 }, { "prompt": "prompt-6", "metric": "acc", "value": 38.2, "stderr": 0.0218 } ], "average_accuracy": 41.96666666666667, "best_prompt": 59.4, "prompt_id": "prompt-3", "CPS": 49.044599999999996 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 30.919999999999998, "stderr": 0.0231 }, { "prompt": "prompt-2", "metric": "acc", "value": 31.669999999999998, "stderr": 0.0233 }, { "prompt": "prompt-3", "metric": "acc", "value": 91.27, "stderr": 0.0141 }, { "prompt": "prompt-4", "metric": "acc", "value": 91.52, "stderr": 0.0139 }, { "prompt": "prompt-5", "metric": "acc", "value": 31.169999999999998, "stderr": 0.0232 }, { "prompt": "prompt-6", "metric": "acc", "value": 31.169999999999998, "stderr": 0.0232 } ], "average_accuracy": 51.28666666666667, "best_prompt": 91.52, "prompt_id": "prompt-4", "CPS": 54.69845333333334 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 62.63999999999999, "stderr": 0.0112 }, { "prompt": "prompt-2", "metric": "f1", "value": 62.56, "stderr": 0.0112 }, { "prompt": "prompt-3", "metric": "f1", "value": 0.98, "stderr": 0.0048 }, { "prompt": "prompt-4", "metric": "f1", "value": 0, "stderr": 0 }, { "prompt": "prompt-5", "metric": "f1", "value": 50, "stderr": 0.0166 }, { "prompt": "prompt-6", "metric": "f1", "value": 60.86, "stderr": 0.0127 } ], "average_accuracy": 39.50666666666667, "best_prompt": 62.63999999999999, "prompt_id": "prompt-1", "CPS": 48.14928 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 0, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 0.16999999999999998, "stderr": null } ], "average_accuracy": 0.08499999999999999, "best_prompt": 0.16999999999999998, "prompt_id": "prompt-2", "CPS": 0.1698555 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 13.48637057682986, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 18.565149054774597, "stderr": null } ], "average_accuracy": 16.02575981580223, "best_prompt": 18.565149054774597, "prompt_id": "prompt-2", "CPS": 18.093707657478472 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 6.88, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 11.469999999999999, "stderr": null } ], "average_accuracy": 9.174999999999999, "best_prompt": 11.469999999999999, "prompt_id": "prompt-2", "CPS": 11.2067635 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 33.64, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 27.310000000000002, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 69.08, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 69.45, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 60.9, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 26.52, "stderr": null } ], "average_accuracy": 47.81666666666666, "best_prompt": 69.45, "prompt_id": "prompt-4", "CPS": 54.42565 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": 
"rouge1", "value": 30.39, "stderr": 0.01 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 29.69, "stderr": 0.0097 } ], "average_accuracy": 30.04, "best_prompt": 30.39, "prompt_id": "prompt-1", "CPS": 30.283635 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 49, "stderr": 0.025 }, { "prompt": "prompt-2", "metric": "acc", "value": 67, "stderr": 0.0235 }, { "prompt": "prompt-3", "metric": "acc", "value": 45.75, "stderr": 0.0249 }, { "prompt": "prompt-4", "metric": "acc", "value": 44.75, "stderr": 0.0249 }, { "prompt": "prompt-5", "metric": "acc", "value": 45.25, "stderr": 0.0249 }, { "prompt": "prompt-6", "metric": "acc", "value": 54.75, "stderr": 0.0249 } ], "average_accuracy": 51.083333333333336, "best_prompt": 67, "prompt_id": "prompt-2", "CPS": 56.33583333333333 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 54.900000000000006, "stderr": 0.0254 }, { "prompt": "prompt-2", "metric": "f1", "value": 63.04, "stderr": 0.0231 }, { "prompt": "prompt-3", "metric": "f1", "value": 1.58, "stderr": 0.011 }, { "prompt": "prompt-4", "metric": "f1", "value": 0, "stderr": 0 }, { "prompt": "prompt-5", "metric": "f1", "value": 66.85, "stderr": 0.02 }, { "prompt": "prompt-6", "metric": "f1", "value": 65.64999999999999, "stderr": 0.0205 } ], "average_accuracy": 42.00333333333333, "best_prompt": 66.85, "prompt_id": "prompt-5", "CPS": 50.240003333333334 } }
50.373026
{ "model_name": "meta-llama/Meta-Llama-3.1-8B-Instruct", "num_fewshot": "5", "batch_size": 1, "model": "meta-llama/Meta-Llama-3.1-8B-Instruct", "base_model": "LlamaForCausalLM", "revision": "0e9e39f249a16976918f6564b8830bc894c89659", "submitted_time": "2024-07-18T08:56:00", "num_params_billion": 8.030261248, "language": "en_de_fr_it_pt_hi_es_th" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 35.199999999999996, "stderr": 0.0214 }, { "prompt": "prompt-2", "metric": "acc", "value": 35.6, "stderr": 0.0214 }, { "prompt": "prompt-3", "metric": "acc", "value": 69.6, "stderr": 0.0206 }, { "prompt": "prompt-4", "metric": "acc", "value": 70.19999999999999, "stderr": 0.0205 }, { "prompt": "prompt-5", "metric": "acc", "value": 35.8, "stderr": 0.0215 }, { "prompt": "prompt-6", "metric": "acc", "value": 35.4, "stderr": 0.0214 } ], "average_accuracy": 46.96666666666666, "best_prompt": 70.19999999999999, "prompt_id": "prompt-4", "CPS": 53.89019999999999 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 29.93, "stderr": 0.0229 }, { "prompt": "prompt-2", "metric": "acc", "value": 29.93, "stderr": 0.0229 }, { "prompt": "prompt-3", "metric": "acc", "value": 94.01, "stderr": 0.0119 }, { "prompt": "prompt-4", "metric": "acc", "value": 93.52000000000001, "stderr": 0.0123 }, { "prompt": "prompt-5", "metric": "acc", "value": 30.17, "stderr": 0.023 }, { "prompt": "prompt-6", "metric": "acc", "value": 29.43, "stderr": 0.0228 } ], "average_accuracy": 51.165, "best_prompt": 94.01, "prompt_id": "prompt-3", "CPS": 53.731415500000004 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 50.449999999999996, "stderr": 0.0167 }, { "prompt": "prompt-2", "metric": "f1", "value": 56.720000000000006, "stderr": 0.0161 }, { "prompt": "prompt-3", "metric": "f1", "value": 70.04, "stderr": 0.0123 }, { "prompt": "prompt-4", "metric": "f1", "value": 72.22, "stderr": 0.0123 }, { "prompt": "prompt-5", "metric": "f1", "value": 65.64999999999999, "stderr": 0.0126 }, { "prompt": "prompt-6", "metric": "f1", "value": 64.22, "stderr": 0.0131 } ], "average_accuracy": 63.21666666666666, "best_prompt": 72.22, "prompt_id": "prompt-4", "CPS": 65.71779266666665 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 28.95, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 30.570000000000004, "stderr": null } ], "average_accuracy": 29.76, "best_prompt": 30.570000000000004, "prompt_id": "prompt-2", "CPS": 30.322383000000006 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 38.681505089675234, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 40.71863063499758, "stderr": null } ], "average_accuracy": 39.70006786233641, "best_prompt": 40.71863063499758, "prompt_id": "prompt-2", "CPS": 40.30388582181209 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 35.74, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 36.05, "stderr": null } ], "average_accuracy": 35.894999999999996, "best_prompt": 36.05, "prompt_id": "prompt-2", "CPS": 35.994122499999996 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 69.24, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 69.72, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 77, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 77.35, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 70.19, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 72.84, "stderr": null } ], "average_accuracy": 72.72333333333331, "best_prompt": 77.35, "prompt_id": "prompt-4", "CPS": 73.77127333333331 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 19.89, 
"stderr": 0.0071 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 20.5, "stderr": 0.0078 } ], "average_accuracy": 20.195, "best_prompt": 20.5, "prompt_id": "prompt-2", "CPS": 20.437475 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 74.5, "stderr": 0.0218 }, { "prompt": "prompt-2", "metric": "acc", "value": 73.25, "stderr": 0.0222 }, { "prompt": "prompt-3", "metric": "acc", "value": 73.75, "stderr": 0.022 }, { "prompt": "prompt-4", "metric": "acc", "value": 75, "stderr": 0.0217 }, { "prompt": "prompt-5", "metric": "acc", "value": 59.25, "stderr": 0.0246 }, { "prompt": "prompt-6", "metric": "acc", "value": 58.5, "stderr": 0.0247 } ], "average_accuracy": 69.04166666666667, "best_prompt": 75, "prompt_id": "prompt-4", "CPS": 70.53125 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 59.89, "stderr": 0.0243 }, { "prompt": "prompt-2", "metric": "f1", "value": 61.199999999999996, "stderr": 0.0236 }, { "prompt": "prompt-3", "metric": "f1", "value": 58.17, "stderr": 0.0257 }, { "prompt": "prompt-4", "metric": "f1", "value": 58.32000000000001, "stderr": 0.0254 }, { "prompt": "prompt-5", "metric": "f1", "value": 52.07000000000001, "stderr": 0.027 }, { "prompt": "prompt-6", "metric": "f1", "value": 56.279999999999994, "stderr": 0.0268 } ], "average_accuracy": 57.654999999999994, "best_prompt": 61.199999999999996, "prompt_id": "prompt-2", "CPS": 59.03046 } }
40.234247
{ "model_name": "meta-llama/Meta-Llama-3.1-8B-Instruct", "num_fewshot": "None", "batch_size": 1, "model": "meta-llama/Meta-Llama-3.1-8B-Instruct", "base_model": "LlamaForCausalLM", "revision": "0e9e39f249a16976918f6564b8830bc894c89659", "submitted_time": "2024-07-18T08:56:00", "num_params_billion": 8.030261248, "language": "en_de_fr_it_pt_hi_es_th" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 24.8, "stderr": 0.0193 }, { "prompt": "prompt-2", "metric": "acc", "value": 25.6, "stderr": 0.0195 }, { "prompt": "prompt-3", "metric": "acc", "value": 68.4, "stderr": 0.0208 }, { "prompt": "prompt-4", "metric": "acc", "value": 69, "stderr": 0.0207 }, { "prompt": "prompt-5", "metric": "acc", "value": 33.2, "stderr": 0.0211 }, { "prompt": "prompt-6", "metric": "acc", "value": 32.6, "stderr": 0.021 } ], "average_accuracy": 42.266666666666666, "best_prompt": 69, "prompt_id": "prompt-4", "CPS": 50.553999999999995 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 30.17, "stderr": 0.023 }, { "prompt": "prompt-2", "metric": "acc", "value": 30.669999999999998, "stderr": 0.0231 }, { "prompt": "prompt-3", "metric": "acc", "value": 79.55, "stderr": 0.0202 }, { "prompt": "prompt-4", "metric": "acc", "value": 87.78, "stderr": 0.0164 }, { "prompt": "prompt-5", "metric": "acc", "value": 30.669999999999998, "stderr": 0.0231 }, { "prompt": "prompt-6", "metric": "acc", "value": 30.919999999999998, "stderr": 0.0231 } ], "average_accuracy": 48.29333333333333, "best_prompt": 87.78, "prompt_id": "prompt-4", "CPS": 53.118604 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 14.39, "stderr": 0.0155 }, { "prompt": "prompt-2", "metric": "f1", "value": 37.230000000000004, "stderr": 0.0175 }, { "prompt": "prompt-3", "metric": "f1", "value": 36.91, "stderr": 0.016 }, { "prompt": "prompt-4", "metric": "f1", "value": 52.56999999999999, "stderr": 0.0156 }, { "prompt": "prompt-5", "metric": "f1", "value": 63.260000000000005, "stderr": 0.0113 }, { "prompt": "prompt-6", "metric": "f1", "value": 62.59, "stderr": 0.0112 } ], "average_accuracy": 44.491666666666674, "best_prompt": 63.260000000000005, "prompt_id": "prompt-5", "CPS": 51.38715233333334 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 10.440000000000001, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 21.060000000000002, "stderr": null } ], "average_accuracy": 15.750000000000002, "best_prompt": 21.060000000000002, "prompt_id": "prompt-2", "CPS": 19.941714 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 7.959172321861367, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 7.187601793504606, "stderr": null } ], "average_accuracy": 7.573387057682986, "best_prompt": 7.959172321861367, "prompt_id": "prompt-1", "CPS": 7.928467007893061 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 14.680000000000001, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 15.479999999999999, "stderr": null } ], "average_accuracy": 15.08, "best_prompt": 15.479999999999999, "prompt_id": "prompt-2", "CPS": 15.418079999999998 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 42.730000000000004, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 49.02, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 64.39, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 68.14, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 71.12, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 63.41, "stderr": null } ], "average_accuracy": 59.801666666666655, "best_prompt": 71.12, "prompt_id": "prompt-5", "CPS": 63.07040133333332 }, "summarization-fanpage": { "prompts": [ { 
"prompt": "prompt-1", "metric": "rouge1", "value": 22.34, "stderr": 0.0082 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 22.259999999999998, "stderr": 0.0084 } ], "average_accuracy": 22.299999999999997, "best_prompt": 22.34, "prompt_id": "prompt-1", "CPS": 22.331063999999998 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 67, "stderr": 0.0235 }, { "prompt": "prompt-2", "metric": "acc", "value": 56.25, "stderr": 0.0248 }, { "prompt": "prompt-3", "metric": "acc", "value": 63.74999999999999, "stderr": 0.0241 }, { "prompt": "prompt-4", "metric": "acc", "value": 63.24999999999999, "stderr": 0.0241 }, { "prompt": "prompt-5", "metric": "acc", "value": 54, "stderr": 0.025 }, { "prompt": "prompt-6", "metric": "acc", "value": 59.5, "stderr": 0.0246 } ], "average_accuracy": 60.625, "best_prompt": 67, "prompt_id": "prompt-1", "CPS": 62.728750000000005 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 54.87, "stderr": 0.0253 }, { "prompt": "prompt-2", "metric": "f1", "value": 53.26, "stderr": 0.0261 }, { "prompt": "prompt-3", "metric": "f1", "value": 55.059999999999995, "stderr": 0.026 }, { "prompt": "prompt-4", "metric": "f1", "value": 6.87, "stderr": 0.0217 }, { "prompt": "prompt-5", "metric": "f1", "value": 65.39, "stderr": 0.0205 }, { "prompt": "prompt-6", "metric": "f1", "value": 66.93, "stderr": 0.0199 } ], "average_accuracy": 50.39666666666667, "best_prompt": 66.93, "prompt_id": "prompt-6", "CPS": 55.86424 } }
51.220984
{ "model_name": "microsoft/Phi-3-medium-4k-instruct", "num_fewshot": "5", "batch_size": 1, "model": "microsoft/Phi-3-medium-4k-instruct", "base_model": "Phi3ForCausalLM", "revision": "b64223aaea6fbf273c0c8cd0801d5e732dce8897", "submitted_time": "2024-05-07T15:27:19", "num_params_billion": 13.96023808, "language": "multilingual" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 44, "stderr": 0.0222 }, { "prompt": "prompt-2", "metric": "acc", "value": 44.6, "stderr": 0.0223 }, { "prompt": "prompt-3", "metric": "acc", "value": 81, "stderr": 0.0176 }, { "prompt": "prompt-4", "metric": "acc", "value": 81.2, "stderr": 0.0175 }, { "prompt": "prompt-5", "metric": "acc", "value": 45, "stderr": 0.0223 }, { "prompt": "prompt-6", "metric": "acc", "value": 45.6, "stderr": 0.0223 } ], "average_accuracy": 56.900000000000006, "best_prompt": 81.2, "prompt_id": "prompt-4", "CPS": 61.4684 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 28.93, "stderr": 0.0227 }, { "prompt": "prompt-2", "metric": "acc", "value": 27.93, "stderr": 0.0224 }, { "prompt": "prompt-3", "metric": "acc", "value": 75.81, "stderr": 0.0214 }, { "prompt": "prompt-4", "metric": "acc", "value": 81.3, "stderr": 0.0195 }, { "prompt": "prompt-5", "metric": "acc", "value": 29.43, "stderr": 0.0228 }, { "prompt": "prompt-6", "metric": "acc", "value": 29.43, "stderr": 0.0228 } ], "average_accuracy": 45.47166666666667, "best_prompt": 81.3, "prompt_id": "prompt-4", "CPS": 52.171565 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 70.41, "stderr": 0.0121 }, { "prompt": "prompt-2", "metric": "f1", "value": 71.58, "stderr": 0.0124 }, { "prompt": "prompt-3", "metric": "f1", "value": 75.2, "stderr": 0.0112 }, { "prompt": "prompt-4", "metric": "f1", "value": 74.18, "stderr": 0.012 }, { "prompt": "prompt-5", "metric": "f1", "value": 27.13, "stderr": 0.0186 }, { "prompt": "prompt-6", "metric": "f1", "value": 34.01, "stderr": 0.0189 } ], "average_accuracy": 58.751666666666665, "best_prompt": 75.2, "prompt_id": "prompt-3", "CPS": 62.83085333333334 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 36.11, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 34.8, "stderr": null } ], "average_accuracy": 35.455, "best_prompt": 36.11, "prompt_id": "prompt-1", "CPS": 35.873479499999995 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 25.382776296655358, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 25.710181774115366, "stderr": null } ], "average_accuracy": 25.54647903538536, "best_prompt": 25.710181774115366, "prompt_id": "prompt-2", "CPS": 25.668093502418674 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 36.53, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 37.04, "stderr": null } ], "average_accuracy": 36.785, "best_prompt": 37.04, "prompt_id": "prompt-2", "CPS": 36.945547999999995 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 68.66, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 67.17, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 72.1, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 71.99, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 70.66, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 68.52000000000001, "stderr": null } ], "average_accuracy": 69.84999999999998, "best_prompt": 72.1, "prompt_id": "prompt-3", "CPS": 70.47774999999999 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 21.05, "stderr": 0.0083 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 21.65, "stderr": 0.0084 } ], "average_accuracy": 21.35, 
"best_prompt": 21.65, "prompt_id": "prompt-2", "CPS": 21.58505 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 77.5, "stderr": 0.0209 }, { "prompt": "prompt-2", "metric": "acc", "value": 78.5, "stderr": 0.0206 }, { "prompt": "prompt-3", "metric": "acc", "value": 80.25, "stderr": 0.0199 }, { "prompt": "prompt-4", "metric": "acc", "value": 80.5, "stderr": 0.0198 }, { "prompt": "prompt-5", "metric": "acc", "value": 68.25, "stderr": 0.0233 }, { "prompt": "prompt-6", "metric": "acc", "value": 66.5, "stderr": 0.0236 } ], "average_accuracy": 75.25, "best_prompt": 80.5, "prompt_id": "prompt-4", "CPS": 76.27375 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 71.83, "stderr": 0.0214 }, { "prompt": "prompt-2", "metric": "f1", "value": 72.47, "stderr": 0.0211 }, { "prompt": "prompt-3", "metric": "f1", "value": 71.96000000000001, "stderr": 0.022 }, { "prompt": "prompt-4", "metric": "f1", "value": 68.52000000000001, "stderr": 0.0248 }, { "prompt": "prompt-5", "metric": "f1", "value": 58.089999999999996, "stderr": 0.0253 }, { "prompt": "prompt-6", "metric": "f1", "value": 62.519999999999996, "stderr": 0.0237 } ], "average_accuracy": 67.565, "best_prompt": 72.47, "prompt_id": "prompt-2", "CPS": 68.9153465 } }
42.094142
{ "model_name": "microsoft/Phi-3-medium-4k-instruct", "num_fewshot": "None", "batch_size": 1, "model": "microsoft/Phi-3-medium-4k-instruct", "base_model": "Phi3ForCausalLM", "revision": "b64223aaea6fbf273c0c8cd0801d5e732dce8897", "submitted_time": "2024-05-07T15:27:19", "num_params_billion": 13.96023808, "language": "multilingual" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 27.800000000000004, "stderr": 0.0201 }, { "prompt": "prompt-2", "metric": "acc", "value": 26.200000000000003, "stderr": 0.0197 }, { "prompt": "prompt-3", "metric": "acc", "value": 80.2, "stderr": 0.0178 }, { "prompt": "prompt-4", "metric": "acc", "value": 79.2, "stderr": 0.0182 }, { "prompt": "prompt-5", "metric": "acc", "value": 35.4, "stderr": 0.0214 }, { "prompt": "prompt-6", "metric": "acc", "value": 37.2, "stderr": 0.0216 } ], "average_accuracy": 47.66666666666668, "best_prompt": 80.2, "prompt_id": "prompt-3", "CPS": 54.10826666666667 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 28.68, "stderr": 0.0226 }, { "prompt": "prompt-2", "metric": "acc", "value": 28.18, "stderr": 0.0225 }, { "prompt": "prompt-3", "metric": "acc", "value": 86.78, "stderr": 0.0169 }, { "prompt": "prompt-4", "metric": "acc", "value": 88.53, "stderr": 0.0159 }, { "prompt": "prompt-5", "metric": "acc", "value": 29.93, "stderr": 0.0229 }, { "prompt": "prompt-6", "metric": "acc", "value": 29.93, "stderr": 0.0229 } ], "average_accuracy": 48.67166666666666, "best_prompt": 88.53, "prompt_id": "prompt-4", "CPS": 53.24341749999999 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 62.61, "stderr": 0.0112 }, { "prompt": "prompt-2", "metric": "f1", "value": 62.69, "stderr": 0.0112 }, { "prompt": "prompt-3", "metric": "f1", "value": 70.94, "stderr": 0.0116 }, { "prompt": "prompt-4", "metric": "f1", "value": 72.75, "stderr": 0.0112 }, { "prompt": "prompt-5", "metric": "f1", "value": 56.46, "stderr": 0.0131 }, { "prompt": "prompt-6", "metric": "f1", "value": 48.66, "stderr": 0.0141 } ], "average_accuracy": 62.35166666666667, "best_prompt": 72.75, "prompt_id": "prompt-4", "CPS": 65.1852125 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 0.29, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 0.33999999999999997, "stderr": null } ], "average_accuracy": 0.31499999999999995, "best_prompt": 0.33999999999999997, "prompt_id": "prompt-2", "CPS": 0.33991499999999997 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 18.867341250605914, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 16.086162142510904, "stderr": null } ], "average_accuracy": 17.47675169655841, "best_prompt": 18.867341250605914, "prompt_id": "prompt-1", "CPS": 18.60497397404849 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 26.979999999999997, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 27.01, "stderr": null } ], "average_accuracy": 26.994999999999997, "best_prompt": 27.01, "prompt_id": "prompt-2", "CPS": 27.0059485 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 30.59, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 39.94, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 72.58, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 69.27, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 53.59, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 26.729999999999997, "stderr": null } ], "average_accuracy": 48.78333333333334, "best_prompt": 72.58, "prompt_id": "prompt-3", "CPS": 55.308379333333335 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 24.95, "stderr": 0.0094 }, { 
"prompt": "prompt-2", "metric": "rouge1", "value": 28.08, "stderr": 0.01 } ], "average_accuracy": 26.515, "best_prompt": 28.08, "prompt_id": "prompt-2", "CPS": 27.640548 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 55.00000000000001, "stderr": 0.0249 }, { "prompt": "prompt-2", "metric": "acc", "value": 55.00000000000001, "stderr": 0.0249 }, { "prompt": "prompt-3", "metric": "acc", "value": 78.5, "stderr": 0.0206 }, { "prompt": "prompt-4", "metric": "acc", "value": 77, "stderr": 0.0211 }, { "prompt": "prompt-5", "metric": "acc", "value": 55.00000000000001, "stderr": 0.0249 }, { "prompt": "prompt-6", "metric": "acc", "value": 55.00000000000001, "stderr": 0.0249 } ], "average_accuracy": 62.583333333333336, "best_prompt": 78.5, "prompt_id": "prompt-3", "CPS": 66.00541666666666 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 66.67, "stderr": 0.0199 }, { "prompt": "prompt-2", "metric": "f1", "value": 66.67, "stderr": 0.0199 }, { "prompt": "prompt-3", "metric": "f1", "value": 0.79, "stderr": 0.0079 }, { "prompt": "prompt-4", "metric": "f1", "value": 14.02, "stderr": 0.0287 }, { "prompt": "prompt-5", "metric": "f1", "value": 66.67, "stderr": 0.0199 }, { "prompt": "prompt-6", "metric": "f1", "value": 66.67, "stderr": 0.0199 } ], "average_accuracy": 46.915, "best_prompt": 66.67, "prompt_id": "prompt-1", "CPS": 53.4993415 } }
50.061157
{ "model_name": "microsoft/Phi-3.5-mini-instruct", "num_fewshot": "5", "batch_size": 1, "model": "microsoft/Phi-3.5-mini-instruct", "base_model": "Phi3ForCausalLM", "revision": "3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca", "submitted_time": "2024-08-16T20:48:26", "num_params_billion": 3.821079552, "language": "multilingual" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 36.6, "stderr": 0.0216 }, { "prompt": "prompt-2", "metric": "acc", "value": 37, "stderr": 0.0216 }, { "prompt": "prompt-3", "metric": "acc", "value": 66.4, "stderr": 0.0211 }, { "prompt": "prompt-4", "metric": "acc", "value": 65.2, "stderr": 0.0213 }, { "prompt": "prompt-5", "metric": "acc", "value": 36.8, "stderr": 0.0216 }, { "prompt": "prompt-6", "metric": "acc", "value": 37, "stderr": 0.0216 } ], "average_accuracy": 46.5, "best_prompt": 66.4, "prompt_id": "prompt-3", "CPS": 53.1864 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 28.43, "stderr": 0.0226 }, { "prompt": "prompt-2", "metric": "acc", "value": 28.43, "stderr": 0.0226 }, { "prompt": "prompt-3", "metric": "acc", "value": 84.78999999999999, "stderr": 0.018 }, { "prompt": "prompt-4", "metric": "acc", "value": 88.03, "stderr": 0.0162 }, { "prompt": "prompt-5", "metric": "acc", "value": 28.93, "stderr": 0.0227 }, { "prompt": "prompt-6", "metric": "acc", "value": 28.93, "stderr": 0.0227 } ], "average_accuracy": 47.923333333333325, "best_prompt": 88.03, "prompt_id": "prompt-4", "CPS": 52.72410133333333 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 63.92, "stderr": 0.014 }, { "prompt": "prompt-2", "metric": "f1", "value": 67.52, "stderr": 0.0128 }, { "prompt": "prompt-3", "metric": "f1", "value": 71.38, "stderr": 0.0118 }, { "prompt": "prompt-4", "metric": "f1", "value": 71.39, "stderr": 0.0114 }, { "prompt": "prompt-5", "metric": "f1", "value": 53.97, "stderr": 0.0161 }, { "prompt": "prompt-6", "metric": "f1", "value": 63.68000000000001, "stderr": 0.0135 } ], "average_accuracy": 65.30999999999999, "best_prompt": 71.39, "prompt_id": "prompt-4", "CPS": 67.049488 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 23.580000000000002, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 24.14, "stderr": null } ], "average_accuracy": 23.86, "best_prompt": 24.14, "prompt_id": "prompt-2", "CPS": 24.072408 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 32.213990547746, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 30.785665293262237, "stderr": null } ], "average_accuracy": 31.499827920504117, "best_prompt": 32.213990547746, "prompt_id": "prompt-1", "CPS": 31.983930266510765 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 36, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 36.3, "stderr": null } ], "average_accuracy": 36.15, "best_prompt": 36.3, "prompt_id": "prompt-2", "CPS": 36.24555 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 72.8, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 72.26, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 70.06, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 69.53, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 74.57000000000001, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 74.03999999999999, "stderr": null } ], "average_accuracy": 72.21, "best_prompt": 74.57000000000001, "prompt_id": "prompt-5", "CPS": 72.810148 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 19.89, "stderr": 0.0071 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 19.53, "stderr": 0.007 } ], "average_accuracy": 19.71, 
"best_prompt": 19.89, "prompt_id": "prompt-1", "CPS": 19.854198 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 78.5, "stderr": 0.0206 }, { "prompt": "prompt-2", "metric": "acc", "value": 79, "stderr": 0.0204 }, { "prompt": "prompt-3", "metric": "acc", "value": 77.5, "stderr": 0.0209 }, { "prompt": "prompt-4", "metric": "acc", "value": 78.5, "stderr": 0.0206 }, { "prompt": "prompt-5", "metric": "acc", "value": 73.75, "stderr": 0.022 }, { "prompt": "prompt-6", "metric": "acc", "value": 74.25, "stderr": 0.0219 } ], "average_accuracy": 76.91666666666667, "best_prompt": 79, "prompt_id": "prompt-2", "CPS": 77.35416666666667 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 61.480000000000004, "stderr": 0.0243 }, { "prompt": "prompt-2", "metric": "f1", "value": 64.62, "stderr": 0.023 }, { "prompt": "prompt-3", "metric": "f1", "value": 62.64999999999999, "stderr": 0.0254 }, { "prompt": "prompt-4", "metric": "f1", "value": 67.63, "stderr": 0.023 }, { "prompt": "prompt-5", "metric": "f1", "value": 59.209999999999994, "stderr": 0.0266 }, { "prompt": "prompt-6", "metric": "f1", "value": 68.25, "stderr": 0.0214 } ], "average_accuracy": 63.97333333333333, "best_prompt": 68.25, "prompt_id": "prompt-6", "CPS": 65.331175 } }
44.403315
{ "model_name": "microsoft/Phi-3.5-mini-instruct", "num_fewshot": "None", "batch_size": 1, "model": "microsoft/Phi-3.5-mini-instruct", "base_model": "Phi3ForCausalLM", "revision": "3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca", "submitted_time": "2024-08-16T20:48:26", "num_params_billion": 3.821079552, "language": "multilingual" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 24.2, "stderr": 0.0192 }, { "prompt": "prompt-2", "metric": "acc", "value": 24, "stderr": 0.0191 }, { "prompt": "prompt-3", "metric": "acc", "value": 62.8, "stderr": 0.0216 }, { "prompt": "prompt-4", "metric": "acc", "value": 62.2, "stderr": 0.0217 }, { "prompt": "prompt-5", "metric": "acc", "value": 35, "stderr": 0.0214 }, { "prompt": "prompt-6", "metric": "acc", "value": 35.8, "stderr": 0.0215 } ], "average_accuracy": 40.666666666666664, "best_prompt": 62.8, "prompt_id": "prompt-3", "CPS": 48.90026666666666 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 27.68, "stderr": 0.0224 }, { "prompt": "prompt-2", "metric": "acc", "value": 26.68, "stderr": 0.0221 }, { "prompt": "prompt-3", "metric": "acc", "value": 79.05, "stderr": 0.0203 }, { "prompt": "prompt-4", "metric": "acc", "value": 74.56, "stderr": 0.0218 }, { "prompt": "prompt-5", "metric": "acc", "value": 28.43, "stderr": 0.0226 }, { "prompt": "prompt-6", "metric": "acc", "value": 28.93, "stderr": 0.0227 } ], "average_accuracy": 44.221666666666664, "best_prompt": 79.05, "prompt_id": "prompt-3", "CPS": 51.5182025 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 62.33, "stderr": 0.0113 }, { "prompt": "prompt-2", "metric": "f1", "value": 62.63999999999999, "stderr": 0.0112 }, { "prompt": "prompt-3", "metric": "f1", "value": 68.13, "stderr": 0.0127 }, { "prompt": "prompt-4", "metric": "f1", "value": 69.17, "stderr": 0.0122 }, { "prompt": "prompt-5", "metric": "f1", "value": 62.2, "stderr": 0.0114 }, { "prompt": "prompt-6", "metric": "f1", "value": 62.46000000000001, "stderr": 0.0119 } ], "average_accuracy": 64.48833333333333, "best_prompt": 69.17, "prompt_id": "prompt-4", "CPS": 65.93169116666667 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 15.629999999999999, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 20.94, "stderr": null } ], "average_accuracy": 18.285, "best_prompt": 20.94, "prompt_id": "prompt-2", "CPS": 20.384043000000002 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 30.015710130877363, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 30.67641420261755, "stderr": null } ], "average_accuracy": 30.34606216674746, "best_prompt": 30.67641420261755, "prompt_id": "prompt-2", "CPS": 30.57507404376726 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 20.61, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 8.19, "stderr": null } ], "average_accuracy": 14.399999999999999, "best_prompt": 20.61, "prompt_id": "prompt-1", "CPS": 19.330119 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 30.94, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 35.480000000000004, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 70.59, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 69.13, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 27.37, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 28.84, "stderr": null } ], "average_accuracy": 43.724999999999994, "best_prompt": 70.59, "prompt_id": "prompt-3", "CPS": 51.6259965 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 21.990000000000002, "stderr": 0.0079 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 
23.400000000000002, "stderr": 0.0083 } ], "average_accuracy": 22.695, "best_prompt": 23.400000000000002, "prompt_id": "prompt-2", "CPS": 23.235030000000002 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 73.75, "stderr": 0.022 }, { "prompt": "prompt-2", "metric": "acc", "value": 71.5, "stderr": 0.0226 }, { "prompt": "prompt-3", "metric": "acc", "value": 80.75, "stderr": 0.0197 }, { "prompt": "prompt-4", "metric": "acc", "value": 81.5, "stderr": 0.0194 }, { "prompt": "prompt-5", "metric": "acc", "value": 51.74999999999999, "stderr": 0.025 }, { "prompt": "prompt-6", "metric": "acc", "value": 61, "stderr": 0.0244 } ], "average_accuracy": 70.04166666666667, "best_prompt": 81.5, "prompt_id": "prompt-4", "CPS": 72.16145833333334 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 58.29, "stderr": 0.0256 }, { "prompt": "prompt-2", "metric": "f1", "value": 66.49000000000001, "stderr": 0.02 }, { "prompt": "prompt-3", "metric": "f1", "value": 47.73, "stderr": 0.0301 }, { "prompt": "prompt-4", "metric": "f1", "value": 35.22, "stderr": 0.0335 }, { "prompt": "prompt-5", "metric": "f1", "value": 67.41, "stderr": 0.0201 }, { "prompt": "prompt-6", "metric": "f1", "value": 66.67, "stderr": 0.0199 } ], "average_accuracy": 56.968333333333334, "best_prompt": 67.41, "prompt_id": "prompt-5", "CPS": 60.3712725 } }
51.839079
{ "model_name": "microsoft/phi-4", "num_fewshot": "5", "batch_size": 1, "model": "microsoft/phi-4", "base_model": "Phi3ForCausalLM", "revision": "187ef0342fff0eb3333be9f00389385e95ef0b61", "submitted_time": "2024-12-11T11:47:29", "num_params_billion": 14.6595072, "language": "en" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 43.8, "stderr": 0.0222 }, { "prompt": "prompt-2", "metric": "acc", "value": 43.6, "stderr": 0.0222 }, { "prompt": "prompt-3", "metric": "acc", "value": 85, "stderr": 0.016 }, { "prompt": "prompt-4", "metric": "acc", "value": 85.2, "stderr": 0.0159 }, { "prompt": "prompt-5", "metric": "acc", "value": 44.800000000000004, "stderr": 0.0223 }, { "prompt": "prompt-6", "metric": "acc", "value": 44.800000000000004, "stderr": 0.0223 } ], "average_accuracy": 57.866666666666674, "best_prompt": 85.2, "prompt_id": "prompt-4", "CPS": 61.91200000000001 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 31.419999999999998, "stderr": 0.0232 }, { "prompt": "prompt-2", "metric": "acc", "value": 31.169999999999998, "stderr": 0.0232 }, { "prompt": "prompt-3", "metric": "acc", "value": 97.76, "stderr": 0.0074 }, { "prompt": "prompt-4", "metric": "acc", "value": 98, "stderr": 0.007 }, { "prompt": "prompt-5", "metric": "acc", "value": 30.919999999999998, "stderr": 0.0231 }, { "prompt": "prompt-6", "metric": "acc", "value": 31.169999999999998, "stderr": 0.0232 } ], "average_accuracy": 53.40666666666667, "best_prompt": 98, "prompt_id": "prompt-4", "CPS": 54.29853333333334 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 63.91, "stderr": 0.0142 }, { "prompt": "prompt-2", "metric": "f1", "value": 67.4, "stderr": 0.0137 }, { "prompt": "prompt-3", "metric": "f1", "value": 73.42, "stderr": 0.0126 }, { "prompt": "prompt-4", "metric": "f1", "value": 75.89, "stderr": 0.0117 }, { "prompt": "prompt-5", "metric": "f1", "value": 59.18, "stderr": 0.0164 }, { "prompt": "prompt-6", "metric": "f1", "value": 62.68, "stderr": 0.0155 } ], "average_accuracy": 67.08, "best_prompt": 75.89, "prompt_id": "prompt-4", "CPS": 69.20409099999999 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 35.08, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 35.11, "stderr": null } ], "average_accuracy": 35.095, "best_prompt": 35.11, "prompt_id": "prompt-2", "CPS": 35.1047335 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 27.75187227338827, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 28.101054289869122, "stderr": null } ], "average_accuracy": 27.926463281628696, "best_prompt": 28.101054289869122, "prompt_id": "prompt-2", "CPS": 28.05199237585825 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 37.34, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 36.32, "stderr": null } ], "average_accuracy": 36.83, "best_prompt": 37.34, "prompt_id": "prompt-1", "CPS": 37.149566 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 73.00999999999999, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 74.03, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 77.69, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 77.95, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 71.37, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 72.58, "stderr": null } ], "average_accuracy": 74.43833333333333, "best_prompt": 77.95, "prompt_id": "prompt-4", "CPS": 75.21265583333333 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 21.29, "stderr": 0.0085 }, { "prompt": "prompt-2", "metric": "rouge1", 
"value": 20.82, "stderr": 0.0082 } ], "average_accuracy": 21.055, "best_prompt": 21.29, "prompt_id": "prompt-1", "CPS": 21.2399685 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 76.25, "stderr": 0.0213 }, { "prompt": "prompt-2", "metric": "acc", "value": 78, "stderr": 0.0207 }, { "prompt": "prompt-3", "metric": "acc", "value": 79.25, "stderr": 0.0203 }, { "prompt": "prompt-4", "metric": "acc", "value": 79, "stderr": 0.0204 }, { "prompt": "prompt-5", "metric": "acc", "value": 75, "stderr": 0.0217 }, { "prompt": "prompt-6", "metric": "acc", "value": 69.75, "stderr": 0.023 } ], "average_accuracy": 76.20833333333333, "best_prompt": 79.25, "prompt_id": "prompt-3", "CPS": 76.83947916666666 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 52.42, "stderr": 0.0307 }, { "prompt": "prompt-2", "metric": "f1", "value": 52.66, "stderr": 0.0312 }, { "prompt": "prompt-3", "metric": "f1", "value": 59.809999999999995, "stderr": 0.0281 }, { "prompt": "prompt-4", "metric": "f1", "value": 63.160000000000004, "stderr": 0.0275 }, { "prompt": "prompt-5", "metric": "f1", "value": 58.650000000000006, "stderr": 0.0254 }, { "prompt": "prompt-6", "metric": "f1", "value": 56.330000000000005, "stderr": 0.0269 } ], "average_accuracy": 57.17166666666666, "best_prompt": 63.160000000000004, "prompt_id": "prompt-4", "CPS": 59.37776866666667 } }
38.372307
{ "model_name": "microsoft/phi-4", "num_fewshot": "None", "batch_size": 1, "model": "microsoft/phi-4", "base_model": "Phi3ForCausalLM", "revision": "187ef0342fff0eb3333be9f00389385e95ef0b61", "submitted_time": "2024-12-11T11:47:29", "num_params_billion": 14.6595072, "language": "en" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 29.599999999999998, "stderr": 0.0204 }, { "prompt": "prompt-2", "metric": "acc", "value": 26.6, "stderr": 0.0198 }, { "prompt": "prompt-3", "metric": "acc", "value": 83.2, "stderr": 0.0167 }, { "prompt": "prompt-4", "metric": "acc", "value": 80.60000000000001, "stderr": 0.0177 }, { "prompt": "prompt-5", "metric": "acc", "value": 40, "stderr": 0.0219 }, { "prompt": "prompt-6", "metric": "acc", "value": 40.8, "stderr": 0.022 } ], "average_accuracy": 50.13333333333333, "best_prompt": 83.2, "prompt_id": "prompt-3", "CPS": 55.68853333333333 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 31.169999999999998, "stderr": 0.0232 }, { "prompt": "prompt-2", "metric": "acc", "value": 31.169999999999998, "stderr": 0.0232 }, { "prompt": "prompt-3", "metric": "acc", "value": 88.03, "stderr": 0.0162 }, { "prompt": "prompt-4", "metric": "acc", "value": 95.26, "stderr": 0.0106 }, { "prompt": "prompt-5", "metric": "acc", "value": 31.919999999999998, "stderr": 0.0233 }, { "prompt": "prompt-6", "metric": "acc", "value": 31.169999999999998, "stderr": 0.0232 } ], "average_accuracy": 51.45333333333334, "best_prompt": 95.26, "prompt_id": "prompt-4", "CPS": 53.52976933333334 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 57.720000000000006, "stderr": 0.0131 }, { "prompt": "prompt-2", "metric": "f1", "value": 61.82, "stderr": 0.0114 }, { "prompt": "prompt-3", "metric": "f1", "value": 62.69, "stderr": 0.0112 }, { "prompt": "prompt-4", "metric": "f1", "value": 63.14999999999999, "stderr": 0.0112 }, { "prompt": "prompt-5", "metric": "f1", "value": 55.14, "stderr": 0.013 }, { "prompt": "prompt-6", "metric": "f1", "value": 61.92999999999999, "stderr": 0.0113 } ], "average_accuracy": 60.40833333333333, "best_prompt": 63.14999999999999, "prompt_id": "prompt-4", "CPS": 61.418637499999996 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 0, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 0, "stderr": null } ], "average_accuracy": 0, "best_prompt": 0, "prompt_id": "prompt-1", "CPS": 0 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 0.3643625787687833, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 0, "stderr": null } ], "average_accuracy": 0.18218128938439165, "best_prompt": 0.3643625787687833, "prompt_id": "prompt-1", "CPS": 0.3636987783247481 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 21.66, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 17.11, "stderr": null } ], "average_accuracy": 19.384999999999998, "best_prompt": 21.66, "prompt_id": "prompt-1", "CPS": 21.167234999999998 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 27.13, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 26.640000000000004, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 69.99, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 74.26, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 29.330000000000002, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 26.52, "stderr": null } ], "average_accuracy": 42.31166666666667, "best_prompt": 74.26, "prompt_id": "prompt-4", "CPS": 50.535167666666666 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 25.69, 
"stderr": 0.0085 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 0.8699999999999999, "stderr": 0.0044 } ], "average_accuracy": 13.280000000000001, "best_prompt": 25.69, "prompt_id": "prompt-1", "CPS": 22.501871 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 59.5, "stderr": 0.0246 }, { "prompt": "prompt-2", "metric": "acc", "value": 56.25, "stderr": 0.0248 }, { "prompt": "prompt-3", "metric": "acc", "value": 66, "stderr": 0.0237 }, { "prompt": "prompt-4", "metric": "acc", "value": 73.25, "stderr": 0.0222 }, { "prompt": "prompt-5", "metric": "acc", "value": 52.5, "stderr": 0.025 }, { "prompt": "prompt-6", "metric": "acc", "value": 55.00000000000001, "stderr": 0.0249 } ], "average_accuracy": 60.416666666666664, "best_prompt": 73.25, "prompt_id": "prompt-4", "CPS": 63.84958333333333 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 17.03, "stderr": 0.0284 }, { "prompt": "prompt-2", "metric": "f1", "value": 10.18, "stderr": 0.025 }, { "prompt": "prompt-3", "metric": "f1", "value": 69.01, "stderr": 0.0199 }, { "prompt": "prompt-4", "metric": "f1", "value": 61.42999999999999, "stderr": 0.0272 }, { "prompt": "prompt-5", "metric": "f1", "value": 64.96, "stderr": 0.021 }, { "prompt": "prompt-6", "metric": "f1", "value": 66.75999999999999, "stderr": 0.0199 } ], "average_accuracy": 48.228333333333325, "best_prompt": 69.01, "prompt_id": "prompt-3", "CPS": 54.66857183333333 } }
{ "average_CPS": 49.365969 }
{ "model_name": "mii-llm/maestrale-chat-v0.4-beta", "num_fewshot": "5", "batch_size": 1, "model": "mii-llm/maestrale-chat-v0.4-beta", "base_model": "MistralForCausalLM", "revision": "3a0e5e58f3e7cb1f1496dd5ff593668e77fe82c8", "submitted_time": "2024-06-06T15:34:46", "num_params_billion": 7.24174848, "language": "it" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 43.4, "stderr": 0.0222 }, { "prompt": "prompt-2", "metric": "acc", "value": 43.4, "stderr": 0.0222 }, { "prompt": "prompt-3", "metric": "acc", "value": 64.60000000000001, "stderr": 0.0214 }, { "prompt": "prompt-4", "metric": "acc", "value": 64.60000000000001, "stderr": 0.0214 }, { "prompt": "prompt-5", "metric": "acc", "value": 44.2, "stderr": 0.0222 }, { "prompt": "prompt-6", "metric": "acc", "value": 44, "stderr": 0.0222 } ], "average_accuracy": 50.699999999999996, "best_prompt": 64.60000000000001, "prompt_id": "prompt-3", "CPS": 55.620599999999996 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 29.18, "stderr": 0.0227 }, { "prompt": "prompt-2", "metric": "acc", "value": 29.43, "stderr": 0.0228 }, { "prompt": "prompt-3", "metric": "acc", "value": 74.06, "stderr": 0.0219 }, { "prompt": "prompt-4", "metric": "acc", "value": 72.07000000000001, "stderr": 0.0224 }, { "prompt": "prompt-5", "metric": "acc", "value": 29.18, "stderr": 0.0227 }, { "prompt": "prompt-6", "metric": "acc", "value": 28.93, "stderr": 0.0227 } ], "average_accuracy": 43.80833333333334, "best_prompt": 74.06, "prompt_id": "prompt-3", "CPS": 51.65561566666667 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 67.81, "stderr": 0.0121 }, { "prompt": "prompt-2", "metric": "f1", "value": 70.23, "stderr": 0.0117 }, { "prompt": "prompt-3", "metric": "f1", "value": 71.56, "stderr": 0.0116 }, { "prompt": "prompt-4", "metric": "f1", "value": 71.41999999999999, "stderr": 0.0112 }, { "prompt": "prompt-5", "metric": "f1", "value": 52.01, "stderr": 0.0159 }, { "prompt": "prompt-6", "metric": "f1", "value": 56.89999999999999, "stderr": 0.0152 } ], "average_accuracy": 64.98833333333333, "best_prompt": 71.56, "prompt_id": "prompt-3", "CPS": 66.85731533333333 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 22.59, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 22.14, "stderr": null } ], "average_accuracy": 22.365000000000002, "best_prompt": 22.59, "prompt_id": "prompt-1", "CPS": 22.5391725 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 33.676022782355794, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 32.80326345128454, "stderr": null } ], "average_accuracy": 33.23964311682017, "best_prompt": 33.676022782355794, "prompt_id": "prompt-1", "CPS": 33.529067466772446 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 34.67, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 36.059999999999995, "stderr": null } ], "average_accuracy": 35.364999999999995, "best_prompt": 36.059999999999995, "prompt_id": "prompt-2", "CPS": 35.809383 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 73.39, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 73.57000000000001, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 68.85, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 70.67, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 71.94, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 72.06, "stderr": null } ], "average_accuracy": 71.74666666666667, "best_prompt": 73.57000000000001, "prompt_id": "prompt-2", "CPS": 72.22857366666668 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", 
"value": 21.17, "stderr": 0.0081 }, { "prompt": "prompt-2", "metric": "rouge1", "value": 21.18, "stderr": 0.0079 } ], "average_accuracy": 21.175, "best_prompt": 21.18, "prompt_id": "prompt-2", "CPS": 21.178941 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 75, "stderr": 0.0217 }, { "prompt": "prompt-2", "metric": "acc", "value": 77.25, "stderr": 0.021 }, { "prompt": "prompt-3", "metric": "acc", "value": 76, "stderr": 0.0214 }, { "prompt": "prompt-4", "metric": "acc", "value": 78.5, "stderr": 0.0206 }, { "prompt": "prompt-5", "metric": "acc", "value": 64.75, "stderr": 0.0239 }, { "prompt": "prompt-6", "metric": "acc", "value": 64.25, "stderr": 0.024 } ], "average_accuracy": 72.625, "best_prompt": 78.5, "prompt_id": "prompt-4", "CPS": 73.888125 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 56.96, "stderr": 0.0275 }, { "prompt": "prompt-2", "metric": "f1", "value": 56.64, "stderr": 0.028 }, { "prompt": "prompt-3", "metric": "f1", "value": 60.23, "stderr": 0.0234 }, { "prompt": "prompt-4", "metric": "f1", "value": 61.42999999999999, "stderr": 0.0237 }, { "prompt": "prompt-5", "metric": "f1", "value": 59.870000000000005, "stderr": 0.0238 }, { "prompt": "prompt-6", "metric": "f1", "value": 61.760000000000005, "stderr": 0.0232 } ], "average_accuracy": 59.48166666666666, "best_prompt": 61.760000000000005, "prompt_id": "prompt-6", "CPS": 60.352901333333335 } }
{ "average_CPS": 41.042316 }
{ "model_name": "mii-llm/maestrale-chat-v0.4-beta", "num_fewshot": "None", "batch_size": 1, "model": "mii-llm/maestrale-chat-v0.4-beta", "base_model": "MistralForCausalLM", "revision": "3a0e5e58f3e7cb1f1496dd5ff593668e77fe82c8", "submitted_time": "2024-06-06T15:34:46", "num_params_billion": 7.24174848, "language": "it" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 31.4, "stderr": 0.0208 }, { "prompt": "prompt-2", "metric": "acc", "value": 30, "stderr": 0.0205 }, { "prompt": "prompt-3", "metric": "acc", "value": 63.800000000000004, "stderr": 0.0215 }, { "prompt": "prompt-4", "metric": "acc", "value": 63.800000000000004, "stderr": 0.0215 }, { "prompt": "prompt-5", "metric": "acc", "value": 40, "stderr": 0.0219 }, { "prompt": "prompt-6", "metric": "acc", "value": 42, "stderr": 0.0221 } ], "average_accuracy": 45.166666666666664, "best_prompt": 63.800000000000004, "prompt_id": "prompt-3", "CPS": 51.91193333333333 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 30.669999999999998, "stderr": 0.0231 }, { "prompt": "prompt-2", "metric": "acc", "value": 30.17, "stderr": 0.023 }, { "prompt": "prompt-3", "metric": "acc", "value": 71.57, "stderr": 0.0226 }, { "prompt": "prompt-4", "metric": "acc", "value": 83.04, "stderr": 0.0188 }, { "prompt": "prompt-5", "metric": "acc", "value": 30.42, "stderr": 0.023 }, { "prompt": "prompt-6", "metric": "acc", "value": 30.669999999999998, "stderr": 0.0231 } ], "average_accuracy": 46.09, "best_prompt": 83.04, "prompt_id": "prompt-4", "CPS": 52.35672 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 62.339999999999996, "stderr": 0.0113 }, { "prompt": "prompt-2", "metric": "f1", "value": 62.5, "stderr": 0.0112 }, { "prompt": "prompt-3", "metric": "f1", "value": 62.419999999999995, "stderr": 0.0125 }, { "prompt": "prompt-4", "metric": "f1", "value": 59.260000000000005, "stderr": 0.0129 }, { "prompt": "prompt-5", "metric": "f1", "value": 54.15, "stderr": 0.0144 }, { "prompt": "prompt-6", "metric": "f1", "value": 63.07000000000001, "stderr": 0.0115 } ], "average_accuracy": 60.62333333333333, "best_prompt": 63.07000000000001, "prompt_id": "prompt-6", "CPS": 61.526887333333335 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 17.75, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 20.68, "stderr": null } ], "average_accuracy": 19.215, "best_prompt": 20.68, "prompt_id": "prompt-2", "CPS": 20.377038 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 5.045075133301019, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 4.1902968977217645, "stderr": null } ], "average_accuracy": 4.617686015511392, "best_prompt": 5.045075133301019, "prompt_id": "prompt-1", "CPS": 5.0235130311969804 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 19.64, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 24.33, "stderr": null } ], "average_accuracy": 21.985, "best_prompt": 24.33, "prompt_id": "prompt-2", "CPS": 23.7594615 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 48.91, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 52.16, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 66.4, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 67.4, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 66.17, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 66.28, "stderr": null } ], "average_accuracy": 61.220000000000006, "best_prompt": 67.4, "prompt_id": "prompt-4", "CPS": 63.234680000000004 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 24.38, "stderr": 0.0098 }, { "prompt": 
"prompt-2", "metric": "rouge1", "value": 25.89, "stderr": 0.0094 } ], "average_accuracy": 25.134999999999998, "best_prompt": 25.89, "prompt_id": "prompt-2", "CPS": 25.6945305 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 51, "stderr": 0.025 }, { "prompt": "prompt-2", "metric": "acc", "value": 57.25, "stderr": 0.0248 }, { "prompt": "prompt-3", "metric": "acc", "value": 53.75, "stderr": 0.025 }, { "prompt": "prompt-4", "metric": "acc", "value": 73.25, "stderr": 0.0222 }, { "prompt": "prompt-5", "metric": "acc", "value": 41.75, "stderr": 0.0247 }, { "prompt": "prompt-6", "metric": "acc", "value": 55.50000000000001, "stderr": 0.0249 } ], "average_accuracy": 55.416666666666664, "best_prompt": 73.25, "prompt_id": "prompt-4", "CPS": 60.187083333333334 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 41.870000000000005, "stderr": 0.0326 }, { "prompt": "prompt-2", "metric": "f1", "value": 15.920000000000002, "stderr": 0.0292 }, { "prompt": "prompt-3", "metric": "f1", "value": 25.729999999999997, "stderr": 0.0312 }, { "prompt": "prompt-4", "metric": "f1", "value": 0, "stderr": 0 }, { "prompt": "prompt-5", "metric": "f1", "value": 66.75999999999999, "stderr": 0.0199 }, { "prompt": "prompt-6", "metric": "f1", "value": 66.84, "stderr": 0.0199 } ], "average_accuracy": 36.18666666666667, "best_prompt": 66.84, "prompt_id": "prompt-6", "CPS": 46.351312 } }
{ "average_CPS": 47.313878 }
{ "model_name": "mistralai/Mistral-7B-Instruct-v0.3", "num_fewshot": "5", "batch_size": 1, "model": "mistralai/Mistral-7B-Instruct-v0.3", "base_model": "MistralForCausalLM", "revision": "e0bc86c23ce5aae1db576c8cca6f06f1f73af2db", "submitted_time": "2024-05-22T09:57:04", "num_params_billion": 7.248023552, "language": "" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 35.8, "stderr": 0.0215 }, { "prompt": "prompt-2", "metric": "acc", "value": 35.6, "stderr": 0.0214 }, { "prompt": "prompt-3", "metric": "acc", "value": 57.4, "stderr": 0.0221 }, { "prompt": "prompt-4", "metric": "acc", "value": 57.199999999999996, "stderr": 0.0221 }, { "prompt": "prompt-5", "metric": "acc", "value": 36.8, "stderr": 0.0216 }, { "prompt": "prompt-6", "metric": "acc", "value": 37.2, "stderr": 0.0216 } ], "average_accuracy": 43.333333333333336, "best_prompt": 57.4, "prompt_id": "prompt-3", "CPS": 49.32573333333333 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 28.93, "stderr": 0.0227 }, { "prompt": "prompt-2", "metric": "acc", "value": 28.93, "stderr": 0.0227 }, { "prompt": "prompt-3", "metric": "acc", "value": 96.76, "stderr": 0.0089 }, { "prompt": "prompt-4", "metric": "acc", "value": 96.26, "stderr": 0.0095 }, { "prompt": "prompt-5", "metric": "acc", "value": 28.93, "stderr": 0.0227 }, { "prompt": "prompt-6", "metric": "acc", "value": 28.93, "stderr": 0.0227 } ], "average_accuracy": 51.45666666666667, "best_prompt": 96.76, "prompt_id": "prompt-3", "CPS": 52.92449466666666 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 62.88, "stderr": 0.013 }, { "prompt": "prompt-2", "metric": "f1", "value": 64.58, "stderr": 0.0123 }, { "prompt": "prompt-3", "metric": "f1", "value": 63.79, "stderr": 0.012 }, { "prompt": "prompt-4", "metric": "f1", "value": 66.03, "stderr": 0.0112 }, { "prompt": "prompt-5", "metric": "f1", "value": 60.84, "stderr": 0.0139 }, { "prompt": "prompt-6", "metric": "f1", "value": 63.59, "stderr": 0.0128 } ], "average_accuracy": 63.61833333333334, "best_prompt": 66.03, "prompt_id": "prompt-4", "CPS": 64.4375765 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 18.22, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 18.099999999999998, "stderr": null } ], "average_accuracy": 18.159999999999997, "best_prompt": 18.22, "prompt_id": "prompt-1", "CPS": 18.209068 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 26.77644813378575, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 34.52066529326224, "stderr": null } ], "average_accuracy": 30.648556713523995, "best_prompt": 34.52066529326224, "prompt_id": "prompt-2", "CPS": 33.18398765065911 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 36.78, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 36.919999999999995, "stderr": null } ], "average_accuracy": 36.849999999999994, "best_prompt": 36.919999999999995, "prompt_id": "prompt-2", "CPS": 36.894155999999995 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 70.76, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 69.36, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 60.39, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 61.86000000000001, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 72.39999999999999, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 72.97, "stderr": null } ], "average_accuracy": 67.95666666666666, "best_prompt": 72.97, "prompt_id": "prompt-6", "CPS": 69.31177066666666 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 19.939999999999998, "stderr": 0.0078 }, { 
"prompt": "prompt-2", "metric": "rouge1", "value": 20.66, "stderr": 0.008 } ], "average_accuracy": 20.299999999999997, "best_prompt": 20.66, "prompt_id": "prompt-2", "CPS": 20.585624 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 75.75, "stderr": 0.0215 }, { "prompt": "prompt-2", "metric": "acc", "value": 77.5, "stderr": 0.0209 }, { "prompt": "prompt-3", "metric": "acc", "value": 73.75, "stderr": 0.022 }, { "prompt": "prompt-4", "metric": "acc", "value": 74, "stderr": 0.022 }, { "prompt": "prompt-5", "metric": "acc", "value": 69, "stderr": 0.0232 }, { "prompt": "prompt-6", "metric": "acc", "value": 71, "stderr": 0.0227 } ], "average_accuracy": 73.5, "best_prompt": 77.5, "prompt_id": "prompt-2", "CPS": 74.39999999999999 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 48.88, "stderr": 0.0308 }, { "prompt": "prompt-2", "metric": "f1", "value": 44.57, "stderr": 0.0325 }, { "prompt": "prompt-3", "metric": "f1", "value": 65.31, "stderr": 0.0219 }, { "prompt": "prompt-4", "metric": "f1", "value": 66.16, "stderr": 0.0214 }, { "prompt": "prompt-5", "metric": "f1", "value": 32.07, "stderr": 0.0328 }, { "prompt": "prompt-6", "metric": "f1", "value": 28.48, "stderr": 0.0328 } ], "average_accuracy": 47.57833333333334, "best_prompt": 66.16, "prompt_id": "prompt-4", "CPS": 53.86636933333334 } }
{ "average_CPS": 41.56062 }
{ "model_name": "mistralai/Mistral-7B-Instruct-v0.3", "num_fewshot": "None", "batch_size": 1, "model": "mistralai/Mistral-7B-Instruct-v0.3", "base_model": "MistralForCausalLM", "revision": "e0bc86c23ce5aae1db576c8cca6f06f1f73af2db", "submitted_time": "2024-05-22T09:57:04", "num_params_billion": 7.248023552, "language": "" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 28.599999999999998, "stderr": 0.0202 }, { "prompt": "prompt-2", "metric": "acc", "value": 28.4, "stderr": 0.0202 }, { "prompt": "prompt-3", "metric": "acc", "value": 54, "stderr": 0.0223 }, { "prompt": "prompt-4", "metric": "acc", "value": 53, "stderr": 0.0223 }, { "prompt": "prompt-5", "metric": "acc", "value": 34.599999999999994, "stderr": 0.0213 }, { "prompt": "prompt-6", "metric": "acc", "value": 36.8, "stderr": 0.0216 } ], "average_accuracy": 39.23333333333333, "best_prompt": 54, "prompt_id": "prompt-3", "CPS": 46.025999999999996 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 29.93, "stderr": 0.0229 }, { "prompt": "prompt-2", "metric": "acc", "value": 29.68, "stderr": 0.0228 }, { "prompt": "prompt-3", "metric": "acc", "value": 80.80000000000001, "stderr": 0.0197 }, { "prompt": "prompt-4", "metric": "acc", "value": 84.78999999999999, "stderr": 0.018 }, { "prompt": "prompt-5", "metric": "acc", "value": 30.669999999999998, "stderr": 0.0231 }, { "prompt": "prompt-6", "metric": "acc", "value": 29.93, "stderr": 0.0229 } ], "average_accuracy": 47.63333333333333, "best_prompt": 84.78999999999999, "prompt_id": "prompt-4", "CPS": 53.284862333333336 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 62.29, "stderr": 0.0113 }, { "prompt": "prompt-2", "metric": "f1", "value": 61.46, "stderr": 0.0115 }, { "prompt": "prompt-3", "metric": "f1", "value": 54.94, "stderr": 0.0126 }, { "prompt": "prompt-4", "metric": "f1", "value": 27.189999999999998, "stderr": 0.0164 }, { "prompt": "prompt-5", "metric": "f1", "value": 65.39, "stderr": 0.0116 }, { "prompt": "prompt-6", "metric": "f1", "value": 63.91, "stderr": 0.0112 } ], "average_accuracy": 55.86333333333332, "best_prompt": 65.39, "prompt_id": "prompt-5", "CPS": 59.160512666666655 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 2.68, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 10.26, "stderr": null } ], "average_accuracy": 6.47, "best_prompt": 10.26, "prompt_id": "prompt-2", "CPS": 9.871146 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 9.730236306349976, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 9.542698739699468, "stderr": null } ], "average_accuracy": 9.636467523024722, "best_prompt": 9.730236306349976, "prompt_id": "prompt-1", "CPS": 9.72111238215084 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 18.75, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 21.83, "stderr": null } ], "average_accuracy": 20.29, "best_prompt": 21.83, "prompt_id": "prompt-2", "CPS": 21.493817999999997 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 58.39, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 63.470000000000006, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 43.63, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 62.32, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 58.330000000000005, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 66.14, "stderr": null } ], "average_accuracy": 58.71333333333333, "best_prompt": 66.14, "prompt_id": "prompt-6", "CPS": 61.22800266666666 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 26.13, "stderr": 0.0011 }, { 
"prompt": "prompt-2", "metric": "rouge1", "value": 28.43, "stderr": 0.0011 } ], "average_accuracy": 27.28, "best_prompt": 28.43, "prompt_id": "prompt-2", "CPS": 28.103055 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 71.25, "stderr": 0.0227 }, { "prompt": "prompt-2", "metric": "acc", "value": 61, "stderr": 0.0244 }, { "prompt": "prompt-3", "metric": "acc", "value": 65, "stderr": 0.0239 }, { "prompt": "prompt-4", "metric": "acc", "value": 59, "stderr": 0.0246 }, { "prompt": "prompt-5", "metric": "acc", "value": 51, "stderr": 0.025 }, { "prompt": "prompt-6", "metric": "acc", "value": 57.75, "stderr": 0.0247 } ], "average_accuracy": 60.833333333333336, "best_prompt": 71.25, "prompt_id": "prompt-1", "CPS": 63.828125 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 65.41, "stderr": 0.0219 }, { "prompt": "prompt-2", "metric": "f1", "value": 66.49000000000001, "stderr": 0.0202 }, { "prompt": "prompt-3", "metric": "f1", "value": 54.87, "stderr": 0.0252 }, { "prompt": "prompt-4", "metric": "f1", "value": 65.42, "stderr": 0.021 }, { "prompt": "prompt-5", "metric": "f1", "value": 64.14, "stderr": 0.0222 }, { "prompt": "prompt-6", "metric": "f1", "value": 50.12, "stderr": 0.0297 } ], "average_accuracy": 61.074999999999996, "best_prompt": 66.49000000000001, "prompt_id": "prompt-2", "CPS": 62.8895665 } }
{ "average_CPS": 43.689416 }
{ "model_name": "occiglot/occiglot-7b-it-en-instruct", "num_fewshot": "5", "batch_size": 1, "model": "occiglot/occiglot-7b-it-en-instruct", "base_model": "MistralForCausalLM", "revision": "4204441d8d341b01de154fb929beaeba74e99b1a", "submitted_time": "2024-03-05T10:15:33", "num_params_billion": null, "language": "en_it" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 41.8, "stderr": 0.0221 }, { "prompt": "prompt-2", "metric": "acc", "value": 43, "stderr": 0.0222 }, { "prompt": "prompt-3", "metric": "acc", "value": 61.199999999999996, "stderr": 0.0218 }, { "prompt": "prompt-4", "metric": "acc", "value": 61.8, "stderr": 0.0218 }, { "prompt": "prompt-5", "metric": "acc", "value": 44, "stderr": 0.0222 }, { "prompt": "prompt-6", "metric": "acc", "value": 44.800000000000004, "stderr": 0.0223 } ], "average_accuracy": 49.43333333333334, "best_prompt": 61.8, "prompt_id": "prompt-4", "CPS": 54.1574 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 29.43, "stderr": 0.0228 }, { "prompt": "prompt-2", "metric": "acc", "value": 29.43, "stderr": 0.0228 }, { "prompt": "prompt-3", "metric": "acc", "value": 71.07, "stderr": 0.0227 }, { "prompt": "prompt-4", "metric": "acc", "value": 73.07000000000001, "stderr": 0.0222 }, { "prompt": "prompt-5", "metric": "acc", "value": 29.18, "stderr": 0.0227 }, { "prompt": "prompt-6", "metric": "acc", "value": 29.93, "stderr": 0.0229 } ], "average_accuracy": 43.685, "best_prompt": 73.07000000000001, "prompt_id": "prompt-4", "CPS": 51.598380500000005 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 59.760000000000005, "stderr": 0.013 }, { "prompt": "prompt-2", "metric": "f1", "value": 64.71000000000001, "stderr": 0.0122 }, { "prompt": "prompt-3", "metric": "f1", "value": 67.01, "stderr": 0.0123 }, { "prompt": "prompt-4", "metric": "f1", "value": 69.78, "stderr": 0.012 }, { "prompt": "prompt-5", "metric": "f1", "value": 40.47, "stderr": 0.0173 }, { "prompt": "prompt-6", "metric": "f1", "value": 48.07, "stderr": 0.0161 } ], "average_accuracy": 58.300000000000004, "best_prompt": 69.78, "prompt_id": "prompt-4", "CPS": 61.769256 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 5.18, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 5.609999999999999, "stderr": null } ], "average_accuracy": 5.395, "best_prompt": 5.609999999999999, "prompt_id": "prompt-2", "CPS": 5.5979385 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 24.63574406204556, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 25.221215462918085, "stderr": null } ], "average_accuracy": 24.92847976248182, "best_prompt": 25.221215462918085, "prompt_id": "prompt-2", "CPS": 25.14738396117417 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 28.71, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 25.69, "stderr": null } ], "average_accuracy": 27.200000000000003, "best_prompt": 28.71, "prompt_id": "prompt-1", "CPS": 28.276479000000002 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 67.47999999999999, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 67.99, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 69.82000000000001, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 70.48, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 67.51, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 67.31, "stderr": null } ], "average_accuracy": 68.43166666666666, "best_prompt": 70.48, "prompt_id": "prompt-4", "CPS": 69.03633466666666 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 19.66, "stderr": 0.0148 }, { 
"prompt": "prompt-2", "metric": "rouge1", "value": 21.44, "stderr": 0.0151 } ], "average_accuracy": 20.55, "best_prompt": 21.44, "prompt_id": "prompt-2", "CPS": 21.249184 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 68.5, "stderr": 0.0233 }, { "prompt": "prompt-2", "metric": "acc", "value": 70.25, "stderr": 0.0229 }, { "prompt": "prompt-3", "metric": "acc", "value": 60.25, "stderr": 0.0245 }, { "prompt": "prompt-4", "metric": "acc", "value": 62, "stderr": 0.0243 }, { "prompt": "prompt-5", "metric": "acc", "value": 55.00000000000001, "stderr": 0.0249 }, { "prompt": "prompt-6", "metric": "acc", "value": 55.00000000000001, "stderr": 0.0249 } ], "average_accuracy": 61.833333333333336, "best_prompt": 70.25, "prompt_id": "prompt-2", "CPS": 64.33729166666667 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 53.15, "stderr": 0.0287 }, { "prompt": "prompt-2", "metric": "f1", "value": 52.82, "stderr": 0.0288 }, { "prompt": "prompt-3", "metric": "f1", "value": 63.65, "stderr": 0.0215 }, { "prompt": "prompt-4", "metric": "f1", "value": 60.72, "stderr": 0.0228 }, { "prompt": "prompt-5", "metric": "f1", "value": 33.160000000000004, "stderr": 0.0313 }, { "prompt": "prompt-6", "metric": "f1", "value": 43.69, "stderr": 0.0306 } ], "average_accuracy": 51.19833333333333, "best_prompt": 63.65, "prompt_id": "prompt-3", "CPS": 55.724514166666665 } }
{ "average_CPS": 38.001769 }
{ "model_name": "occiglot/occiglot-7b-it-en-instruct", "num_fewshot": "None", "batch_size": 8, "model": "occiglot/occiglot-7b-it-en-instruct", "base_model": "MistralForCausalLM", "revision": "4204441d8d341b01de154fb929beaeba74e99b1a", "submitted_time": "2024-03-05T10:15:33", "num_params_billion": null, "language": "en_it" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 35.8, "stderr": 0.0215 }, { "prompt": "prompt-2", "metric": "acc", "value": 33.2, "stderr": 0.0211 }, { "prompt": "prompt-3", "metric": "acc", "value": 54, "stderr": 0.0223 }, { "prompt": "prompt-4", "metric": "acc", "value": 53.6, "stderr": 0.0223 }, { "prompt": "prompt-5", "metric": "acc", "value": 40.400000000000006, "stderr": 0.022 }, { "prompt": "prompt-6", "metric": "acc", "value": 39.800000000000004, "stderr": 0.0219 } ], "average_accuracy": 42.800000000000004, "best_prompt": 54, "prompt_id": "prompt-3", "CPS": 47.952 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 29.93, "stderr": 0.0229 }, { "prompt": "prompt-2", "metric": "acc", "value": 29.68, "stderr": 0.0228 }, { "prompt": "prompt-3", "metric": "acc", "value": 53.37, "stderr": 0.0249 }, { "prompt": "prompt-4", "metric": "acc", "value": 83.28999999999999, "stderr": 0.0187 }, { "prompt": "prompt-5", "metric": "acc", "value": 30.42, "stderr": 0.023 }, { "prompt": "prompt-6", "metric": "acc", "value": 30.669999999999998, "stderr": 0.0231 } ], "average_accuracy": 42.89333333333334, "best_prompt": 83.28999999999999, "prompt_id": "prompt-4", "CPS": 49.643616333333334 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 62.72, "stderr": 0.0115 }, { "prompt": "prompt-2", "metric": "f1", "value": 62.6, "stderr": 0.0112 }, { "prompt": "prompt-3", "metric": "f1", "value": 38.82, "stderr": 0.0164 }, { "prompt": "prompt-4", "metric": "f1", "value": 9.84, "stderr": 0.0134 }, { "prompt": "prompt-5", "metric": "f1", "value": 56.84, "stderr": 0.013 }, { "prompt": "prompt-6", "metric": "f1", "value": 60.089999999999996, "stderr": 0.0119 } ], "average_accuracy": 48.48499999999999, "best_prompt": 62.72, "prompt_id": "prompt-1", "CPS": 53.791807999999996 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 0.22999999999999998, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 2.88, "stderr": null } ], "average_accuracy": 1.555, "best_prompt": 2.88, "prompt_id": "prompt-2", "CPS": 2.84184 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 7.0875036354823076, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 3.4387784779447403, "stderr": null } ], "average_accuracy": 5.263141056713524, "best_prompt": 7.0875036354823076, "prompt_id": "prompt-1", "CPS": 6.958201871387692 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 31.6, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 32.45, "stderr": null } ], "average_accuracy": 32.025000000000006, "best_prompt": 32.45, "prompt_id": "prompt-2", "CPS": 32.312087500000004 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 58.75, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 52.09, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 51.980000000000004, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 33.300000000000004, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 54.03, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 61.09, "stderr": null } ], "average_accuracy": 51.873333333333335, "best_prompt": 61.09, "prompt_id": "prompt-6", "CPS": 55.459538333333334 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 22.43, "stderr": 0.0164 }, 
{ "prompt": "prompt-2", "metric": "rouge1", "value": 28.560000000000002, "stderr": 0.0137 } ], "average_accuracy": 25.495, "best_prompt": 28.560000000000002, "prompt_id": "prompt-2", "CPS": 27.684636 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 45.75, "stderr": 0.0249 }, { "prompt": "prompt-2", "metric": "acc", "value": 55.00000000000001, "stderr": 0.0249 }, { "prompt": "prompt-3", "metric": "acc", "value": 46.25, "stderr": 0.025 }, { "prompt": "prompt-4", "metric": "acc", "value": 56.00000000000001, "stderr": 0.0249 }, { "prompt": "prompt-5", "metric": "acc", "value": 47.5, "stderr": 0.025 }, { "prompt": "prompt-6", "metric": "acc", "value": 49, "stderr": 0.025 } ], "average_accuracy": 49.916666666666664, "best_prompt": 56.00000000000001, "prompt_id": "prompt-4", "CPS": 52.593333333333334 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 5.34, "stderr": 0.0194 }, { "prompt": "prompt-2", "metric": "f1", "value": 6.79, "stderr": 0.0214 }, { "prompt": "prompt-3", "metric": "f1", "value": 54.58, "stderr": 0.0252 }, { "prompt": "prompt-4", "metric": "f1", "value": 57.58, "stderr": 0.0243 }, { "prompt": "prompt-5", "metric": "f1", "value": 66.49000000000001, "stderr": 0.02 }, { "prompt": "prompt-6", "metric": "f1", "value": 66.4, "stderr": 0.02 } ], "average_accuracy": 42.86333333333334, "best_prompt": 66.49000000000001, "prompt_id": "prompt-5", "CPS": 50.78062933333334 } }
{ "average_CPS": 35.056361 }
{ "model_name": "sapienzanlp/Minerva-7B-base-v1.0", "num_fewshot": "5", "batch_size": 1, "model": "sapienzanlp/Minerva-7B-base-v1.0", "base_model": "MistralForCausalLM", "revision": "ff16836b81e75ae299c01fd6c797115c9935907d", "submitted_time": "2024-05-26T19:34:19", "num_params_billion": null, "language": "it_en" }
{ "admission-test": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 37.6, "stderr": 0.0217 }, { "prompt": "prompt-2", "metric": "acc", "value": 37.2, "stderr": 0.0216 }, { "prompt": "prompt-3", "metric": "acc", "value": 35.4, "stderr": 0.0214 }, { "prompt": "prompt-4", "metric": "acc", "value": 34.4, "stderr": 0.0213 }, { "prompt": "prompt-5", "metric": "acc", "value": 37.6, "stderr": 0.0217 }, { "prompt": "prompt-6", "metric": "acc", "value": 38.800000000000004, "stderr": 0.0218 } ], "average_accuracy": 36.833333333333336, "best_prompt": 38.800000000000004, "prompt_id": "prompt-6", "CPS": 38.03693333333334 }, "faq": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 28.93, "stderr": 0.0227 }, { "prompt": "prompt-2", "metric": "acc", "value": 28.43, "stderr": 0.0226 }, { "prompt": "prompt-3", "metric": "acc", "value": 24.19, "stderr": 0.0214 }, { "prompt": "prompt-4", "metric": "acc", "value": 24.44, "stderr": 0.0215 }, { "prompt": "prompt-5", "metric": "acc", "value": 28.93, "stderr": 0.0227 }, { "prompt": "prompt-6", "metric": "acc", "value": 28.68, "stderr": 0.0226 } ], "average_accuracy": 27.266666666666666, "best_prompt": 28.93, "prompt_id": "prompt-1", "CPS": 28.448797666666664 }, "hate-speech-detection": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 38.67, "stderr": 0.0175 }, { "prompt": "prompt-2", "metric": "f1", "value": 37.25, "stderr": 0.0173 }, { "prompt": "prompt-3", "metric": "f1", "value": 28.62, "stderr": 0.0181 }, { "prompt": "prompt-4", "metric": "f1", "value": 16.88, "stderr": 0.0163 }, { "prompt": "prompt-5", "metric": "f1", "value": 43.34, "stderr": 0.0171 }, { "prompt": "prompt-6", "metric": "f1", "value": 41.94, "stderr": 0.0166 } ], "average_accuracy": 34.449999999999996, "best_prompt": 43.34, "prompt_id": "prompt-5", "CPS": 39.487074 }, "lexical-substitution": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 9.49, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 9.53, "stderr": null } ], "average_accuracy": 9.51, "best_prompt": 9.53, "prompt_id": "prompt-2", "CPS": 9.528094 }, "evalita NER": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 24.400103005332042, "stderr": null }, { "prompt": "prompt-2", "metric": "acc", "value": 24.183951769268052, "stderr": null } ], "average_accuracy": 24.292027387300045, "best_prompt": 24.400103005332042, "prompt_id": "prompt-1", "CPS": 24.373732443208585 }, "relation-extraction": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 28.83, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 24.45, "stderr": null } ], "average_accuracy": 26.64, "best_prompt": 28.83, "prompt_id": "prompt-1", "CPS": 28.198622999999998 }, "sentiment-analysis": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 53.239999999999995, "stderr": null }, { "prompt": "prompt-2", "metric": "f1", "value": 53.42, "stderr": null }, { "prompt": "prompt-3", "metric": "f1", "value": 50.970000000000006, "stderr": null }, { "prompt": "prompt-4", "metric": "f1", "value": 51.89, "stderr": null }, { "prompt": "prompt-5", "metric": "f1", "value": 54.7, "stderr": null }, { "prompt": "prompt-6", "metric": "f1", "value": 54.620000000000005, "stderr": null } ], "average_accuracy": 53.13999999999999, "best_prompt": 54.7, "prompt_id": "prompt-5", "CPS": 53.84668 }, "summarization-fanpage": { "prompts": [ { "prompt": "prompt-1", "metric": "rouge1", "value": 15.950000000000001, "stderr": 0.0071 }, { "prompt": "prompt-2", "metric": "rouge1", 
"value": 15.879999999999999, "stderr": 0.0071 } ], "average_accuracy": 15.915, "best_prompt": 15.950000000000001, "prompt_id": "prompt-1", "CPS": 15.9444175 }, "text-entailment": { "prompts": [ { "prompt": "prompt-1", "metric": "acc", "value": 72.75, "stderr": 0.0223 }, { "prompt": "prompt-2", "metric": "acc", "value": 73, "stderr": 0.0222 }, { "prompt": "prompt-3", "metric": "acc", "value": 70, "stderr": 0.0229 }, { "prompt": "prompt-4", "metric": "acc", "value": 74.25, "stderr": 0.0219 }, { "prompt": "prompt-5", "metric": "acc", "value": 55.50000000000001, "stderr": 0.0249 }, { "prompt": "prompt-6", "metric": "acc", "value": 55.25, "stderr": 0.0249 } ], "average_accuracy": 66.79166666666667, "best_prompt": 74.25, "prompt_id": "prompt-4", "CPS": 68.71218750000001 }, "word-in-context": { "prompts": [ { "prompt": "prompt-1", "metric": "f1", "value": 45, "stderr": 0.0295 }, { "prompt": "prompt-2", "metric": "f1", "value": 39.69, "stderr": 0.0313 }, { "prompt": "prompt-3", "metric": "f1", "value": 12.1, "stderr": 0.0265 }, { "prompt": "prompt-4", "metric": "f1", "value": 8.7, "stderr": 0.0233 }, { "prompt": "prompt-5", "metric": "f1", "value": 56.00000000000001, "stderr": 0.0254 }, { "prompt": "prompt-6", "metric": "f1", "value": 45.800000000000004, "stderr": 0.0284 } ], "average_accuracy": 34.54833333333334, "best_prompt": 56.00000000000001, "prompt_id": "prompt-5", "CPS": 43.98706666666667 } }