leaderboard-test-results/42MARU/GenAI-llama2-ko-en-platypus-13B-v2/result_2023-10-30 01:04:14.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3890784982935154,
            "acc_stderr": 0.014247309976045607,
            "acc_norm": 0.4496587030716723,
            "acc_norm_stderr": 0.01453714444428474
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.42013543118900615,
            "acc_stderr": 0.00492571700809971,
            "acc_norm": 0.5487950607448715,
            "acc_norm_stderr": 0.004965963647210315
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5555555555555556,
            "acc_stderr": 0.03811079669833531,
            "acc_norm": 0.5555555555555556,
            "acc_norm_stderr": 0.03811079669833531
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5145631067961165,
            "acc_stderr": 0.04948637324026637,
            "acc_norm": 0.5145631067961165,
            "acc_norm_stderr": 0.04948637324026637
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.4878671775223499,
            "acc_stderr": 0.01787469866749135,
            "acc_norm": 0.4878671775223499,
            "acc_norm_stderr": 0.01787469866749135
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.04171654161354544,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.04171654161354544
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909284,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909284
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3276595744680851,
            "acc_stderr": 0.030683020843231015,
            "acc_norm": 0.3276595744680851,
            "acc_norm_stderr": 0.030683020843231015
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3795180722891566,
            "acc_stderr": 0.03777798822748018,
            "acc_norm": 0.3795180722891566,
            "acc_norm_stderr": 0.03777798822748018
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5048231511254019,
            "acc_stderr": 0.02839677044411129,
            "acc_norm": 0.5048231511254019,
            "acc_norm_stderr": 0.02839677044411129
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.45739910313901344,
            "acc_stderr": 0.033435777055830646,
            "acc_norm": 0.45739910313901344,
            "acc_norm_stderr": 0.033435777055830646
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.48091603053435117,
            "acc_stderr": 0.04382094705550988,
            "acc_norm": 0.48091603053435117,
            "acc_norm_stderr": 0.04382094705550988
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5050505050505051,
            "acc_stderr": 0.035621707606254015,
            "acc_norm": 0.5050505050505051,
            "acc_norm_stderr": 0.035621707606254015
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3724137931034483,
            "acc_stderr": 0.0402873153294756,
            "acc_norm": 0.3724137931034483,
            "acc_norm_stderr": 0.0402873153294756
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.04158307533083286,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.04158307533083286
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.46638655462184875,
            "acc_stderr": 0.03240501447690071,
            "acc_norm": 0.46638655462184875,
            "acc_norm_stderr": 0.03240501447690071
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4307692307692308,
            "acc_stderr": 0.025106820660539753,
            "acc_norm": 0.4307692307692308,
            "acc_norm_stderr": 0.025106820660539753
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.54,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.54,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.49074074074074076,
            "acc_stderr": 0.04832853553437055,
            "acc_norm": 0.49074074074074076,
            "acc_norm_stderr": 0.04832853553437055
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3399014778325123,
            "acc_stderr": 0.033327690684107895,
            "acc_norm": 0.3399014778325123,
            "acc_norm_stderr": 0.033327690684107895
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.44516129032258067,
            "acc_stderr": 0.02827241018621491,
            "acc_norm": 0.44516129032258067,
            "acc_norm_stderr": 0.02827241018621491
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6452991452991453,
            "acc_stderr": 0.03134250486245402,
            "acc_norm": 0.6452991452991453,
            "acc_norm_stderr": 0.03134250486245402
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.39245283018867927,
            "acc_stderr": 0.030052580579557845,
            "acc_norm": 0.39245283018867927,
            "acc_norm_stderr": 0.030052580579557845
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4,
            "acc_stderr": 0.0469237132203465,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.0469237132203465
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.22592592592592592,
            "acc_stderr": 0.025497532639609553,
            "acc_norm": 0.22592592592592592,
            "acc_norm_stderr": 0.025497532639609553
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2847682119205298,
            "acc_stderr": 0.03684881521389023,
            "acc_norm": 0.2847682119205298,
            "acc_norm_stderr": 0.03684881521389023
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5970149253731343,
            "acc_stderr": 0.034683432951111266,
            "acc_norm": 0.5970149253731343,
            "acc_norm_stderr": 0.034683432951111266
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3988439306358382,
            "acc_stderr": 0.037336266553835096,
            "acc_norm": 0.3988439306358382,
            "acc_norm_stderr": 0.037336266553835096
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.31216931216931215,
            "acc_stderr": 0.023865206836972585,
            "acc_norm": 0.31216931216931215,
            "acc_norm_stderr": 0.023865206836972585
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3402777777777778,
            "acc_stderr": 0.03962135573486219,
            "acc_norm": 0.3402777777777778,
            "acc_norm_stderr": 0.03962135573486219
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.64,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.64,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5,
            "acc_stderr": 0.026919095102908273,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.026919095102908273
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.44171779141104295,
            "acc_stderr": 0.03901591825836185,
            "acc_norm": 0.44171779141104295,
            "acc_norm_stderr": 0.03901591825836185
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.46296296296296297,
            "acc_stderr": 0.027744313443376536,
            "acc_norm": 0.46296296296296297,
            "acc_norm_stderr": 0.027744313443376536
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.49740932642487046,
            "acc_stderr": 0.03608390745384487,
            "acc_norm": 0.49740932642487046,
            "acc_norm_stderr": 0.03608390745384487
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.21929824561403508,
            "acc_stderr": 0.03892431106518754,
            "acc_norm": 0.21929824561403508,
            "acc_norm_stderr": 0.03892431106518754
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5009174311926605,
            "acc_stderr": 0.021437287056051215,
            "acc_norm": 0.5009174311926605,
            "acc_norm_stderr": 0.021437287056051215
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.25396825396825395,
            "acc_stderr": 0.038932596106046734,
            "acc_norm": 0.25396825396825395,
            "acc_norm_stderr": 0.038932596106046734
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.46078431372549017,
            "acc_stderr": 0.028541722692618874,
            "acc_norm": 0.46078431372549017,
            "acc_norm_stderr": 0.028541722692618874
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.51,
            "acc_stderr": 0.05024183937956911,
            "acc_norm": 0.51,
            "acc_norm_stderr": 0.05024183937956911
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6528925619834711,
            "acc_stderr": 0.04345724570292534,
            "acc_norm": 0.6528925619834711,
            "acc_norm_stderr": 0.04345724570292534
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.48026315789473684,
            "acc_stderr": 0.040657710025626057,
            "acc_norm": 0.48026315789473684,
            "acc_norm_stderr": 0.040657710025626057
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.35784313725490197,
            "acc_stderr": 0.019393058402355435,
            "acc_norm": 0.35784313725490197,
            "acc_norm_stderr": 0.019393058402355435
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3191489361702128,
            "acc_stderr": 0.027807990141320203,
            "acc_norm": 0.3191489361702128,
            "acc_norm_stderr": 0.027807990141320203
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.22321428571428573,
            "acc_stderr": 0.039523019677025116,
            "acc_norm": 0.22321428571428573,
            "acc_norm_stderr": 0.039523019677025116
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.27314814814814814,
            "acc_stderr": 0.030388051301678116,
            "acc_norm": 0.27314814814814814,
            "acc_norm_stderr": 0.030388051301678116
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.2757352941176471,
            "acc_stderr": 0.027146271936625166,
            "acc_norm": 0.2757352941176471,
            "acc_norm_stderr": 0.027146271936625166
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.4489795918367347,
            "acc_stderr": 0.03184213866687579,
            "acc_norm": 0.4489795918367347,
            "acc_norm_stderr": 0.03184213866687579
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.569620253164557,
            "acc_stderr": 0.03223017195937598,
            "acc_norm": 0.569620253164557,
            "acc_norm_stderr": 0.03223017195937598
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3135593220338983,
            "acc_stderr": 0.011849234291459313,
            "acc_norm": 0.3135593220338983,
            "acc_norm_stderr": 0.011849234291459313
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.4950980392156863,
            "acc_stderr": 0.03509143375606785,
            "acc_norm": 0.4950980392156863,
            "acc_norm_stderr": 0.03509143375606785
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5515151515151515,
            "acc_stderr": 0.03883565977956928,
            "acc_norm": 0.5515151515151515,
            "acc_norm_stderr": 0.03883565977956928
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.26805385556915545,
            "mc1_stderr": 0.015506204722834562,
            "mc2": 0.44689474709496685,
            "mc2_stderr": 0.015256070107718848
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5478158205430933,
            "acc_stderr": 0.017111567130916796,
            "acc_norm": 0.5962219598583235,
            "acc_norm_stderr": 0.016869031540298632
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "42MARU/GenAI-llama2-ko-en-platypus-13B-v2",
        "model_sha": "1b4eb6319be99c113d17778ce2737acffe2a0fee",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}