leaderboard-test-results / 42MARU / GenAI-llama2-ko-en-instruct-v5-13B / result_2023-11-16 17:37:54.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3822525597269625,
            "acc_stderr": 0.014200454049979272,
            "acc_norm": 0.43600682593856654,
            "acc_norm_stderr": 0.014491225699230918
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4221270663214499,
            "acc_stderr": 0.00492889189587429,
            "acc_norm": 0.5567616012746465,
            "acc_norm_stderr": 0.004957524197900418
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4619883040935672,
            "acc_stderr": 0.03823727092882307,
            "acc_norm": 0.4619883040935672,
            "acc_norm_stderr": 0.03823727092882307
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.49514563106796117,
            "acc_stderr": 0.04950504382128921,
            "acc_norm": 0.49514563106796117,
            "acc_norm_stderr": 0.04950504382128921
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5274584929757343,
            "acc_stderr": 0.017852981266633944,
            "acc_norm": 0.5274584929757343,
            "acc_norm_stderr": 0.017852981266633944
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4740740740740741,
            "acc_stderr": 0.04313531696750573,
            "acc_norm": 0.4740740740740741,
            "acc_norm_stderr": 0.04313531696750573
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206824,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206824
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.40425531914893614,
            "acc_stderr": 0.03208115750788684,
            "acc_norm": 0.40425531914893614,
            "acc_norm_stderr": 0.03208115750788684
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.39156626506024095,
            "acc_stderr": 0.03799857454479637,
            "acc_norm": 0.39156626506024095,
            "acc_norm_stderr": 0.03799857454479637
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.47266881028938906,
            "acc_stderr": 0.028355633568328174,
            "acc_norm": 0.47266881028938906,
            "acc_norm_stderr": 0.028355633568328174
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.47533632286995514,
            "acc_stderr": 0.03351695167652628,
            "acc_norm": 0.47533632286995514,
            "acc_norm_stderr": 0.03351695167652628
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4580152671755725,
            "acc_stderr": 0.04369802690578756,
            "acc_norm": 0.4580152671755725,
            "acc_norm_stderr": 0.04369802690578756
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.51010101010101,
            "acc_stderr": 0.035616254886737454,
            "acc_norm": 0.51010101010101,
            "acc_norm_stderr": 0.035616254886737454
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4896551724137931,
            "acc_stderr": 0.04165774775728763,
            "acc_norm": 0.4896551724137931,
            "acc_norm_stderr": 0.04165774775728763
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.20588235294117646,
            "acc_stderr": 0.04023382273617747,
            "acc_norm": 0.20588235294117646,
            "acc_norm_stderr": 0.04023382273617747
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.40756302521008403,
            "acc_stderr": 0.031918633744784645,
            "acc_norm": 0.40756302521008403,
            "acc_norm_stderr": 0.031918633744784645
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.39487179487179486,
            "acc_stderr": 0.024784316942156367,
            "acc_norm": 0.39487179487179486,
            "acc_norm_stderr": 0.024784316942156367
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.53,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5277777777777778,
            "acc_stderr": 0.04826217294139894,
            "acc_norm": 0.5277777777777778,
            "acc_norm_stderr": 0.04826217294139894
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.39408866995073893,
            "acc_stderr": 0.03438157967036545,
            "acc_norm": 0.39408866995073893,
            "acc_norm_stderr": 0.03438157967036545
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.44193548387096776,
            "acc_stderr": 0.028251557906849734,
            "acc_norm": 0.44193548387096776,
            "acc_norm_stderr": 0.028251557906849734
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6324786324786325,
            "acc_stderr": 0.031585391577456365,
            "acc_norm": 0.6324786324786325,
            "acc_norm_stderr": 0.031585391577456365
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4188679245283019,
            "acc_stderr": 0.030365050829115208,
            "acc_norm": 0.4188679245283019,
            "acc_norm_stderr": 0.030365050829115208
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4090909090909091,
            "acc_stderr": 0.04709306978661896,
            "acc_norm": 0.4090909090909091,
            "acc_norm_stderr": 0.04709306978661896
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2740740740740741,
            "acc_stderr": 0.027195934804085626,
            "acc_norm": 0.2740740740740741,
            "acc_norm_stderr": 0.027195934804085626
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2913907284768212,
            "acc_stderr": 0.03710185726119995,
            "acc_norm": 0.2913907284768212,
            "acc_norm_stderr": 0.03710185726119995
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5522388059701493,
            "acc_stderr": 0.03516184772952167,
            "acc_norm": 0.5522388059701493,
            "acc_norm_stderr": 0.03516184772952167
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3815028901734104,
            "acc_stderr": 0.03703851193099521,
            "acc_norm": 0.3815028901734104,
            "acc_norm_stderr": 0.03703851193099521
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2804232804232804,
            "acc_stderr": 0.02313528797432563,
            "acc_norm": 0.2804232804232804,
            "acc_norm_stderr": 0.02313528797432563
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3472222222222222,
            "acc_stderr": 0.039812405437178615,
            "acc_norm": 0.3472222222222222,
            "acc_norm_stderr": 0.039812405437178615
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.61,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.61,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5115606936416185,
            "acc_stderr": 0.026911898686377906,
            "acc_norm": 0.5115606936416185,
            "acc_norm_stderr": 0.026911898686377906
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.44785276073619634,
            "acc_stderr": 0.03906947479456601,
            "acc_norm": 0.44785276073619634,
            "acc_norm_stderr": 0.03906947479456601
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.43209876543209874,
            "acc_stderr": 0.02756301097160667,
            "acc_norm": 0.43209876543209874,
            "acc_norm_stderr": 0.02756301097160667
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.538860103626943,
            "acc_stderr": 0.035975244117345775,
            "acc_norm": 0.538860103626943,
            "acc_norm_stderr": 0.035975244117345775
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2982456140350877,
            "acc_stderr": 0.04303684033537316,
            "acc_norm": 0.2982456140350877,
            "acc_norm_stderr": 0.04303684033537316
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5009174311926605,
            "acc_stderr": 0.021437287056051215,
            "acc_norm": 0.5009174311926605,
            "acc_norm_stderr": 0.021437287056051215
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2619047619047619,
            "acc_stderr": 0.0393253768039287,
            "acc_norm": 0.2619047619047619,
            "acc_norm_stderr": 0.0393253768039287
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.434640522875817,
            "acc_stderr": 0.028384256704883037,
            "acc_norm": 0.434640522875817,
            "acc_norm_stderr": 0.028384256704883037
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5950413223140496,
            "acc_stderr": 0.04481137755942469,
            "acc_norm": 0.5950413223140496,
            "acc_norm_stderr": 0.04481137755942469
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3684210526315789,
            "acc_stderr": 0.03925523381052932,
            "acc_norm": 0.3684210526315789,
            "acc_norm_stderr": 0.03925523381052932
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.380718954248366,
            "acc_stderr": 0.019643801557924806,
            "acc_norm": 0.380718954248366,
            "acc_norm_stderr": 0.019643801557924806
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.31560283687943264,
            "acc_stderr": 0.027724989449509314,
            "acc_norm": 0.31560283687943264,
            "acc_norm_stderr": 0.027724989449509314
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.2767857142857143,
            "acc_stderr": 0.042466243366976235,
            "acc_norm": 0.2767857142857143,
            "acc_norm_stderr": 0.042466243366976235
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.26851851851851855,
            "acc_stderr": 0.030225226160012386,
            "acc_norm": 0.26851851851851855,
            "acc_norm_stderr": 0.030225226160012386
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2446927374301676,
            "acc_stderr": 0.014378169884098424,
            "acc_norm": 0.2446927374301676,
            "acc_norm_stderr": 0.014378169884098424
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.47,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3382352941176471,
            "acc_stderr": 0.028739328513983576,
            "acc_norm": 0.3382352941176471,
            "acc_norm_stderr": 0.028739328513983576
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.40816326530612246,
            "acc_stderr": 0.03146465712827424,
            "acc_norm": 0.40816326530612246,
            "acc_norm_stderr": 0.03146465712827424
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.569620253164557,
            "acc_stderr": 0.03223017195937598,
            "acc_norm": 0.569620253164557,
            "acc_norm_stderr": 0.03223017195937598
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.33116036505867014,
            "acc_stderr": 0.01202012819598575,
            "acc_norm": 0.33116036505867014,
            "acc_norm_stderr": 0.01202012819598575
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.45098039215686275,
            "acc_stderr": 0.03492406104163614,
            "acc_norm": 0.45098039215686275,
            "acc_norm_stderr": 0.03492406104163614
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5636363636363636,
            "acc_stderr": 0.03872592983524754,
            "acc_norm": 0.5636363636363636,
            "acc_norm_stderr": 0.03872592983524754
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.29253365973072215,
            "mc1_stderr": 0.015925597445286165,
            "mc2": 0.4591418911312825,
            "mc2_stderr": 0.015363002653584545
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4911452184179457,
            "acc_stderr": 0.017187658199336736,
            "acc_norm": 0.5454545454545454,
            "acc_norm_stderr": 0.017119172208061504
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v5-13B",
        "model_sha": "e625b2673e2a0839e7d3fc0f2a844e9966404678",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
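A minimal sketch of how one might load this report and summarize the per-task scores. It assumes the JSON above is saved locally (the path below is a placeholder) and uses an unweighted mean over the ko_mmlu subtasks purely for illustration; the leaderboard's own aggregation formula may differ.

```python
import json
from statistics import mean

# Placeholder path; point this at wherever the results JSON is stored locally.
RESULTS_PATH = "result_2023-11-16 17:37:54.json"

with open(RESULTS_PATH, encoding="utf-8") as f:
    report = json.load(f)

results = report["results"]


def primary_score(metrics: dict) -> float:
    # Prefer normalized accuracy, fall back to raw accuracy, then mc2
    # (the field used by the ko_truthfulqa_mc task).
    for key in ("acc_norm", "acc", "mc2"):
        if key in metrics:
            return metrics[key]
    raise KeyError("no recognised score field")


# One score per task, e.g. each ko_mmlu subtask individually.
scores = {task: primary_score(m) for task, m in results.items()}

# Illustrative unweighted mean over the ko_mmlu subtasks only.
ko_mmlu = [s for task, s in scores.items() if "ko_mmlu" in task]
print(f"ko_mmlu subtasks: {len(ko_mmlu)}, mean score: {mean(ko_mmlu):.4f}")
```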