leaderboard-test-results/42MARU/GenAI-llama2-ko-en-instruct-v3-13B/result_2023-11-01 18:54:40.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3967576791808874,
            "acc_stderr": 0.014296513020180646,
            "acc_norm": 0.454778156996587,
            "acc_norm_stderr": 0.014551507060836355
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4164509061939853,
            "acc_stderr": 0.004919626380645517,
            "acc_norm": 0.5536745668193587,
            "acc_norm_stderr": 0.004960947388535101
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5380116959064327,
            "acc_stderr": 0.038237270928823064,
            "acc_norm": 0.5380116959064327,
            "acc_norm_stderr": 0.038237270928823064
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6019417475728155,
            "acc_stderr": 0.048467482539772386,
            "acc_norm": 0.6019417475728155,
            "acc_norm_stderr": 0.048467482539772386
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5159642401021711,
            "acc_stderr": 0.017870847506081738,
            "acc_norm": 0.5159642401021711,
            "acc_norm_stderr": 0.017870847506081738
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.42962962962962964,
            "acc_stderr": 0.04276349494376599,
            "acc_norm": 0.42962962962962964,
            "acc_norm_stderr": 0.04276349494376599
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.37446808510638296,
            "acc_stderr": 0.031639106653672915,
            "acc_norm": 0.37446808510638296,
            "acc_norm_stderr": 0.031639106653672915
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.45180722891566266,
            "acc_stderr": 0.03874371556587953,
            "acc_norm": 0.45180722891566266,
            "acc_norm_stderr": 0.03874371556587953
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5627009646302251,
            "acc_stderr": 0.0281739177617629,
            "acc_norm": 0.5627009646302251,
            "acc_norm_stderr": 0.0281739177617629
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3811659192825112,
            "acc_stderr": 0.032596251184168284,
            "acc_norm": 0.3811659192825112,
            "acc_norm_stderr": 0.032596251184168284
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.46564885496183206,
            "acc_stderr": 0.043749285605997376,
            "acc_norm": 0.46564885496183206,
            "acc_norm_stderr": 0.043749285605997376
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5757575757575758,
            "acc_stderr": 0.035212249088415866,
            "acc_norm": 0.5757575757575758,
            "acc_norm_stderr": 0.035212249088415866
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.45517241379310347,
            "acc_stderr": 0.04149886942192117,
            "acc_norm": 0.45517241379310347,
            "acc_norm_stderr": 0.04149886942192117
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.18627450980392157,
            "acc_stderr": 0.038739587141493524,
            "acc_norm": 0.18627450980392157,
            "acc_norm_stderr": 0.038739587141493524
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.4495798319327731,
            "acc_stderr": 0.03231293497137707,
            "acc_norm": 0.4495798319327731,
            "acc_norm_stderr": 0.03231293497137707
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4641025641025641,
            "acc_stderr": 0.025285585990017834,
            "acc_norm": 0.4641025641025641,
            "acc_norm_stderr": 0.025285585990017834
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620333,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620333
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.35,
            "acc_stderr": 0.04793724854411019,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.04793724854411019
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5277777777777778,
            "acc_stderr": 0.04826217294139894,
            "acc_norm": 0.5277777777777778,
            "acc_norm_stderr": 0.04826217294139894
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.35467980295566504,
            "acc_stderr": 0.03366124489051449,
            "acc_norm": 0.35467980295566504,
            "acc_norm_stderr": 0.03366124489051449
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5096774193548387,
            "acc_stderr": 0.02843867799890955,
            "acc_norm": 0.5096774193548387,
            "acc_norm_stderr": 0.02843867799890955
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6837606837606838,
            "acc_stderr": 0.030463656747340254,
            "acc_norm": 0.6837606837606838,
            "acc_norm_stderr": 0.030463656747340254
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4641509433962264,
            "acc_stderr": 0.030693675018458003,
            "acc_norm": 0.4641509433962264,
            "acc_norm_stderr": 0.030693675018458003
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.44545454545454544,
            "acc_stderr": 0.047605488214603246,
            "acc_norm": 0.44545454545454544,
            "acc_norm_stderr": 0.047605488214603246
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.02730914058823017,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.02730914058823017
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.304635761589404,
            "acc_stderr": 0.03757949922943343,
            "acc_norm": 0.304635761589404,
            "acc_norm_stderr": 0.03757949922943343
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5920398009950248,
            "acc_stderr": 0.03475116365194092,
            "acc_norm": 0.5920398009950248,
            "acc_norm_stderr": 0.03475116365194092
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3988439306358382,
            "acc_stderr": 0.03733626655383509,
            "acc_norm": 0.3988439306358382,
            "acc_norm_stderr": 0.03733626655383509
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2857142857142857,
            "acc_stderr": 0.02326651221373056,
            "acc_norm": 0.2857142857142857,
            "acc_norm_stderr": 0.02326651221373056
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4027777777777778,
            "acc_stderr": 0.04101405519842424,
            "acc_norm": 0.4027777777777778,
            "acc_norm_stderr": 0.04101405519842424
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.64,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.64,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5028901734104047,
            "acc_stderr": 0.02691864538323901,
            "acc_norm": 0.5028901734104047,
            "acc_norm_stderr": 0.02691864538323901
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4723926380368098,
            "acc_stderr": 0.03922378290610991,
            "acc_norm": 0.4723926380368098,
            "acc_norm_stderr": 0.03922378290610991
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4783950617283951,
            "acc_stderr": 0.02779476010500874,
            "acc_norm": 0.4783950617283951,
            "acc_norm_stderr": 0.02779476010500874
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421296,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421296
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.538860103626943,
            "acc_stderr": 0.035975244117345775,
            "acc_norm": 0.538860103626943,
            "acc_norm_stderr": 0.035975244117345775
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2807017543859649,
            "acc_stderr": 0.042270544512321984,
            "acc_norm": 0.2807017543859649,
            "acc_norm_stderr": 0.042270544512321984
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5229357798165137,
            "acc_stderr": 0.0214147570581755,
            "acc_norm": 0.5229357798165137,
            "acc_norm_stderr": 0.0214147570581755
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3412698412698413,
            "acc_stderr": 0.04240799327574925,
            "acc_norm": 0.3412698412698413,
            "acc_norm_stderr": 0.04240799327574925
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.477124183006536,
            "acc_stderr": 0.028599936776089775,
            "acc_norm": 0.477124183006536,
            "acc_norm_stderr": 0.028599936776089775
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.45,
            "acc_stderr": 0.05,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6776859504132231,
            "acc_stderr": 0.04266416363352167,
            "acc_norm": 0.6776859504132231,
            "acc_norm_stderr": 0.04266416363352167
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.46710526315789475,
            "acc_stderr": 0.040601270352363966,
            "acc_norm": 0.46710526315789475,
            "acc_norm_stderr": 0.040601270352363966
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3660130718954248,
            "acc_stderr": 0.019488025745529672,
            "acc_norm": 0.3660130718954248,
            "acc_norm_stderr": 0.019488025745529672
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.35106382978723405,
            "acc_stderr": 0.028473501272963775,
            "acc_norm": 0.35106382978723405,
            "acc_norm_stderr": 0.028473501272963775
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.25,
            "acc_stderr": 0.04109974682633932,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04109974682633932
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.375,
            "acc_stderr": 0.033016908987210894,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.033016908987210894
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2547486033519553,
            "acc_stderr": 0.014572650383409167,
            "acc_norm": 0.2547486033519553,
            "acc_norm_stderr": 0.014572650383409167
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.45,
            "acc_stderr": 0.05,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.42,
            "acc_stderr": 0.04960449637488584,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.04960449637488584
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.45955882352941174,
            "acc_stderr": 0.03027332507734575,
            "acc_norm": 0.45955882352941174,
            "acc_norm_stderr": 0.03027332507734575
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.4897959183673469,
            "acc_stderr": 0.03200255347893782,
            "acc_norm": 0.4897959183673469,
            "acc_norm_stderr": 0.03200255347893782
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6033755274261603,
            "acc_stderr": 0.03184399873811225,
            "acc_norm": 0.6033755274261603,
            "acc_norm_stderr": 0.03184399873811225
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3285528031290743,
            "acc_stderr": 0.011996027247502912,
            "acc_norm": 0.3285528031290743,
            "acc_norm_stderr": 0.011996027247502912
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.49019607843137253,
            "acc_stderr": 0.035086373586305716,
            "acc_norm": 0.49019607843137253,
            "acc_norm_stderr": 0.035086373586305716
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5575757575757576,
            "acc_stderr": 0.03878372113711275,
            "acc_norm": 0.5575757575757576,
            "acc_norm_stderr": 0.03878372113711275
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.27906976744186046,
            "mc1_stderr": 0.01570210709062789,
            "mc2": 0.4486611820923937,
            "mc2_stderr": 0.015416976946375454
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.48406139315230223,
            "acc_stderr": 0.017181617837190192,
            "acc_norm": 0.5619834710743802,
            "acc_norm_stderr": 0.01705775370216029
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v3-13b",
        "model_sha": "199c2113f09f153bce1ad7aac35e6e756a99b89b",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
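
A minimal sketch of how a result file with this layout can be loaded and summarized locally, assuming it has been downloaded as "result_2023-11-01 18:54:40.json" (the path is illustrative). It reads the "results" block shown above and reports per-benchmark scores, using an unweighted mean of acc_norm over the ko_mmlu subtasks as one possible aggregation; the official leaderboard may aggregate differently.

import json
from statistics import mean

# Hypothetical local path; point this at wherever the result JSON was saved.
RESULT_PATH = "result_2023-11-01 18:54:40.json"

with open(RESULT_PATH, encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]

# Collect acc_norm for every ko_mmlu subtask (acc and acc_norm are identical
# for these tasks in this file).
mmlu_scores = [
    scores["acc_norm"]
    for task, scores in results.items()
    if task.startswith("harness|ko_mmlu_")
]

print("model:                   ", data["config_general"]["model_name"])
print("ko_arc_challenge acc_norm:", results["harness|ko_arc_challenge|25"]["acc_norm"])
print("ko_hellaswag acc_norm:    ", results["harness|ko_hellaswag|10"]["acc_norm"])
print("ko_mmlu mean acc_norm:    ", round(mean(mmlu_scores), 4))
print("ko_truthfulqa mc2:        ", results["harness|ko_truthfulqa_mc|0"]["mc2"])
print("ko_commongen_v2 acc_norm: ", results["harness|ko_commongen_v2|2"]["acc_norm"])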