leaderboard-test-results / 42MARU / GenAI-llama2-ko-en-instruct-v6-13B / result_2023-11-27 02:12:26.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3916382252559727,
            "acc_stderr": 0.014264122124938215,
            "acc_norm": 0.45051194539249145,
            "acc_norm_stderr": 0.014539646098471627
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4272057359091814,
            "acc_stderr": 0.004936616428922639,
            "acc_norm": 0.5610436168094005,
            "acc_norm_stderr": 0.004952454721934797
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4678362573099415,
            "acc_stderr": 0.03826882417660369,
            "acc_norm": 0.4678362573099415,
            "acc_norm_stderr": 0.03826882417660369
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.4854368932038835,
            "acc_stderr": 0.04948637324026637,
            "acc_norm": 0.4854368932038835,
            "acc_norm_stderr": 0.04948637324026637
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5210727969348659,
            "acc_stderr": 0.017864076786212903,
            "acc_norm": 0.5210727969348659,
            "acc_norm_stderr": 0.017864076786212903
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4148148148148148,
            "acc_stderr": 0.04256193767901407,
            "acc_norm": 0.4148148148148148,
            "acc_norm_stderr": 0.04256193767901407
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206824,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206824
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.39574468085106385,
            "acc_stderr": 0.03196758697835362,
            "acc_norm": 0.39574468085106385,
            "acc_norm_stderr": 0.03196758697835362
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.42771084337349397,
            "acc_stderr": 0.03851597683718533,
            "acc_norm": 0.42771084337349397,
            "acc_norm_stderr": 0.03851597683718533
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4565916398713826,
            "acc_stderr": 0.028290869054197604,
            "acc_norm": 0.4565916398713826,
            "acc_norm_stderr": 0.028290869054197604
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.47533632286995514,
            "acc_stderr": 0.03351695167652629,
            "acc_norm": 0.47533632286995514,
            "acc_norm_stderr": 0.03351695167652629
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4580152671755725,
            "acc_stderr": 0.04369802690578756,
            "acc_norm": 0.4580152671755725,
            "acc_norm_stderr": 0.04369802690578756
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939098,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939098
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5202020202020202,
            "acc_stderr": 0.03559443565563919,
            "acc_norm": 0.5202020202020202,
            "acc_norm_stderr": 0.03559443565563919
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4068965517241379,
            "acc_stderr": 0.04093793981266237,
            "acc_norm": 0.4068965517241379,
            "acc_norm_stderr": 0.04093793981266237
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.04220773659171453,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.04220773659171453
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.40756302521008403,
            "acc_stderr": 0.031918633744784645,
            "acc_norm": 0.40756302521008403,
            "acc_norm_stderr": 0.031918633744784645
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4230769230769231,
            "acc_stderr": 0.02504919787604233,
            "acc_norm": 0.4230769230769231,
            "acc_norm_stderr": 0.02504919787604233
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.57,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.57,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.48148148148148145,
            "acc_stderr": 0.04830366024635331,
            "acc_norm": 0.48148148148148145,
            "acc_norm_stderr": 0.04830366024635331
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.39408866995073893,
            "acc_stderr": 0.034381579670365446,
            "acc_norm": 0.39408866995073893,
            "acc_norm_stderr": 0.034381579670365446
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.45483870967741935,
            "acc_stderr": 0.028327743091561056,
            "acc_norm": 0.45483870967741935,
            "acc_norm_stderr": 0.028327743091561056
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6410256410256411,
            "acc_stderr": 0.03142616993791924,
            "acc_norm": 0.6410256410256411,
            "acc_norm_stderr": 0.03142616993791924
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.45660377358490567,
            "acc_stderr": 0.03065674869673943,
            "acc_norm": 0.45660377358490567,
            "acc_norm_stderr": 0.03065674869673943
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4727272727272727,
            "acc_stderr": 0.04782001791380063,
            "acc_norm": 0.4727272727272727,
            "acc_norm_stderr": 0.04782001791380063
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.24814814814814815,
            "acc_stderr": 0.0263357394040558,
            "acc_norm": 0.24814814814814815,
            "acc_norm_stderr": 0.0263357394040558
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.31788079470198677,
            "acc_stderr": 0.03802039760107903,
            "acc_norm": 0.31788079470198677,
            "acc_norm_stderr": 0.03802039760107903
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.572139303482587,
            "acc_stderr": 0.03498541988407795,
            "acc_norm": 0.572139303482587,
            "acc_norm_stderr": 0.03498541988407795
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3699421965317919,
            "acc_stderr": 0.03681229633394319,
            "acc_norm": 0.3699421965317919,
            "acc_norm_stderr": 0.03681229633394319
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2724867724867725,
            "acc_stderr": 0.022930973071633345,
            "acc_norm": 0.2724867724867725,
            "acc_norm_stderr": 0.022930973071633345
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3263888888888889,
            "acc_stderr": 0.03921067198982266,
            "acc_norm": 0.3263888888888889,
            "acc_norm_stderr": 0.03921067198982266
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.59,
            "acc_stderr": 0.04943110704237102,
            "acc_norm": 0.59,
            "acc_norm_stderr": 0.04943110704237102
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.4421965317919075,
            "acc_stderr": 0.026738603643807403,
            "acc_norm": 0.4421965317919075,
            "acc_norm_stderr": 0.026738603643807403
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.44785276073619634,
            "acc_stderr": 0.03906947479456601,
            "acc_norm": 0.44785276073619634,
            "acc_norm_stderr": 0.03906947479456601
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4567901234567901,
            "acc_stderr": 0.027716661650194038,
            "acc_norm": 0.4567901234567901,
            "acc_norm_stderr": 0.027716661650194038
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.49740932642487046,
            "acc_stderr": 0.03608390745384487,
            "acc_norm": 0.49740932642487046,
            "acc_norm_stderr": 0.03608390745384487
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.20175438596491227,
            "acc_stderr": 0.03775205013583639,
            "acc_norm": 0.20175438596491227,
            "acc_norm_stderr": 0.03775205013583639
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.4935779816513762,
            "acc_stderr": 0.021435554820013077,
            "acc_norm": 0.4935779816513762,
            "acc_norm_stderr": 0.021435554820013077
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.30952380952380953,
            "acc_stderr": 0.04134913018303316,
            "acc_norm": 0.30952380952380953,
            "acc_norm_stderr": 0.04134913018303316
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.42810457516339867,
            "acc_stderr": 0.02833239748366427,
            "acc_norm": 0.42810457516339867,
            "acc_norm_stderr": 0.02833239748366427
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.43,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5950413223140496,
            "acc_stderr": 0.04481137755942469,
            "acc_norm": 0.5950413223140496,
            "acc_norm_stderr": 0.04481137755942469
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3881578947368421,
            "acc_stderr": 0.03965842097512744,
            "acc_norm": 0.3881578947368421,
            "acc_norm_stderr": 0.03965842097512744
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3660130718954248,
            "acc_stderr": 0.019488025745529672,
            "acc_norm": 0.3660130718954248,
            "acc_norm_stderr": 0.019488025745529672
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2801418439716312,
            "acc_stderr": 0.02678917235114024,
            "acc_norm": 0.2801418439716312,
            "acc_norm_stderr": 0.02678917235114024
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.25892857142857145,
            "acc_stderr": 0.04157751539865629,
            "acc_norm": 0.25892857142857145,
            "acc_norm_stderr": 0.04157751539865629
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3101851851851852,
            "acc_stderr": 0.031546962856566295,
            "acc_norm": 0.3101851851851852,
            "acc_norm_stderr": 0.031546962856566295
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2446927374301676,
            "acc_stderr": 0.014378169884098424,
            "acc_norm": 0.2446927374301676,
            "acc_norm_stderr": 0.014378169884098424
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.44,
            "acc_stderr": 0.049888765156985884,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.049888765156985884
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.33088235294117646,
            "acc_stderr": 0.028582709753898428,
            "acc_norm": 0.33088235294117646,
            "acc_norm_stderr": 0.028582709753898428
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.46938775510204084,
            "acc_stderr": 0.031949171367580624,
            "acc_norm": 0.46938775510204084,
            "acc_norm_stderr": 0.031949171367580624
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5485232067510548,
            "acc_stderr": 0.032393600173974704,
            "acc_norm": 0.5485232067510548,
            "acc_norm_stderr": 0.032393600173974704
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.31486310299869624,
            "acc_stderr": 0.011862561755715945,
            "acc_norm": 0.31486310299869624,
            "acc_norm_stderr": 0.011862561755715945
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.4411764705882353,
            "acc_stderr": 0.034849415144292316,
            "acc_norm": 0.4411764705882353,
            "acc_norm_stderr": 0.034849415144292316
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5393939393939394,
            "acc_stderr": 0.03892207016552012,
            "acc_norm": 0.5393939393939394,
            "acc_norm_stderr": 0.03892207016552012
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3023255813953488,
            "mc1_stderr": 0.01607750926613303,
            "mc2": 0.45710797981768625,
            "mc2_stderr": 0.015464643764155465
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.46871310507674147,
            "acc_stderr": 0.017156666859785456,
            "acc_norm": 0.5619834710743802,
            "acc_norm_stderr": 0.017057753702160294
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v6-13B",
        "model_sha": "f24326c48f4edb60bc3bdc186b65e0fcb9254c1e",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}