leaderboard-test-results/42MARU/GenAI-llama2-ko-en-instruct-v2-13b/result_2023-10-18 01:40:38.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3848122866894198,
            "acc_stderr": 0.014218371065251095,
            "acc_norm": 0.4402730375426621,
            "acc_norm_stderr": 0.014506769524804243
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4190400318661621,
            "acc_stderr": 0.0049239357498424945,
            "acc_norm": 0.5560645289782912,
            "acc_norm_stderr": 0.004958314114266494
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5614035087719298,
            "acc_stderr": 0.038057975055904594,
            "acc_norm": 0.5614035087719298,
            "acc_norm_stderr": 0.038057975055904594
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6019417475728155,
            "acc_stderr": 0.04846748253977238,
            "acc_norm": 0.6019417475728155,
            "acc_norm_stderr": 0.04846748253977238
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5287356321839081,
            "acc_stderr": 0.017850410794380173,
            "acc_norm": 0.5287356321839081,
            "acc_norm_stderr": 0.017850410794380173
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4222222222222222,
            "acc_stderr": 0.04266763404099582,
            "acc_norm": 0.4222222222222222,
            "acc_norm_stderr": 0.04266763404099582
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421276,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421276
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3404255319148936,
            "acc_stderr": 0.030976692998534443,
            "acc_norm": 0.3404255319148936,
            "acc_norm_stderr": 0.030976692998534443
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.39759036144578314,
            "acc_stderr": 0.038099730845402184,
            "acc_norm": 0.39759036144578314,
            "acc_norm_stderr": 0.038099730845402184
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5273311897106109,
            "acc_stderr": 0.028355633568328188,
            "acc_norm": 0.5273311897106109,
            "acc_norm_stderr": 0.028355633568328188
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4798206278026906,
            "acc_stderr": 0.033530461674123,
            "acc_norm": 0.4798206278026906,
            "acc_norm_stderr": 0.033530461674123
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5114503816793893,
            "acc_stderr": 0.043841400240780176,
            "acc_norm": 0.5114503816793893,
            "acc_norm_stderr": 0.043841400240780176
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5353535353535354,
            "acc_stderr": 0.03553436368828061,
            "acc_norm": 0.5353535353535354,
            "acc_norm_stderr": 0.03553436368828061
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4827586206896552,
            "acc_stderr": 0.04164188720169377,
            "acc_norm": 0.4827586206896552,
            "acc_norm_stderr": 0.04164188720169377
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.04220773659171452,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.04220773659171452
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.49159663865546216,
            "acc_stderr": 0.03247390276569669,
            "acc_norm": 0.49159663865546216,
            "acc_norm_stderr": 0.03247390276569669
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.46153846153846156,
            "acc_stderr": 0.025275892070240634,
            "acc_norm": 0.46153846153846156,
            "acc_norm_stderr": 0.025275892070240634
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.45,
            "acc_stderr": 0.05,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5277777777777778,
            "acc_stderr": 0.04826217294139894,
            "acc_norm": 0.5277777777777778,
            "acc_norm_stderr": 0.04826217294139894
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3399014778325123,
            "acc_stderr": 0.033327690684107895,
            "acc_norm": 0.3399014778325123,
            "acc_norm_stderr": 0.033327690684107895
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.4806451612903226,
            "acc_stderr": 0.0284226874043121,
            "acc_norm": 0.4806451612903226,
            "acc_norm_stderr": 0.0284226874043121
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6752136752136753,
            "acc_stderr": 0.03067902276549883,
            "acc_norm": 0.6752136752136753,
            "acc_norm_stderr": 0.03067902276549883
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.47547169811320755,
            "acc_stderr": 0.030735822206205615,
            "acc_norm": 0.47547169811320755,
            "acc_norm_stderr": 0.030735822206205615
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5454545454545454,
            "acc_stderr": 0.04769300568972745,
            "acc_norm": 0.5454545454545454,
            "acc_norm_stderr": 0.04769300568972745
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.027309140588230172,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.027309140588230172
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.33112582781456956,
            "acc_stderr": 0.038425817186598696,
            "acc_norm": 0.33112582781456956,
            "acc_norm_stderr": 0.038425817186598696
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5671641791044776,
            "acc_stderr": 0.03503490923673282,
            "acc_norm": 0.5671641791044776,
            "acc_norm_stderr": 0.03503490923673282
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4393063583815029,
            "acc_stderr": 0.037842719328874674,
            "acc_norm": 0.4393063583815029,
            "acc_norm_stderr": 0.037842719328874674
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.0242785680243077,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.0242785680243077
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4027777777777778,
            "acc_stderr": 0.04101405519842425,
            "acc_norm": 0.4027777777777778,
            "acc_norm_stderr": 0.04101405519842425
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.34,
            "acc_stderr": 0.047609522856952344,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.047609522856952344
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.63,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.63,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.49421965317919075,
            "acc_stderr": 0.026917296179149116,
            "acc_norm": 0.49421965317919075,
            "acc_norm_stderr": 0.026917296179149116
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.48466257668711654,
            "acc_stderr": 0.039265223787088445,
            "acc_norm": 0.48466257668711654,
            "acc_norm_stderr": 0.039265223787088445
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.49074074074074076,
            "acc_stderr": 0.027815973433878014,
            "acc_norm": 0.49074074074074076,
            "acc_norm_stderr": 0.027815973433878014
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.04605661864718381,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.04605661864718381
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5440414507772021,
            "acc_stderr": 0.03594413711272436,
            "acc_norm": 0.5440414507772021,
            "acc_norm_stderr": 0.03594413711272436
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2543859649122807,
            "acc_stderr": 0.040969851398436716,
            "acc_norm": 0.2543859649122807,
            "acc_norm_stderr": 0.040969851398436716
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5559633027522936,
            "acc_stderr": 0.021302621211654518,
            "acc_norm": 0.5559633027522936,
            "acc_norm_stderr": 0.021302621211654518
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.30158730158730157,
            "acc_stderr": 0.04104947269903394,
            "acc_norm": 0.30158730158730157,
            "acc_norm_stderr": 0.04104947269903394
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4673202614379085,
            "acc_stderr": 0.028568699752225875,
            "acc_norm": 0.4673202614379085,
            "acc_norm_stderr": 0.028568699752225875
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.51,
            "acc_stderr": 0.05024183937956912,
            "acc_norm": 0.51,
            "acc_norm_stderr": 0.05024183937956912
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.7272727272727273,
            "acc_stderr": 0.04065578140908705,
            "acc_norm": 0.7272727272727273,
            "acc_norm_stderr": 0.04065578140908705
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.47368421052631576,
            "acc_stderr": 0.04063302731486671,
            "acc_norm": 0.47368421052631576,
            "acc_norm_stderr": 0.04063302731486671
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3741830065359477,
            "acc_stderr": 0.019576953122088833,
            "acc_norm": 0.3741830065359477,
            "acc_norm_stderr": 0.019576953122088833
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.31560283687943264,
            "acc_stderr": 0.027724989449509314,
            "acc_norm": 0.31560283687943264,
            "acc_norm_stderr": 0.027724989449509314
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.22321428571428573,
            "acc_stderr": 0.039523019677025116,
            "acc_norm": 0.22321428571428573,
            "acc_norm_stderr": 0.039523019677025116
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4074074074074074,
            "acc_stderr": 0.033509916046960436,
            "acc_norm": 0.4074074074074074,
            "acc_norm_stderr": 0.033509916046960436
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.264804469273743,
            "acc_stderr": 0.014756906483260664,
            "acc_norm": 0.264804469273743,
            "acc_norm_stderr": 0.014756906483260664
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.39705882352941174,
            "acc_stderr": 0.029722152099280058,
            "acc_norm": 0.39705882352941174,
            "acc_norm_stderr": 0.029722152099280058
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.49387755102040815,
            "acc_stderr": 0.032006820201639086,
            "acc_norm": 0.49387755102040815,
            "acc_norm_stderr": 0.032006820201639086
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5738396624472574,
            "acc_stderr": 0.03219035703131774,
            "acc_norm": 0.5738396624472574,
            "acc_norm_stderr": 0.03219035703131774
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.35071707953063885,
            "acc_stderr": 0.012187773370741518,
            "acc_norm": 0.35071707953063885,
            "acc_norm_stderr": 0.012187773370741518
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.4950980392156863,
            "acc_stderr": 0.03509143375606786,
            "acc_norm": 0.4950980392156863,
            "acc_norm_stderr": 0.03509143375606786
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5696969696969697,
            "acc_stderr": 0.03866225962879077,
            "acc_norm": 0.5696969696969697,
            "acc_norm_stderr": 0.03866225962879077
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.27539779681762544,
            "mc1_stderr": 0.015638135667775523,
            "mc2": 0.44227632802507094,
            "mc2_stderr": 0.015242459306682204
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5088547815820543,
            "acc_stderr": 0.017187658199336743,
            "acc_norm": 0.5608028335301063,
            "acc_norm_stderr": 0.017062775744780705
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v2-13b",
        "model_sha": "9f429309fc6b939d08c659ab4666f6e80324dcd1",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
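A minimal sketch of how a results file like this can be summarized, assuming standard-library Python and that the JSON is saved under the filename shown in the path above. Aggregating ko_mmlu subtasks by the mean of acc_norm and reporting mc2 for ko_truthfulqa_mc follows the common leaderboard convention for harness results; the exact aggregation used by the Open Ko-LLM Leaderboard is an assumption here, not taken from this file.

import json
from statistics import mean

# Path is illustrative; point this at wherever the results file lives.
with open("result_2023-10-18 01:40:38.json") as f:
    data = json.load(f)

results = data["results"]

# Collect acc_norm for every ko_mmlu subtask (keys look like
# "harness|ko_mmlu_world_religions|5"); every subtask in this file
# reports both acc and acc_norm.
mmlu_scores = [v["acc_norm"] for k, v in results.items() if "ko_mmlu" in k]

# Per-benchmark summary; the choice of acc_norm vs. mc2 per task is an
# assumed convention, as noted in the lead-in above.
summary = {
    "ko_arc_challenge (acc_norm)": results["harness|ko_arc_challenge|25"]["acc_norm"],
    "ko_hellaswag (acc_norm)": results["harness|ko_hellaswag|10"]["acc_norm"],
    "ko_mmlu (mean acc_norm)": mean(mmlu_scores),
    "ko_truthfulqa (mc2)": results["harness|ko_truthfulqa_mc|0"]["mc2"],
    "ko_commongen_v2 (acc_norm)": results["harness|ko_commongen_v2|2"]["acc_norm"],
}

for name, score in summary.items():
    print(f"{name}: {score:.4f}")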