results/GritLM/GritLM-7B/result_2024-06-05 14:51:05.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3003412969283277,
"acc_stderr": 0.013395909309957005,
"acc_norm": 0.35665529010238906,
"acc_norm_stderr": 0.013998056902620199
},
"harness|ko_hellaswag|10": {
"acc": 0.3573989245170285,
"acc_stderr": 0.004782542754102084,
"acc_norm": 0.4565823541127266,
"acc_norm_stderr": 0.004970933420231928
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.03811079669833531,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.03811079669833531
},
"harness|ko_mmlu_management|5": {
"acc": 0.5339805825242718,
"acc_stderr": 0.04939291447273481,
"acc_norm": 0.5339805825242718,
"acc_norm_stderr": 0.04939291447273481
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.43039591315453385,
"acc_stderr": 0.017705868776292374,
"acc_norm": 0.43039591315453385,
"acc_norm_stderr": 0.017705868776292374
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.34074074074074073,
"acc_stderr": 0.040943762699967926,
"acc_norm": 0.34074074074074073,
"acc_norm_stderr": 0.040943762699967926
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.23,
"acc_stderr": 0.042295258468165065,
"acc_norm": 0.23,
"acc_norm_stderr": 0.042295258468165065
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.37872340425531914,
"acc_stderr": 0.03170995606040655,
"acc_norm": 0.37872340425531914,
"acc_norm_stderr": 0.03170995606040655
},
"harness|ko_mmlu_virology|5": {
"acc": 0.4397590361445783,
"acc_stderr": 0.03864139923699121,
"acc_norm": 0.4397590361445783,
"acc_norm_stderr": 0.03864139923699121
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.45980707395498394,
"acc_stderr": 0.028306190403305696,
"acc_norm": 0.45980707395498394,
"acc_norm_stderr": 0.028306190403305696
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.47533632286995514,
"acc_stderr": 0.033516951676526276,
"acc_norm": 0.47533632286995514,
"acc_norm_stderr": 0.033516951676526276
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.3816793893129771,
"acc_stderr": 0.04260735157644561,
"acc_norm": 0.3816793893129771,
"acc_norm_stderr": 0.04260735157644561
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.42,
"acc_stderr": 0.04960449637488583,
"acc_norm": 0.42,
"acc_norm_stderr": 0.04960449637488583
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5151515151515151,
"acc_stderr": 0.03560716516531061,
"acc_norm": 0.5151515151515151,
"acc_norm_stderr": 0.03560716516531061
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.38620689655172413,
"acc_stderr": 0.04057324734419034,
"acc_norm": 0.38620689655172413,
"acc_norm_stderr": 0.04057324734419034
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.19607843137254902,
"acc_stderr": 0.03950581861179964,
"acc_norm": 0.19607843137254902,
"acc_norm_stderr": 0.03950581861179964
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.5294117647058824,
"acc_stderr": 0.03242225027115006,
"acc_norm": 0.5294117647058824,
"acc_norm_stderr": 0.03242225027115006
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.441025641025641,
"acc_stderr": 0.02517404838400077,
"acc_norm": 0.441025641025641,
"acc_norm_stderr": 0.02517404838400077
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.62,
"acc_stderr": 0.04878317312145632,
"acc_norm": 0.62,
"acc_norm_stderr": 0.04878317312145632
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5277777777777778,
"acc_stderr": 0.048262172941398944,
"acc_norm": 0.5277777777777778,
"acc_norm_stderr": 0.048262172941398944
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.35960591133004927,
"acc_stderr": 0.03376458246509568,
"acc_norm": 0.35960591133004927,
"acc_norm_stderr": 0.03376458246509568
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.44193548387096776,
"acc_stderr": 0.028251557906849738,
"acc_norm": 0.44193548387096776,
"acc_norm_stderr": 0.028251557906849738
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6410256410256411,
"acc_stderr": 0.03142616993791923,
"acc_norm": 0.6410256410256411,
"acc_norm_stderr": 0.03142616993791923
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.41132075471698115,
"acc_stderr": 0.030285009259009812,
"acc_norm": 0.41132075471698115,
"acc_norm_stderr": 0.030285009259009812
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5272727272727272,
"acc_stderr": 0.04782001791380061,
"acc_norm": 0.5272727272727272,
"acc_norm_stderr": 0.04782001791380061
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3296296296296296,
"acc_stderr": 0.028661201116524575,
"acc_norm": 0.3296296296296296,
"acc_norm_stderr": 0.028661201116524575
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2582781456953642,
"acc_stderr": 0.035737053147634576,
"acc_norm": 0.2582781456953642,
"acc_norm_stderr": 0.035737053147634576
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5870646766169154,
"acc_stderr": 0.034815208033673474,
"acc_norm": 0.5870646766169154,
"acc_norm_stderr": 0.034815208033673474
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.35260115606936415,
"acc_stderr": 0.03643037168958549,
"acc_norm": 0.35260115606936415,
"acc_norm_stderr": 0.03643037168958549
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.3412698412698413,
"acc_stderr": 0.02441923496681907,
"acc_norm": 0.3412698412698413,
"acc_norm_stderr": 0.02441923496681907
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3541666666666667,
"acc_stderr": 0.039994111357535424,
"acc_norm": 0.3541666666666667,
"acc_norm_stderr": 0.039994111357535424
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.6,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.6,
"acc_norm_stderr": 0.04923659639173309
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5115606936416185,
"acc_stderr": 0.02691189868637793,
"acc_norm": 0.5115606936416185,
"acc_norm_stderr": 0.02691189868637793
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.4785276073619632,
"acc_stderr": 0.0392474687675113,
"acc_norm": 0.4785276073619632,
"acc_norm_stderr": 0.0392474687675113
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.4228395061728395,
"acc_stderr": 0.027487472980871598,
"acc_norm": 0.4228395061728395,
"acc_norm_stderr": 0.027487472980871598
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.47668393782383417,
"acc_stderr": 0.03604513672442206,
"acc_norm": 0.47668393782383417,
"acc_norm_stderr": 0.03604513672442206
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2982456140350877,
"acc_stderr": 0.043036840335373173,
"acc_norm": 0.2982456140350877,
"acc_norm_stderr": 0.043036840335373173
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.47155963302752296,
"acc_stderr": 0.021402615697348047,
"acc_norm": 0.47155963302752296,
"acc_norm_stderr": 0.021402615697348047
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.4523809523809524,
"acc_stderr": 0.044518079590553275,
"acc_norm": 0.4523809523809524,
"acc_norm_stderr": 0.044518079590553275
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.46405228758169936,
"acc_stderr": 0.02855582751652878,
"acc_norm": 0.46405228758169936,
"acc_norm_stderr": 0.02855582751652878
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.41,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.41,
"acc_norm_stderr": 0.04943110704237102
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6859504132231405,
"acc_stderr": 0.042369647530410184,
"acc_norm": 0.6859504132231405,
"acc_norm_stderr": 0.042369647530410184
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.34210526315789475,
"acc_stderr": 0.03860731599316092,
"acc_norm": 0.34210526315789475,
"acc_norm_stderr": 0.03860731599316092
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3937908496732026,
"acc_stderr": 0.01976621199107307,
"acc_norm": 0.3937908496732026,
"acc_norm_stderr": 0.01976621199107307
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.3617021276595745,
"acc_stderr": 0.028663820147199492,
"acc_norm": 0.3617021276595745,
"acc_norm_stderr": 0.028663820147199492
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.44642857142857145,
"acc_stderr": 0.04718471485219588,
"acc_norm": 0.44642857142857145,
"acc_norm_stderr": 0.04718471485219588
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.37037037037037035,
"acc_stderr": 0.03293377139415191,
"acc_norm": 0.37037037037037035,
"acc_norm_stderr": 0.03293377139415191
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2446927374301676,
"acc_stderr": 0.014378169884098426,
"acc_norm": 0.2446927374301676,
"acc_norm_stderr": 0.014378169884098426
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.38,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.38,
"acc_norm_stderr": 0.048783173121456316
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.55,
"acc_stderr": 0.04999999999999999,
"acc_norm": 0.55,
"acc_norm_stderr": 0.04999999999999999
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.3272058823529412,
"acc_stderr": 0.028501452860396567,
"acc_norm": 0.3272058823529412,
"acc_norm_stderr": 0.028501452860396567
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5224489795918368,
"acc_stderr": 0.03197694118713672,
"acc_norm": 0.5224489795918368,
"acc_norm_stderr": 0.03197694118713672
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.5780590717299579,
"acc_stderr": 0.032148146302403695,
"acc_norm": 0.5780590717299579,
"acc_norm_stderr": 0.032148146302403695
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.32073011734028684,
"acc_stderr": 0.011921199991782622,
"acc_norm": 0.32073011734028684,
"acc_norm_stderr": 0.011921199991782622
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.4166666666666667,
"acc_stderr": 0.0346022832723917,
"acc_norm": 0.4166666666666667,
"acc_norm_stderr": 0.0346022832723917
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.38181818181818183,
"acc_stderr": 0.037937131711656344,
"acc_norm": 0.38181818181818183,
"acc_norm_stderr": 0.037937131711656344
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2778457772337821,
"mc1_stderr": 0.015680929364024626,
"mc2": 0.4510026253728979,
"mc2_stderr": 0.015432182201955667
},
"harness|ko_commongen_v2|2": {
"acc": 0.3730814639905549,
"acc_stderr": 0.016627318275137443,
"acc_norm": 0.43919716646989376,
"acc_norm_stderr": 0.0170627757447807
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "GritLM/GritLM-7B",
"model_sha": "13f00a0e36500c80ce12870ea513846a066004af",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}