results/ITT-AF/ITT-Yi-Ko-6B-v1.0/result_2024-01-25 08:20:13.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.36860068259385664,
"acc_stderr": 0.014097810678042187,
"acc_norm": 0.4249146757679181,
"acc_norm_stderr": 0.014445698968520769
},
"harness|ko_hellaswag|10": {
"acc": 0.3986257717586138,
"acc_stderr": 0.004886147907627406,
"acc_norm": 0.538338976299542,
"acc_norm_stderr": 0.004975091055697193
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5321637426900585,
"acc_stderr": 0.038268824176603704,
"acc_norm": 0.5321637426900585,
"acc_norm_stderr": 0.038268824176603704
},
"harness|ko_mmlu_management|5": {
"acc": 0.5436893203883495,
"acc_stderr": 0.049318019942204146,
"acc_norm": 0.5436893203883495,
"acc_norm_stderr": 0.049318019942204146
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.561941251596424,
"acc_stderr": 0.017742232238257244,
"acc_norm": 0.561941251596424,
"acc_norm_stderr": 0.017742232238257244
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4740740740740741,
"acc_stderr": 0.04313531696750573,
"acc_norm": 0.4740740740740741,
"acc_norm_stderr": 0.04313531696750573
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542127,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542127
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.39148936170212767,
"acc_stderr": 0.03190701242326812,
"acc_norm": 0.39148936170212767,
"acc_norm_stderr": 0.03190701242326812
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3614457831325301,
"acc_stderr": 0.0374005938202932,
"acc_norm": 0.3614457831325301,
"acc_norm_stderr": 0.0374005938202932
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.4533762057877814,
"acc_stderr": 0.028274359854894255,
"acc_norm": 0.4533762057877814,
"acc_norm_stderr": 0.028274359854894255
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.4977578475336323,
"acc_stderr": 0.033557465352232634,
"acc_norm": 0.4977578475336323,
"acc_norm_stderr": 0.033557465352232634
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.5572519083969466,
"acc_stderr": 0.04356447202665069,
"acc_norm": 0.5572519083969466,
"acc_norm_stderr": 0.04356447202665069
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.43,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.43,
"acc_norm_stderr": 0.049756985195624284
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.601010101010101,
"acc_stderr": 0.034889016168527305,
"acc_norm": 0.601010101010101,
"acc_norm_stderr": 0.034889016168527305
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.47586206896551725,
"acc_stderr": 0.041618085035015295,
"acc_norm": 0.47586206896551725,
"acc_norm_stderr": 0.041618085035015295
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.041583075330832865,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.041583075330832865
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.4789915966386555,
"acc_stderr": 0.03244980849990029,
"acc_norm": 0.4789915966386555,
"acc_norm_stderr": 0.03244980849990029
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.4282051282051282,
"acc_stderr": 0.025088301454694838,
"acc_norm": 0.4282051282051282,
"acc_norm_stderr": 0.025088301454694838
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.63,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.63,
"acc_norm_stderr": 0.04852365870939099
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.46296296296296297,
"acc_stderr": 0.04820403072760627,
"acc_norm": 0.46296296296296297,
"acc_norm_stderr": 0.04820403072760627
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.35960591133004927,
"acc_stderr": 0.03376458246509567,
"acc_norm": 0.35960591133004927,
"acc_norm_stderr": 0.03376458246509567
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.43870967741935485,
"acc_stderr": 0.028229497320317213,
"acc_norm": 0.43870967741935485,
"acc_norm_stderr": 0.028229497320317213
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.7051282051282052,
"acc_stderr": 0.02987257770889118,
"acc_norm": 0.7051282051282052,
"acc_norm_stderr": 0.02987257770889118
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.4528301886792453,
"acc_stderr": 0.030635627957961816,
"acc_norm": 0.4528301886792453,
"acc_norm_stderr": 0.030635627957961816
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5,
"acc_stderr": 0.04789131426105757,
"acc_norm": 0.5,
"acc_norm_stderr": 0.04789131426105757
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3111111111111111,
"acc_stderr": 0.02822644674968352,
"acc_norm": 0.3111111111111111,
"acc_norm_stderr": 0.02822644674968352
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.271523178807947,
"acc_stderr": 0.036313298039696545,
"acc_norm": 0.271523178807947,
"acc_norm_stderr": 0.036313298039696545
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5671641791044776,
"acc_stderr": 0.03503490923673282,
"acc_norm": 0.5671641791044776,
"acc_norm_stderr": 0.03503490923673282
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3930635838150289,
"acc_stderr": 0.03724249595817729,
"acc_norm": 0.3930635838150289,
"acc_norm_stderr": 0.03724249595817729
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.0242785680243077,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.0242785680243077
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.4097222222222222,
"acc_stderr": 0.04112490974670788,
"acc_norm": 0.4097222222222222,
"acc_norm_stderr": 0.04112490974670788
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.55,
"acc_stderr": 0.05,
"acc_norm": 0.55,
"acc_norm_stderr": 0.05
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.4913294797687861,
"acc_stderr": 0.026915047355369804,
"acc_norm": 0.4913294797687861,
"acc_norm_stderr": 0.026915047355369804
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.4723926380368098,
"acc_stderr": 0.039223782906109894,
"acc_norm": 0.4723926380368098,
"acc_norm_stderr": 0.039223782906109894
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.46296296296296297,
"acc_stderr": 0.02774431344337654,
"acc_norm": 0.46296296296296297,
"acc_norm_stderr": 0.02774431344337654
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5647668393782384,
"acc_stderr": 0.03578038165008586,
"acc_norm": 0.5647668393782384,
"acc_norm_stderr": 0.03578038165008586
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2807017543859649,
"acc_stderr": 0.04227054451232199,
"acc_norm": 0.2807017543859649,
"acc_norm_stderr": 0.04227054451232199
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.581651376146789,
"acc_stderr": 0.021149548596443874,
"acc_norm": 0.581651376146789,
"acc_norm_stderr": 0.021149548596443874
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2857142857142857,
"acc_stderr": 0.040406101782088394,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.040406101782088394
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.42483660130718953,
"acc_stderr": 0.028304576673141107,
"acc_norm": 0.42483660130718953,
"acc_norm_stderr": 0.028304576673141107
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.58,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.58,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6859504132231405,
"acc_stderr": 0.04236964753041018,
"acc_norm": 0.6859504132231405,
"acc_norm_stderr": 0.04236964753041018
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.4605263157894737,
"acc_stderr": 0.04056242252249033,
"acc_norm": 0.4605263157894737,
"acc_norm_stderr": 0.04056242252249033
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.39705882352941174,
"acc_stderr": 0.019794488900024106,
"acc_norm": 0.39705882352941174,
"acc_norm_stderr": 0.019794488900024106
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.3475177304964539,
"acc_stderr": 0.02840662780959095,
"acc_norm": 0.3475177304964539,
"acc_norm_stderr": 0.02840662780959095
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.3392857142857143,
"acc_stderr": 0.04493949068613539,
"acc_norm": 0.3392857142857143,
"acc_norm_stderr": 0.04493949068613539
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.2638888888888889,
"acc_stderr": 0.03005820270430985,
"acc_norm": 0.2638888888888889,
"acc_norm_stderr": 0.03005820270430985
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2212290502793296,
"acc_stderr": 0.01388216459888727,
"acc_norm": 0.2212290502793296,
"acc_norm_stderr": 0.01388216459888727
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252605,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252605
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.3713235294117647,
"acc_stderr": 0.02934980313976587,
"acc_norm": 0.3713235294117647,
"acc_norm_stderr": 0.02934980313976587
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.40408163265306124,
"acc_stderr": 0.03141470802586589,
"acc_norm": 0.40408163265306124,
"acc_norm_stderr": 0.03141470802586589
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.6160337552742616,
"acc_stderr": 0.031658678064106674,
"acc_norm": 0.6160337552742616,
"acc_norm_stderr": 0.031658678064106674
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3259452411994785,
"acc_stderr": 0.01197150729498278,
"acc_norm": 0.3259452411994785,
"acc_norm_stderr": 0.01197150729498278
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.5098039215686274,
"acc_stderr": 0.03508637358630572,
"acc_norm": 0.5098039215686274,
"acc_norm_stderr": 0.03508637358630572
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.5636363636363636,
"acc_stderr": 0.03872592983524754,
"acc_norm": 0.5636363636363636,
"acc_norm_stderr": 0.03872592983524754
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2802937576499388,
"mc1_stderr": 0.015723139524608746,
"mc2": 0.43117439069714647,
"mc2_stderr": 0.014988526622853661
},
"harness|ko_commongen_v2|2": {
"acc": 0.3825265643447462,
"acc_stderr": 0.016709165387228817,
"acc_norm": 0.4309327036599764,
"acc_norm_stderr": 0.017025558196043136
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "ITT-AF/ITT-Yi-Ko-6B-v1.0",
"model_sha": "138b6cebc9ef970542a57f56701a4507dc5d12f7",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}