{
    "results": {
        "daily": {
            "daily": 7
        },
        "quarterly": {
            "quarterly": 7
        },
        "harness|arc_challenge|25": {
            "acc": 0.29948805460750855,
            "acc_stderr": 0.013385021637313567,
            "acc_norm": 0.3506825938566553,
            "acc_norm_stderr": 0.013944635930726089
        },
        "harness|hellaswag|10": {
            "acc": 0.3333001394144593,
            "acc_stderr": 0.004704293898729902,
            "acc_norm": 0.4137621987651862,
            "acc_norm_stderr": 0.004915003499517831
        },
        "harness|mmlu_world_religions|5": {
            "acc": 0.47953216374269003,
            "acc_stderr": 0.0383161053282193,
            "acc_norm": 0.47953216374269003,
            "acc_norm_stderr": 0.0383161053282193
        },
        "harness|mmlu_management|5": {
            "acc": 0.5631067961165048,
            "acc_stderr": 0.049111471073657764,
            "acc_norm": 0.5631067961165048,
            "acc_norm_stderr": 0.049111471073657764
        },
        "harness|mmlu_miscellaneous|5": {
            "acc": 0.47509578544061304,
            "acc_stderr": 0.01785777070490102,
            "acc_norm": 0.47509578544061304,
            "acc_norm_stderr": 0.01785777070490102
        },
        "harness|mmlu_anatomy|5": {
            "acc": 0.28888888888888886,
            "acc_stderr": 0.0391545063041425,
            "acc_norm": 0.28888888888888886,
            "acc_norm_stderr": 0.0391545063041425
        },
        "harness|mmlu_abstract_algebra|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|mmlu_conceptual_physics|5": {
            "acc": 0.46808510638297873,
            "acc_stderr": 0.03261936918467382,
            "acc_norm": 0.46808510638297873,
            "acc_norm_stderr": 0.03261936918467382
        },
        "harness|mmlu_virology|5": {
            "acc": 0.45180722891566266,
            "acc_stderr": 0.03874371556587953,
            "acc_norm": 0.45180722891566266,
            "acc_norm_stderr": 0.03874371556587953
        },
        "harness|mmlu_philosophy|5": {
            "acc": 0.47266881028938906,
            "acc_stderr": 0.028355633568328188,
            "acc_norm": 0.47266881028938906,
            "acc_norm_stderr": 0.028355633568328188
        },
        "harness|mmlu_human_aging|5": {
            "acc": 0.45739910313901344,
            "acc_stderr": 0.033435777055830646,
            "acc_norm": 0.45739910313901344,
            "acc_norm_stderr": 0.033435777055830646
        },
        "harness|mmlu_human_sexuality|5": {
            "acc": 0.5267175572519084,
            "acc_stderr": 0.04379024936553894,
            "acc_norm": 0.5267175572519084,
            "acc_norm_stderr": 0.04379024936553894
        },
        "harness|mmlu_medical_genetics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|mmlu_high_school_geography|5": {
            "acc": 0.5555555555555556,
            "acc_stderr": 0.035402943770953675,
            "acc_norm": 0.5555555555555556,
            "acc_norm_stderr": 0.035402943770953675
        },
        "harness|mmlu_electrical_engineering|5": {
            "acc": 0.5724137931034483,
            "acc_stderr": 0.04122737111370332,
            "acc_norm": 0.5724137931034483,
            "acc_norm_stderr": 0.04122737111370332
        },
        "harness|mmlu_college_physics|5": {
            "acc": 0.3137254901960784,
            "acc_stderr": 0.04617034827006716,
            "acc_norm": 0.3137254901960784,
            "acc_norm_stderr": 0.04617034827006716
        },
        "harness|mmlu_high_school_microeconomics|5": {
            "acc": 0.5,
            "acc_stderr": 0.032478490123081544,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.032478490123081544
        },
        "harness|mmlu_high_school_macroeconomics|5": {
            "acc": 0.47692307692307695,
            "acc_stderr": 0.025323990861736125,
            "acc_norm": 0.47692307692307695,
            "acc_norm_stderr": 0.025323990861736125
        },
        "harness|mmlu_computer_security|5": {
            "acc": 0.53,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|mmlu_jurisprudence|5": {
            "acc": 0.5740740740740741,
            "acc_stderr": 0.047803436269367894,
            "acc_norm": 0.5740740740740741,
            "acc_norm_stderr": 0.047803436269367894
        },
        "harness|mmlu_high_school_chemistry|5": {
            "acc": 0.4187192118226601,
            "acc_stderr": 0.03471192860518468,
            "acc_norm": 0.4187192118226601,
            "acc_norm_stderr": 0.03471192860518468
        },
        "harness|mmlu_high_school_biology|5": {
            "acc": 0.47419354838709676,
            "acc_stderr": 0.02840609505765332,
            "acc_norm": 0.47419354838709676,
            "acc_norm_stderr": 0.02840609505765332
        },
        "harness|mmlu_marketing|5": {
            "acc": 0.6752136752136753,
            "acc_stderr": 0.03067902276549883,
            "acc_norm": 0.6752136752136753,
            "acc_norm_stderr": 0.03067902276549883
        },
        "harness|mmlu_clinical_knowledge|5": {
            "acc": 0.44150943396226416,
            "acc_stderr": 0.030561590426731833,
            "acc_norm": 0.44150943396226416,
            "acc_norm_stderr": 0.030561590426731833
        },
        "harness|mmlu_public_relations|5": {
            "acc": 0.4727272727272727,
            "acc_stderr": 0.04782001791380063,
            "acc_norm": 0.4727272727272727,
            "acc_norm_stderr": 0.04782001791380063
        },
        "harness|mmlu_high_school_mathematics|5": {
            "acc": 0.4185185185185185,
            "acc_stderr": 0.030078013075022066,
            "acc_norm": 0.4185185185185185,
            "acc_norm_stderr": 0.030078013075022066
        },
        "harness|mmlu_high_school_physics|5": {
            "acc": 0.304635761589404,
            "acc_stderr": 0.03757949922943343,
            "acc_norm": 0.304635761589404,
            "acc_norm_stderr": 0.03757949922943343
        },
        "harness|mmlu_sociology|5": {
            "acc": 0.6069651741293532,
            "acc_stderr": 0.0345368246603156,
            "acc_norm": 0.6069651741293532,
            "acc_norm_stderr": 0.0345368246603156
        },
        "harness|mmlu_college_medicine|5": {
            "acc": 0.4046242774566474,
            "acc_stderr": 0.03742461193887248,
            "acc_norm": 0.4046242774566474,
            "acc_norm_stderr": 0.03742461193887248
        },
        "harness|mmlu_elementary_mathematics|5": {
            "acc": 0.5476190476190477,
            "acc_stderr": 0.02563425811555495,
            "acc_norm": 0.5476190476190477,
            "acc_norm_stderr": 0.02563425811555495
        },
        "harness|mmlu_college_biology|5": {
            "acc": 0.3472222222222222,
            "acc_stderr": 0.039812405437178615,
            "acc_norm": 0.3472222222222222,
            "acc_norm_stderr": 0.039812405437178615
        },
        "harness|mmlu_college_chemistry|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|mmlu_us_foreign_policy|5": {
            "acc": 0.57,
            "acc_stderr": 0.04975698519562426,
            "acc_norm": 0.57,
            "acc_norm_stderr": 0.04975698519562426
        },
        "harness|mmlu_moral_disputes|5": {
            "acc": 0.49710982658959535,
            "acc_stderr": 0.026918645383239015,
            "acc_norm": 0.49710982658959535,
            "acc_norm_stderr": 0.026918645383239015
        },
        "harness|mmlu_logical_fallacies|5": {
            "acc": 0.5276073619631901,
            "acc_stderr": 0.03922378290610991,
            "acc_norm": 0.5276073619631901,
            "acc_norm_stderr": 0.03922378290610991
        },
        "harness|mmlu_prehistory|5": {
            "acc": 0.49691358024691357,
            "acc_stderr": 0.027820214158594377,
            "acc_norm": 0.49691358024691357,
            "acc_norm_stderr": 0.027820214158594377
        },
        "harness|mmlu_college_mathematics|5": {
            "acc": 0.45,
            "acc_stderr": 0.05,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.05
        },
        "harness|mmlu_high_school_government_and_politics|5": {
            "acc": 0.49222797927461137,
            "acc_stderr": 0.03608003225569654,
            "acc_norm": 0.49222797927461137,
            "acc_norm_stderr": 0.03608003225569654
        },
        "harness|mmlu_econometrics|5": {
            "acc": 0.41228070175438597,
            "acc_stderr": 0.046306532033665956,
            "acc_norm": 0.41228070175438597,
            "acc_norm_stderr": 0.046306532033665956
        },
        "harness|mmlu_high_school_psychology|5": {
            "acc": 0.5027522935779817,
            "acc_stderr": 0.02143699835976532,
            "acc_norm": 0.5027522935779817,
            "acc_norm_stderr": 0.02143699835976532
        },
        "harness|mmlu_formal_logic|5": {
            "acc": 0.40476190476190477,
            "acc_stderr": 0.04390259265377561,
            "acc_norm": 0.40476190476190477,
            "acc_norm_stderr": 0.04390259265377561
        },
        "harness|mmlu_nutrition|5": {
            "acc": 0.49019607843137253,
            "acc_stderr": 0.028624412550167958,
            "acc_norm": 0.49019607843137253,
            "acc_norm_stderr": 0.028624412550167958
        },
        "harness|mmlu_business_ethics|5": {
            "acc": 0.5,
            "acc_stderr": 0.050251890762960605,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.050251890762960605
        },
        "harness|mmlu_international_law|5": {
            "acc": 0.7355371900826446,
            "acc_stderr": 0.04026187527591205,
            "acc_norm": 0.7355371900826446,
            "acc_norm_stderr": 0.04026187527591205
        },
        "harness|mmlu_astronomy|5": {
            "acc": 0.45394736842105265,
            "acc_stderr": 0.04051646342874142,
            "acc_norm": 0.45394736842105265,
            "acc_norm_stderr": 0.04051646342874142
        },
        "harness|mmlu_professional_psychology|5": {
            "acc": 0.39705882352941174,
            "acc_stderr": 0.019794488900024113,
            "acc_norm": 0.39705882352941174,
            "acc_norm_stderr": 0.019794488900024113
        },
        "harness|mmlu_professional_accounting|5": {
            "acc": 0.40070921985815605,
            "acc_stderr": 0.029233465745573086,
            "acc_norm": 0.40070921985815605,
            "acc_norm_stderr": 0.029233465745573086
        },
        "harness|mmlu_machine_learning|5": {
            "acc": 0.39285714285714285,
            "acc_stderr": 0.04635550135609976,
            "acc_norm": 0.39285714285714285,
            "acc_norm_stderr": 0.04635550135609976
        },
        "harness|mmlu_high_school_statistics|5": {
            "acc": 0.4675925925925926,
            "acc_stderr": 0.034028015813589656,
            "acc_norm": 0.4675925925925926,
            "acc_norm_stderr": 0.034028015813589656
        },
        "harness|mmlu_moral_scenarios|5": {
            "acc": 0.3329608938547486,
            "acc_stderr": 0.015761716178397552,
            "acc_norm": 0.3329608938547486,
            "acc_norm_stderr": 0.015761716178397552
        },
        "harness|mmlu_college_computer_science|5": {
            "acc": 0.43,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|mmlu_high_school_computer_science|5": {
            "acc": 0.76,
            "acc_stderr": 0.042923469599092816,
            "acc_norm": 0.76,
            "acc_norm_stderr": 0.042923469599092816
        },
        "harness|mmlu_professional_medicine|5": {
            "acc": 0.35294117647058826,
            "acc_stderr": 0.029029422815681404,
            "acc_norm": 0.35294117647058826,
            "acc_norm_stderr": 0.029029422815681404
        },
        "harness|mmlu_security_studies|5": {
            "acc": 0.6163265306122448,
            "acc_stderr": 0.031130880396235943,
            "acc_norm": 0.6163265306122448,
            "acc_norm_stderr": 0.031130880396235943
        },
        "harness|mmlu_high_school_world_history|5": {
            "acc": 0.5654008438818565,
            "acc_stderr": 0.03226759995510145,
            "acc_norm": 0.5654008438818565,
            "acc_norm_stderr": 0.03226759995510145
        },
        "harness|mmlu_professional_law|5": {
            "acc": 0.36571056062581486,
            "acc_stderr": 0.012301028188840567,
            "acc_norm": 0.36571056062581486,
            "acc_norm_stderr": 0.012301028188840567
        },
        "harness|mmlu_high_school_us_history|5": {
            "acc": 0.4852941176470588,
            "acc_stderr": 0.03507793834791324,
            "acc_norm": 0.4852941176470588,
            "acc_norm_stderr": 0.03507793834791324
        },
        "harness|mmlu_high_school_european_history|5": {
            "acc": 0.5151515151515151,
            "acc_stderr": 0.03902551007374448,
            "acc_norm": 0.5151515151515151,
            "acc_norm_stderr": 0.03902551007374448
        },
        "harness|truthfulqa_mc|0": {
            "mc1": 0.2937576499388005,
            "mc1_stderr": 0.015945068581236614,
            "mc2": 0.4670848140389129,
            "mc2_stderr": 0.01585178282587417
        }
    },
    "versions": {
        "all": 0,
        "harness|arc_challenge|25": 0,
        "harness|hellaswag|10": 0,
        "harness|mmlu_world_religions|5": 1,
        "harness|mmlu_management|5": 1,
        "harness|mmlu_miscellaneous|5": 1,
        "harness|mmlu_anatomy|5": 1,
        "harness|mmlu_abstract_algebra|5": 1,
        "harness|mmlu_conceptual_physics|5": 1,
        "harness|mmlu_virology|5": 1,
        "harness|mmlu_philosophy|5": 1,
        "harness|mmlu_human_aging|5": 1,
        "harness|mmlu_human_sexuality|5": 1,
        "harness|mmlu_medical_genetics|5": 1,
        "harness|mmlu_high_school_geography|5": 1,
        "harness|mmlu_electrical_engineering|5": 1,
        "harness|mmlu_college_physics|5": 1,
        "harness|mmlu_high_school_microeconomics|5": 1,
        "harness|mmlu_high_school_macroeconomics|5": 1,
        "harness|mmlu_computer_security|5": 1,
        "harness|mmlu_global_facts|5": 1,
        "harness|mmlu_jurisprudence|5": 1,
        "harness|mmlu_high_school_chemistry|5": 1,
        "harness|mmlu_high_school_biology|5": 1,
        "harness|mmlu_marketing|5": 1,
        "harness|mmlu_clinical_knowledge|5": 1,
        "harness|mmlu_public_relations|5": 1,
        "harness|mmlu_high_school_mathematics|5": 1,
        "harness|mmlu_high_school_physics|5": 1,
        "harness|mmlu_sociology|5": 1,
        "harness|mmlu_college_medicine|5": 1,
        "harness|mmlu_elementary_mathematics|5": 1,
        "harness|mmlu_college_biology|5": 1,
        "harness|mmlu_college_chemistry|5": 1,
        "harness|mmlu_us_foreign_policy|5": 1,
        "harness|mmlu_moral_disputes|5": 1,
        "harness|mmlu_logical_fallacies|5": 1,
        "harness|mmlu_prehistory|5": 1,
        "harness|mmlu_college_mathematics|5": 1,
        "harness|mmlu_high_school_government_and_politics|5": 1,
        "harness|mmlu_econometrics|5": 1,
        "harness|mmlu_high_school_psychology|5": 1,
        "harness|mmlu_formal_logic|5": 1,
        "harness|mmlu_nutrition|5": 1,
        "harness|mmlu_business_ethics|5": 1,
        "harness|mmlu_international_law|5": 1,
        "harness|mmlu_astronomy|5": 1,
        "harness|mmlu_professional_psychology|5": 1,
        "harness|mmlu_professional_accounting|5": 1,
        "harness|mmlu_machine_learning|5": 1,
        "harness|mmlu_high_school_statistics|5": 1,
        "harness|mmlu_moral_scenarios|5": 1,
        "harness|mmlu_college_computer_science|5": 1,
        "harness|mmlu_high_school_computer_science|5": 1,
        "harness|mmlu_professional_medicine|5": 1,
        "harness|mmlu_security_studies|5": 1,
        "harness|mmlu_high_school_world_history|5": 1,
        "harness|mmlu_professional_law|5": 1,
        "harness|mmlu_high_school_us_history|5": 1,
        "harness|mmlu_high_school_european_history|5": 1,
        "harness|truthfulqa_mc|0": 0
    },
    "config_general": {
        "model_name": "01-ai/Yi-1.5-9B-32K",
        "model_sha": "c0239dbc923b8a2b5ca849763bdd592d39c60850",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}