lm1-misc-pile/1b121b21b/evaluation/lm1-1b1-21b-results_lm-eval_global_step39672_2022-12-01-17-56-56.json
{
  "results": {
    "copa": {
      "acc": 0.68,
      "acc_stderr": 0.04688261722621505
    },
    "piqa": {
      "acc": 0.6545157780195865,
      "acc_stderr": 0.011094802893617745,
      "acc_norm": 0.6605005440696409,
      "acc_norm_stderr": 0.011048455047173913
    },
    "rte": {
      "acc": 0.5234657039711191,
      "acc_stderr": 0.03006330041190266
    },
    "winogrande": {
      "acc": 0.4996053670086819,
      "acc_stderr": 0.014052481306049516
    },
    "hendrycksTest-abstract_algebra": {
      "acc": 0.28,
      "acc_stderr": 0.04512608598542127,
      "acc_norm": 0.29,
      "acc_norm_stderr": 0.045604802157206824
    },
    "hendrycksTest-anatomy": {
      "acc": 0.2074074074074074,
      "acc_stderr": 0.03502553170678318,
      "acc_norm": 0.1925925925925926,
      "acc_norm_stderr": 0.03406542058502651
    },
    "hendrycksTest-astronomy": {
      "acc": 0.21052631578947367,
      "acc_stderr": 0.03317672787533157,
      "acc_norm": 0.27631578947368424,
      "acc_norm_stderr": 0.03639057569952924
    },
    "hendrycksTest-business_ethics": {
      "acc": 0.34,
      "acc_stderr": 0.04760952285695236,
      "acc_norm": 0.29,
      "acc_norm_stderr": 0.04560480215720683
    },
    "hendrycksTest-clinical_knowledge": {
      "acc": 0.20754716981132076,
      "acc_stderr": 0.02495991802891127,
      "acc_norm": 0.32452830188679244,
      "acc_norm_stderr": 0.028815615713432115
    },
    "hendrycksTest-college_biology": {
      "acc": 0.25,
      "acc_stderr": 0.03621034121889507,
      "acc_norm": 0.2361111111111111,
      "acc_norm_stderr": 0.03551446610810826
    },
    "hendrycksTest-college_chemistry": {
      "acc": 0.23,
      "acc_stderr": 0.042295258468165085,
      "acc_norm": 0.31,
      "acc_norm_stderr": 0.04648231987117316
    },
    "hendrycksTest-college_computer_science": {
      "acc": 0.27,
      "acc_stderr": 0.0446196043338474,
      "acc_norm": 0.28,
      "acc_norm_stderr": 0.04512608598542128
    },
    "hendrycksTest-college_mathematics": {
      "acc": 0.26,
      "acc_stderr": 0.0440844002276808,
      "acc_norm": 0.31,
      "acc_norm_stderr": 0.04648231987117316
    },
    "hendrycksTest-college_medicine": {
      "acc": 0.2138728323699422,
      "acc_stderr": 0.031265112061730424,
      "acc_norm": 0.26011560693641617,
      "acc_norm_stderr": 0.03345036916788992
    },
    "hendrycksTest-college_physics": {
      "acc": 0.2549019607843137,
      "acc_stderr": 0.04336432707993178,
      "acc_norm": 0.30392156862745096,
      "acc_norm_stderr": 0.045766654032077636
    },
    "hendrycksTest-computer_security": {
      "acc": 0.29,
      "acc_stderr": 0.04560480215720684,
      "acc_norm": 0.33,
      "acc_norm_stderr": 0.047258156262526045
    },
    "hendrycksTest-conceptual_physics": {
      "acc": 0.23829787234042554,
      "acc_stderr": 0.027851252973889764,
      "acc_norm": 0.1574468085106383,
      "acc_norm_stderr": 0.023809905196619695
    },
    "hendrycksTest-econometrics": {
      "acc": 0.24561403508771928,
      "acc_stderr": 0.04049339297748142,
      "acc_norm": 0.24561403508771928,
      "acc_norm_stderr": 0.04049339297748142
    },
    "hendrycksTest-electrical_engineering": {
      "acc": 0.2896551724137931,
      "acc_stderr": 0.03780019230438015,
      "acc_norm": 0.2896551724137931,
      "acc_norm_stderr": 0.03780019230438014
    },
    "hendrycksTest-elementary_mathematics": {
      "acc": 0.21428571428571427,
      "acc_stderr": 0.021132859182754433,
      "acc_norm": 0.23809523809523808,
      "acc_norm_stderr": 0.021935878081184756
    },
    "hendrycksTest-formal_logic": {
      "acc": 0.2857142857142857,
      "acc_stderr": 0.04040610178208841,
      "acc_norm": 0.2857142857142857,
      "acc_norm_stderr": 0.0404061017820884
    },
    "hendrycksTest-global_facts": {
      "acc": 0.18,
      "acc_stderr": 0.03861229196653694,
      "acc_norm": 0.19,
      "acc_norm_stderr": 0.039427724440366234
    },
    "hendrycksTest-high_school_biology": {
      "acc": 0.20967741935483872,
      "acc_stderr": 0.023157879349083522,
      "acc_norm": 0.2645161290322581,
      "acc_norm_stderr": 0.02509189237885928
    },
    "hendrycksTest-high_school_chemistry": {
      "acc": 0.16748768472906403,
      "acc_stderr": 0.026273086047535414,
      "acc_norm": 0.2413793103448276,
      "acc_norm_stderr": 0.03010833071801162
    },
    "hendrycksTest-high_school_computer_science": {
      "acc": 0.23,
      "acc_stderr": 0.042295258468165044,
      "acc_norm": 0.23,
      "acc_norm_stderr": 0.04229525846816505
    },
    "hendrycksTest-high_school_european_history": {
      "acc": 0.24242424242424243,
      "acc_stderr": 0.033464098810559534,
      "acc_norm": 0.26666666666666666,
      "acc_norm_stderr": 0.03453131801885414
    },
    "hendrycksTest-high_school_geography": {
      "acc": 0.20202020202020202,
      "acc_stderr": 0.028606204289229872,
      "acc_norm": 0.2727272727272727,
      "acc_norm_stderr": 0.03173071239071724
    },
    "hendrycksTest-high_school_government_and_politics": {
      "acc": 0.18652849740932642,
      "acc_stderr": 0.02811209121011748,
      "acc_norm": 0.23834196891191708,
      "acc_norm_stderr": 0.030748905363909895
    },
    "hendrycksTest-high_school_macroeconomics": {
      "acc": 0.2282051282051282,
      "acc_stderr": 0.021278393863586282,
      "acc_norm": 0.258974358974359,
      "acc_norm_stderr": 0.02221110681006166
    },
    "hendrycksTest-high_school_mathematics": {
      "acc": 0.2222222222222222,
      "acc_stderr": 0.025348097468097838,
      "acc_norm": 0.3148148148148148,
      "acc_norm_stderr": 0.028317533496066468
    },
    "hendrycksTest-high_school_microeconomics": {
      "acc": 0.20588235294117646,
      "acc_stderr": 0.026265024608275882,
      "acc_norm": 0.29411764705882354,
      "acc_norm_stderr": 0.029597329730978093
    },
    "hendrycksTest-high_school_physics": {
      "acc": 0.24503311258278146,
      "acc_stderr": 0.03511807571804724,
      "acc_norm": 0.2119205298013245,
      "acc_norm_stderr": 0.033367670865679766
    },
    "hendrycksTest-high_school_psychology": {
      "acc": 0.22201834862385322,
      "acc_stderr": 0.017818849564796634,
      "acc_norm": 0.23486238532110093,
      "acc_norm_stderr": 0.018175110510343578
    },
    "hendrycksTest-high_school_statistics": {
      "acc": 0.2222222222222222,
      "acc_stderr": 0.02835321286686346,
      "acc_norm": 0.24537037037037038,
      "acc_norm_stderr": 0.029346665094372937
    },
    "hendrycksTest-high_school_us_history": {
      "acc": 0.28431372549019607,
      "acc_stderr": 0.03166009679399812,
      "acc_norm": 0.27941176470588236,
      "acc_norm_stderr": 0.031493281045079556
    },
    "hendrycksTest-high_school_world_history": {
      "acc": 0.25738396624472576,
      "acc_stderr": 0.028458820991460285,
      "acc_norm": 0.2742616033755274,
      "acc_norm_stderr": 0.029041333510598035
    },
    "hendrycksTest-human_aging": {
      "acc": 0.3273542600896861,
      "acc_stderr": 0.031493846709941306,
      "acc_norm": 0.2645739910313901,
      "acc_norm_stderr": 0.029605103217038308
    },
    "hendrycksTest-human_sexuality": {
      "acc": 0.31297709923664124,
      "acc_stderr": 0.04066962905677697,
      "acc_norm": 0.3282442748091603,
      "acc_norm_stderr": 0.04118438565806298
    },
    "hendrycksTest-international_law": {
      "acc": 0.1652892561983471,
      "acc_stderr": 0.03390780612972776,
      "acc_norm": 0.4462809917355372,
      "acc_norm_stderr": 0.0453793517794788
    },
    "hendrycksTest-jurisprudence": {
      "acc": 0.23148148148148148,
      "acc_stderr": 0.04077494709252626,
      "acc_norm": 0.4074074074074074,
      "acc_norm_stderr": 0.047500773411999854
    },
    "hendrycksTest-logical_fallacies": {
      "acc": 0.1901840490797546,
      "acc_stderr": 0.030833491146281235,
      "acc_norm": 0.26380368098159507,
      "acc_norm_stderr": 0.03462419931615623
    },
    "hendrycksTest-machine_learning": {
      "acc": 0.33035714285714285,
      "acc_stderr": 0.044642857142857144,
      "acc_norm": 0.21428571428571427,
      "acc_norm_stderr": 0.03894641120044792
    },
    "hendrycksTest-management": {
      "acc": 0.17475728155339806,
      "acc_stderr": 0.037601780060266224,
      "acc_norm": 0.23300970873786409,
      "acc_norm_stderr": 0.04185832598928315
    },
    "hendrycksTest-marketing": {
      "acc": 0.2863247863247863,
      "acc_stderr": 0.02961432369045665,
      "acc_norm": 0.3162393162393162,
      "acc_norm_stderr": 0.030463656747340244
    },
    "hendrycksTest-medical_genetics": {
      "acc": 0.31,
      "acc_stderr": 0.04648231987117316,
      "acc_norm": 0.38,
      "acc_norm_stderr": 0.04878317312145633
    },
    "hendrycksTest-miscellaneous": {
      "acc": 0.25925925925925924,
      "acc_stderr": 0.015671006009339572,
      "acc_norm": 0.2515964240102171,
      "acc_norm_stderr": 0.01551732236552963
    },
    "hendrycksTest-moral_disputes": {
      "acc": 0.2630057803468208,
      "acc_stderr": 0.02370309952525817,
      "acc_norm": 0.3092485549132948,
      "acc_norm_stderr": 0.02488314057007175
    },
    "hendrycksTest-moral_scenarios": {
      "acc": 0.24022346368715083,
      "acc_stderr": 0.014288343803925293,
      "acc_norm": 0.24692737430167597,
      "acc_norm_stderr": 0.014422292204808835
    },
    "hendrycksTest-nutrition": {
      "acc": 0.24183006535947713,
      "acc_stderr": 0.024518195641879334,
      "acc_norm": 0.38235294117647056,
      "acc_norm_stderr": 0.027826109307283686
    },
    "hendrycksTest-philosophy": {
      "acc": 0.20257234726688103,
      "acc_stderr": 0.022827317491059682,
      "acc_norm": 0.28938906752411575,
      "acc_norm_stderr": 0.025755865922632935
    },
    "hendrycksTest-prehistory": {
      "acc": 0.21604938271604937,
      "acc_stderr": 0.022899162918445803,
      "acc_norm": 0.1882716049382716,
      "acc_norm_stderr": 0.021751866060815875
    },
    "hendrycksTest-professional_accounting": {
      "acc": 0.2375886524822695,
      "acc_stderr": 0.025389512552729903,
      "acc_norm": 0.24468085106382978,
      "acc_norm_stderr": 0.025645553622266726
    },
    "hendrycksTest-professional_law": {
      "acc": 0.2561929595827901,
      "acc_stderr": 0.011149173153110582,
      "acc_norm": 0.28292046936114734,
      "acc_norm_stderr": 0.011503891323188976
    },
    "hendrycksTest-professional_medicine": {
      "acc": 0.22058823529411764,
      "acc_stderr": 0.025187786660227248,
      "acc_norm": 0.21323529411764705,
      "acc_norm_stderr": 0.024880971512294275
    },
    "hendrycksTest-professional_psychology": {
      "acc": 0.24509803921568626,
      "acc_stderr": 0.017401816711427667,
      "acc_norm": 0.2679738562091503,
      "acc_norm_stderr": 0.017917974069594726
    },
    "hendrycksTest-public_relations": {
      "acc": 0.24545454545454545,
      "acc_stderr": 0.04122066502878285,
      "acc_norm": 0.2,
      "acc_norm_stderr": 0.03831305140884603
    },
    "hendrycksTest-security_studies": {
      "acc": 0.2979591836734694,
      "acc_stderr": 0.02927956741106567,
      "acc_norm": 0.2571428571428571,
      "acc_norm_stderr": 0.02797982353874455
    },
    "hendrycksTest-sociology": {
      "acc": 0.31840796019900497,
      "acc_stderr": 0.03294118479054095,
      "acc_norm": 0.3333333333333333,
      "acc_norm_stderr": 0.03333333333333334
    },
    "hendrycksTest-us_foreign_policy": {
      "acc": 0.33,
      "acc_stderr": 0.04725815626252605,
      "acc_norm": 0.33,
      "acc_norm_stderr": 0.047258156262526045
    },
    "hendrycksTest-virology": {
      "acc": 0.3072289156626506,
      "acc_stderr": 0.03591566797824662,
      "acc_norm": 0.25903614457831325,
      "acc_norm_stderr": 0.03410646614071855
    },
    "hendrycksTest-world_religions": {
      "acc": 0.2982456140350877,
      "acc_stderr": 0.03508771929824565,
      "acc_norm": 0.3684210526315789,
      "acc_norm_stderr": 0.036996580176568775
    }
  },
  "versions": {
    "copa": 0,
    "piqa": 0,
    "rte": 0,
    "winogrande": 0,
    "hendrycksTest-abstract_algebra": 0,
    "hendrycksTest-anatomy": 0,
    "hendrycksTest-astronomy": 0,
    "hendrycksTest-business_ethics": 0,
    "hendrycksTest-clinical_knowledge": 0,
    "hendrycksTest-college_biology": 0,
    "hendrycksTest-college_chemistry": 0,
    "hendrycksTest-college_computer_science": 0,
    "hendrycksTest-college_mathematics": 0,
    "hendrycksTest-college_medicine": 0,
    "hendrycksTest-college_physics": 0,
    "hendrycksTest-computer_security": 0,
    "hendrycksTest-conceptual_physics": 0,
    "hendrycksTest-econometrics": 0,
    "hendrycksTest-electrical_engineering": 0,
    "hendrycksTest-elementary_mathematics": 0,
    "hendrycksTest-formal_logic": 0,
    "hendrycksTest-global_facts": 0,
    "hendrycksTest-high_school_biology": 0,
    "hendrycksTest-high_school_chemistry": 0,
    "hendrycksTest-high_school_computer_science": 0,
    "hendrycksTest-high_school_european_history": 0,
    "hendrycksTest-high_school_geography": 0,
    "hendrycksTest-high_school_government_and_politics": 0,
    "hendrycksTest-high_school_macroeconomics": 0,
    "hendrycksTest-high_school_mathematics": 0,
    "hendrycksTest-high_school_microeconomics": 0,
    "hendrycksTest-high_school_physics": 0,
    "hendrycksTest-high_school_psychology": 0,
    "hendrycksTest-high_school_statistics": 0,
    "hendrycksTest-high_school_us_history": 0,
    "hendrycksTest-high_school_world_history": 0,
    "hendrycksTest-human_aging": 0,
    "hendrycksTest-human_sexuality": 0,
    "hendrycksTest-international_law": 0,
    "hendrycksTest-jurisprudence": 0,
    "hendrycksTest-logical_fallacies": 0,
    "hendrycksTest-machine_learning": 0,
    "hendrycksTest-management": 0,
    "hendrycksTest-marketing": 0,
    "hendrycksTest-medical_genetics": 0,
    "hendrycksTest-miscellaneous": 0,
    "hendrycksTest-moral_disputes": 0,
    "hendrycksTest-moral_scenarios": 0,
    "hendrycksTest-nutrition": 0,
    "hendrycksTest-philosophy": 0,
    "hendrycksTest-prehistory": 0,
    "hendrycksTest-professional_accounting": 0,
    "hendrycksTest-professional_law": 0,
    "hendrycksTest-professional_medicine": 0,
    "hendrycksTest-professional_psychology": 0,
    "hendrycksTest-public_relations": 0,
    "hendrycksTest-security_studies": 0,
    "hendrycksTest-sociology": 0,
    "hendrycksTest-us_foreign_policy": 0,
    "hendrycksTest-virology": 0,
    "hendrycksTest-world_religions": 0
  }
}